Commit
Move data type macros into dbt-core (#5428)
* Move data type macros into dbt-core
* Changelog entry
* Code quality checks. Fix type_float
Showing 9 changed files with 334 additions and 0 deletions.
@@ -0,0 +1,7 @@
kind: Features
body: Move type_* macros from dbt-utils into dbt-core, with tests
time: 2022-06-30T13:54:52.165139+02:00
custom:
  Author: jtcohen6
  Issue: "5317"
  PR: "5428"
117 changes: 117 additions & 0 deletions
core/dbt/include/global_project/macros/utils/data_types.sql
@@ -0,0 +1,117 @@
{# string ------------------------------------------------- #}

{%- macro type_string() -%}
  {{ return(adapter.dispatch('type_string', 'dbt')()) }}
{%- endmacro -%}

{% macro default__type_string() %}
  {{ return(api.Column.translate_type("string")) }}
{% endmacro %}

-- This will return 'text' by default
-- On Postgres + Snowflake, that's equivalent to varchar (no size)
-- Redshift will treat that as varchar(256)


{# timestamp ------------------------------------------------- #}

{%- macro type_timestamp() -%}
  {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}
{%- endmacro -%}

{% macro default__type_timestamp() %}
  {{ return(api.Column.translate_type("timestamp")) }}
{% endmacro %}

/*
POSTGRES
https://www.postgresql.org/docs/current/datatype-datetime.html:
    The SQL standard requires that writing just `timestamp`
    be equivalent to `timestamp without time zone`, and
    PostgreSQL honors that behavior.
    `timestamptz` is accepted as an abbreviation for `timestamp with time zone`;
    this is a PostgreSQL extension.

SNOWFLAKE
https://docs.snowflake.com/en/sql-reference/data-types-datetime.html#timestamp
    The TIMESTAMP_* variation associated with TIMESTAMP is specified by the
    TIMESTAMP_TYPE_MAPPING session parameter. The default is TIMESTAMP_NTZ.

BIGQUERY
    TIMESTAMP means 'timestamp with time zone'
    DATETIME means 'timestamp without time zone'
    TODO: shouldn't this return DATETIME instead of TIMESTAMP, for consistency with other databases?
    e.g. dateadd returns a DATETIME
*/


{# float ------------------------------------------------- #}

{%- macro type_float() -%}
  {{ return(adapter.dispatch('type_float', 'dbt')()) }}
{%- endmacro -%}

{% macro default__type_float() %}
  {{ return(api.Column.translate_type("float")) }}
{% endmacro %}


{# numeric ------------------------------------------------ #}

{%- macro type_numeric() -%}
  {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}
{%- endmacro -%}

/*
This one can't be just translate_type, since precision/scale make it a bit more complicated.

On most databases, the default (precision, scale) is something like:
    Redshift: (18, 0)
    Snowflake: (38, 0)
    Postgres: (<=131072, 0)

https://www.postgresql.org/docs/current/datatype-numeric.html:
    Specifying NUMERIC without any precision or scale creates an “unconstrained numeric”
    column in which numeric values of any length can be stored, up to the implementation limits.
    A column of this kind will not coerce input values to any particular scale,
    whereas numeric columns with a declared scale will coerce input values to that scale.
    (The SQL standard requires a default scale of 0, i.e., coercion to integer precision.
    We find this a bit useless. If you're concerned about portability, always specify
    the precision and scale explicitly.)
*/

{% macro default__type_numeric() %}
  {{ return(api.Column.numeric_type("numeric", 28, 6)) }}
{% endmacro %}


{# bigint ------------------------------------------------- #}

{%- macro type_bigint() -%}
  {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}
{%- endmacro -%}

-- We don't have a conversion type for 'bigint' in TYPE_LABELS,
-- so this actually just returns the string 'bigint'

{% macro default__type_bigint() %}
  {{ return(api.Column.translate_type("bigint")) }}
{% endmacro %}

-- Good news: BigQuery now supports 'bigint' (and 'int') as an alias for 'int64'


{# int ------------------------------------------------- #}

{%- macro type_int() -%}
  {{ return(adapter.dispatch('type_int', 'dbt')()) }}
{%- endmacro -%}

{%- macro default__type_int() -%}
  {{ return(api.Column.translate_type("integer")) }}
{%- endmacro -%}

-- returns 'int' everywhere, except BigQuery, where it returns 'int64'
-- (but BigQuery also now accepts 'int' as a valid alias for 'int64')
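
For reference, the default__ implementations above defer to dbt's base Column class. A minimal sketch of what those calls resolve to on the default adapter, assuming dbt-core's dbt.adapters.base.column.Column and its TYPE_LABELS mapping (adapters override both the macros and the Column class):

from dbt.adapters.base.column import Column

# translate_type() looks the name up in Column.TYPE_LABELS and falls back to
# returning the input unchanged when there is no entry
Column.translate_type("string")   # "TEXT" by default (hence the 'text' note above)
Column.translate_type("bigint")   # no TYPE_LABELS entry, so just "bigint"

# numeric_type() interpolates precision and scale, which is why
# default__type_numeric() renders as numeric(28,6)
Column.numeric_type("numeric", 28, 6)  # "numeric(28,6)"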
34 changes: 34 additions & 0 deletions
tests/adapter/dbt/tests/adapter/utils/data_types/base_data_type_macro.py
@@ -0,0 +1,34 @@
from dbt.tests.util import run_dbt, check_relations_equal, get_relation_columns


class BaseDataTypeMacro:
    # make it possible to dynamically update the macro call with a namespace
    # (e.g.) 'dateadd', 'dbt.dateadd', 'dbt_utils.dateadd'
    def macro_namespace(self):
        return ""

    def interpolate_macro_namespace(self, model_sql, macro_name):
        macro_namespace = self.macro_namespace()
        return (
            model_sql.replace(f"{macro_name}(", f"{macro_namespace}.{macro_name}(")
            if macro_namespace
            else model_sql
        )

    def assert_columns_equal(self, project, expected_cols, actual_cols):
        assert (
            expected_cols == actual_cols
        ), f"Type difference detected: {expected_cols} vs. {actual_cols}"

    def test_check_types_assert_match(self, project):
        run_dbt(["build"])

        # check contents equal
        check_relations_equal(project.adapter, ["expected", "actual"])

        # check types equal
        expected_cols = get_relation_columns(project.adapter, "expected")
        actual_cols = get_relation_columns(project.adapter, "actual")
        print(f"Expected: {expected_cols}")
        print(f"Actual: {actual_cols}")
        self.assert_columns_equal(project, expected_cols, actual_cols)
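
As a usage sketch (the subclass name below is illustrative, not part of this diff): a package that re-exports these macros, such as a dbt_utils compatibility shim, can reuse the same test by overriding macro_namespace(), and interpolate_macro_namespace() will rewrite the macro call in the model SQL accordingly:

from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro


class BaseDataTypeMacroDbtUtils(BaseDataTypeMacro):
    # hypothetical subclass: exercise the namespaced dbt_utils.type_* macros
    def macro_namespace(self):
        return "dbt_utils"


# interpolate_macro_namespace then turns
#   select cast('1' as {{ type_bigint() }})
# into
#   select cast('1' as {{ dbt_utils.type_bigint() }})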
23 changes: 23 additions & 0 deletions
tests/adapter/dbt/tests/adapter/utils/data_types/test_type_bigint.py
@@ -0,0 +1,23 @@
import pytest
from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro

models__expected_sql = """
select 9223372036854775800 as bigint_col
""".lstrip()

models__actual_sql = """
select cast('9223372036854775800' as {{ type_bigint() }}) as bigint_col
"""


class BaseTypeBigInt(BaseDataTypeMacro):
    @pytest.fixture(scope="class")
    def models(self):
        return {
            "expected.sql": models__expected_sql,
            "actual.sql": self.interpolate_macro_namespace(models__actual_sql, "type_bigint"),
        }


class TestTypeBigInt(BaseTypeBigInt):
    pass
24 changes: 24 additions & 0 deletions
tests/adapter/dbt/tests/adapter/utils/data_types/test_type_float.py
@@ -0,0 +1,24 @@
import pytest
from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro

seeds__expected_csv = """float_col
1.2345
""".lstrip()

models__actual_sql = """
select cast('1.2345' as {{ type_float() }}) as float_col
"""


class BaseTypeFloat(BaseDataTypeMacro):
    @pytest.fixture(scope="class")
    def seeds(self):
        return {"expected.csv": seeds__expected_csv}

    @pytest.fixture(scope="class")
    def models(self):
        return {"actual.sql": self.interpolate_macro_namespace(models__actual_sql, "type_float")}


class TestTypeFloat(BaseTypeFloat):
    pass
24 changes: 24 additions & 0 deletions
tests/adapter/dbt/tests/adapter/utils/data_types/test_type_int.py
@@ -0,0 +1,24 @@
import pytest
from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro

seeds__expected_csv = """int_col
12345678
""".lstrip()

models__actual_sql = """
select cast('12345678' as {{ type_int() }}) as int_col
"""


class BaseTypeInt(BaseDataTypeMacro):
    @pytest.fixture(scope="class")
    def seeds(self):
        return {"expected.csv": seeds__expected_csv}

    @pytest.fixture(scope="class")
    def models(self):
        return {"actual.sql": self.interpolate_macro_namespace(models__actual_sql, "type_int")}


class TestTypeInt(BaseTypeInt):
    pass
40 changes: 40 additions & 0 deletions
tests/adapter/dbt/tests/adapter/utils/data_types/test_type_numeric.py
@@ -0,0 +1,40 @@
import pytest
from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro

seeds__expected_csv = """numeric_col
1.2345
""".lstrip()

# need to explicitly cast this to avoid it being a double/float;
# the empty `{}` below is a str.format placeholder, filled in by numeric_fixture_type()
seeds__expected_yml = """
version: 2
seeds:
  - name: expected
    config:
      column_types:
        numeric_col: {}
"""

models__actual_sql = """
select cast('1.2345' as {{ type_numeric() }}) as numeric_col
"""


class BaseTypeNumeric(BaseDataTypeMacro):
    def numeric_fixture_type(self):
        return "numeric(28,6)"

    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "expected.csv": seeds__expected_csv,
            "expected.yml": seeds__expected_yml.format(self.numeric_fixture_type()),
        }

    @pytest.fixture(scope="class")
    def models(self):
        return {"actual.sql": self.interpolate_macro_namespace(models__actual_sql, "type_numeric")}


class TestTypeNumeric(BaseTypeNumeric):
    pass
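
numeric_fixture_type() exists so an adapter whose {{ type_numeric() }} renders to something other than numeric(28,6) can keep the expected seed column in sync. A hypothetical adapter-side override (class and type name are illustrative only):

from dbt.tests.adapter.utils.data_types.test_type_numeric import BaseTypeNumeric


class TestTypeNumericMyAdapter(BaseTypeNumeric):
    # hypothetical: match whatever type_numeric() resolves to on this warehouse
    def numeric_fixture_type(self):
        return "number(28,6)"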
25 changes: 25 additions & 0 deletions
tests/adapter/dbt/tests/adapter/utils/data_types/test_type_string.py
@@ -0,0 +1,25 @@
import pytest
from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro

seeds__expected_csv = """string_col
"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
""".lstrip()

models__actual_sql = """
select cast('Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'
    as {{ type_string() }}) as string_col
"""


class BaseTypeString(BaseDataTypeMacro):
    @pytest.fixture(scope="class")
    def seeds(self):
        return {"expected.csv": seeds__expected_csv}

    @pytest.fixture(scope="class")
    def models(self):
        return {"actual.sql": self.interpolate_macro_namespace(models__actual_sql, "type_string")}


class TestTypeString(BaseTypeString):
    pass
40 changes: 40 additions & 0 deletions
tests/adapter/dbt/tests/adapter/utils/data_types/test_type_timestamp.py
@@ -0,0 +1,40 @@
import pytest
from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro

seeds__expected_csv = """timestamp_col
2021-01-01 01:01:01
""".lstrip()

# need to explicitly cast this to avoid it being a DATETIME on BigQuery
# (but - should it actually be a DATETIME, for consistency with other dbs?)
seeds__expected_yml = """
version: 2
seeds:
  - name: expected
    config:
      column_types:
        timestamp_col: timestamp
"""

models__actual_sql = """
select cast('2021-01-01 01:01:01' as {{ type_timestamp() }}) as timestamp_col
"""


class BaseTypeTimestamp(BaseDataTypeMacro):
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "expected.csv": seeds__expected_csv,
            "expected.yml": seeds__expected_yml,
        }

    @pytest.fixture(scope="class")
    def models(self):
        return {
            "actual.sql": self.interpolate_macro_namespace(models__actual_sql, "type_timestamp")
        }


class TestTypeTimestamp(BaseTypeTimestamp):
    pass
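
Following the TODO in the macro comments above (BigQuery TIMESTAMP vs. DATETIME), an adapter whose {{ type_timestamp() }} resolves to a different type can override the seeds fixture so the expected column matches. A hypothetical sketch, reusing the module's seeds__expected_csv (the class name and 'datetime' mapping are illustrative only):

import pytest
from dbt.tests.adapter.utils.data_types.test_type_timestamp import (
    BaseTypeTimestamp,
    seeds__expected_csv,
)

# hypothetical expected.yml for an adapter where the macro maps to 'datetime'
seeds__expected_yml_datetime = """
version: 2
seeds:
  - name: expected
    config:
      column_types:
        timestamp_col: datetime
"""


class TestTypeTimestampDatetime(BaseTypeTimestamp):
    @pytest.fixture(scope="class")
    def seeds(self):
        return {
            "expected.csv": seeds__expected_csv,
            "expected.yml": seeds__expected_yml_datetime,
        }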