[tox]
skipsdist = True
envlist = lint_all
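# Running `tox` with no arguments executes the `envlist` above (lint_all).
# Individual environments can be selected with `-e`, for example:
#   tox -e lint_all
#   tox -e integration_snowflake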
[sqlfluff]
dialect = snowflake
templater = dbt
rules = AL01,AL02,AL05,AM01,AM03,AM05,CP01,CP02,CP03,CP04,CV08,JJ01,LT01,LT02,LT03,LT06,LT07,LT08,LT09,LT10,RF02,RF03,ST01,ST03,ST05,ST08
# AL01: [aliasing.table] Implicit/explicit aliasing of table.
# AL02: [aliasing.column] Implicit/explicit aliasing of columns.
# AL05: [aliasing.unused] Tables should not be aliased if that alias is not used.
# AM01: [ambiguous.distinct] Ambiguous use of 'DISTINCT' in a 'SELECT' statement with 'GROUP BY'.
# AM03: [ambiguous.order_by] Ambiguous ordering directions for columns in order by clause.
# AM05: [ambiguous.join] Join clauses should be fully qualified.
# CP01: [capitalisation.keywords] Inconsistent capitalisation of keywords.
# CP02: [capitalisation.identifiers] Inconsistent capitalisation of unquoted identifiers.
# CP03: [capitalisation.functions] Inconsistent capitalisation of function names.
# CP04: [capitalisation.literals] Inconsistent capitalisation of boolean/null literal.
# CV08: [convention.left_join] Use 'LEFT JOIN' instead of 'RIGHT JOIN'.
# JJ01: [jinja.padding] Jinja tags should have a single whitespace on either side.
# LT01: [layout.spacing] Inappropriate Spacing.
# LT02: [layout.indent] Incorrect Indentation.
# LT03: [layout.operators] Operators should follow a standard for being before/after newlines.
# LT06: [layout.functions] Function name not immediately followed by parenthesis.
# LT07: [layout.cte_bracket] 'WITH' clause closing bracket should be on a new line.
# LT08: [layout.cte_newline] Blank line expected but not found after CTE closing bracket.
# LT09: [layout.select_targets] Select targets should be on a new line unless there is only one select target.
# LT10: [layout.select_modifiers] 'SELECT' modifiers (e.g. 'DISTINCT') must be on the same line as 'SELECT'.
# RF02: [references.qualification] References should be qualified if select has more than one referenced table/view.
# RF03: [references.consistent] References should be consistent in statements with a single table.
# ST01: [structure.else_null] Do not specify 'else null' in a case when statement (redundant).
# ST03: [structure.unused_cte] Query defines a CTE (common-table expression) but does not use it.
# ST05: [structure.subquery] Join/From clauses should not contain subqueries. Use CTEs instead.
# ST08: [structure.distinct] 'DISTINCT' used with parentheses.
deps =
sqlfluff-templater-dbt~=2.0.2
dbt-snowflake~=1.6.0
[sqlfluff:indentation]
indent_unit = space
tab_space_size = 4
[sqlfluff:layout:type:comma]
spacing_before = touch
line_position = leading
[sqlfluff:rules:capitalisation.keywords]
capitalisation_policy = lower
[sqlfluff:rules:capitalisation.identifiers]
capitalisation_policy = lower
[sqlfluff:rules:references.consistent]
single_table_references = unqualified
[sqlfluff:rules:capitalisation.functions]
capitalisation_policy = lower
[sqlfluff:rules:capitalisation.literals]
capitalisation_policy = lower
[sqlfluff:rules:structure.subquery]
forbid_subquery_in = both
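# Illustrative only (not part of the config): a fragment in the style the
# rules above enforce -- lowercase keywords/identifiers/functions, leading
# commas, 4-space indents, CTEs instead of subqueries, and unqualified
# references when only one table is in scope:
#   with orders as (
#       select
#           order_id
#           , customer_id
#           , sum(amount) as total_amount
#       from {{ ref('stg_orders') }}
#       group by 1, 2
#   )
#
#   select * from orders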
[sqlfluff:templater:dbt]
profiles_dir = integration_test_project
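# The dbt templater renders Jinja through dbt itself, so linting needs a
# working dbt project and the profiles.yml kept in integration_test_project/.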
[testenv]
passenv =
DBT_PROFILES_DIR
GITHUB_SHA_OVERRIDE
GITHUB_SHA
DBT_VERSION
DBT_ENV_SECRET_SNOWFLAKE_TEST_ACCOUNT
DBT_ENV_SECRET_SNOWFLAKE_TEST_USER
DBT_ENV_SECRET_SNOWFLAKE_TEST_PASSWORD
DBT_ENV_SECRET_SNOWFLAKE_TEST_ROLE
DBT_ENV_SECRET_SNOWFLAKE_TEST_DATABASE
DBT_ENV_SECRET_SNOWFLAKE_TEST_WAREHOUSE
DBT_ENV_SECRET_DATABRICKS_HOST
DBT_ENV_SECRET_DATABRICKS_HTTP_PATH
DBT_ENV_SECRET_DATABRICKS_TOKEN
DBT_ENV_SECRET_GCP_PROJECT
DBT_ENV_SPARK_DRIVER_PATH
DBT_ENV_SPARK_ENDPOINT
GOOGLE_APPLICATION_CREDENTIALS
DBT_CLOUD_PROJECT_ID
DBT_CLOUD_JOB_ID
DBT_CLOUD_RUN_ID
DBT_CLOUD_RUN_REASON_CATEGORY
DBT_CLOUD_RUN_REASON
TEST_ENV_VAR_1
TEST_ENV_VAR_NUMBER
TEST_ENV_VAR_EMPTY
TEST_ENV_VAR_WITH_QUOTE
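# The variables above are forwarded from the host shell into every test
# environment. For the Snowflake suite, for example, credentials must be
# exported before invoking tox (placeholder values shown, not real defaults):
#   export DBT_ENV_SECRET_SNOWFLAKE_TEST_ACCOUNT=my_account
#   export DBT_ENV_SECRET_SNOWFLAKE_TEST_USER=my_user
#   export DBT_ENV_SECRET_SNOWFLAKE_TEST_PASSWORD=...
#   tox -e integration_snowflake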
[testenv:lint]
deps = {[sqlfluff]deps}
commands = sqlfluff lint {posargs} --ignore parsing
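# Anything after `--` on the command line is passed through as {posargs},
# so a subset of paths can be linted, e.g.:
#   tox -e lint -- models/staging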
[testenv:lint_all]
deps = {[sqlfluff]deps}
commands = sqlfluff lint models --ignore parsing
[testenv:fix]
deps = {[sqlfluff]deps}
commands = sqlfluff fix {posargs} --ignore parsing
[testenv:fix_all]
deps = {[sqlfluff]deps}
commands = sqlfluff fix models --ignore parsing
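# The fix environments apply the same rule set but rewrite files in place,
# so review the resulting diff before committing.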
# Generate docs
[testenv:generate_docs]
deps = dbt-snowflake~=1.6.0
commands = dbt docs generate --profiles-dir integration_test_project
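# Usage: `tox -e generate_docs`. dbt writes the generated catalog and
# manifest to the project's target/ directory.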
# Snowflake integration tests
[testenv:integration_snowflake]
changedir = integration_test_project
deps = dbt-snowflake~=1.6.0
commands =
dbt clean
dbt deps
dbt build --target snowflake
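# Usage: `tox -e integration_snowflake` (requires the Snowflake secrets
# listed under passenv). The pinned environments below repeat the same
# steps against older dbt-snowflake releases; the Databricks and BigQuery
# suites further down follow the same pattern.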
[testenv:integration_snowflake_1_3_0]
changedir = integration_test_project
deps = dbt-snowflake~=1.3.0
commands =
dbt clean
dbt deps
dbt build --target snowflake
[testenv:integration_snowflake_1_4_0]
changedir = integration_test_project
deps = dbt-snowflake~=1.4.0
commands =
dbt clean
dbt deps
dbt build --target snowflake
[testenv:integration_snowflake_1_5_0]
changedir = integration_test_project
deps = dbt-snowflake~=1.5.0
commands =
dbt clean
dbt deps
dbt build --target snowflake
[testenv:integration_snowflake_1_6_0]
changedir = integration_test_project
deps = dbt-snowflake~=1.6.0
commands =
dbt clean
dbt deps
dbt build --target snowflake
# Databricks integration tests
[testenv:integration_databricks]
changedir = integration_test_project
deps = dbt-databricks~=1.6.0
commands =
dbt clean
dbt deps
dbt build --target databricks
[testenv:integration_databricks_1_3_0]
changedir = integration_test_project
deps = dbt-databricks~=1.3.0
commands =
dbt clean
dbt deps
dbt build --target databricks
[testenv:integration_databricks_1_4_0]
changedir = integration_test_project
deps = dbt-databricks~=1.4.0
commands =
dbt clean
dbt deps
dbt build --target databricks
[testenv:integration_databricks_1_5_0]
changedir = integration_test_project
deps = dbt-databricks~=1.5.0
commands =
dbt clean
dbt deps
dbt build --target databricks
[testenv:integration_databricks_1_6_0]
changedir = integration_test_project
deps = dbt-databricks~=1.6.0
commands =
dbt clean
dbt deps
dbt build --target databricks
# BigQuery integration tests
[testenv:integration_bigquery]
changedir = integration_test_project
deps = dbt-bigquery~=1.6.0
commands =
dbt clean
dbt deps
dbt build --target bigquery --vars '"my_var": "my value"'
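# `--vars` takes a YAML string: the quoting above passes the single mapping
# `"my_var": "my value"`, presumably so the integration project can assert
# on var rendering. The BigQuery targets also rely on
# GOOGLE_APPLICATION_CREDENTIALS and DBT_ENV_SECRET_GCP_PROJECT from passenv.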
[testenv:integration_bigquery_1_3_0]
changedir = integration_test_project
deps = dbt-bigquery~=1.3.0
commands =
dbt clean
dbt deps
dbt build --target bigquery --vars '"my_var": "my value"'
[testenv:integration_bigquery_1_4_0]
changedir = integration_test_project
deps = dbt-bigquery~=1.4.0
commands =
dbt clean
dbt deps
dbt build --target bigquery --vars '"my_var": "my value"'
[testenv:integration_bigquery_1_5_0]
changedir = integration_test_project
deps = dbt-bigquery~=1.5.0
commands =
dbt clean
dbt deps
dbt build --target bigquery --vars '"my_var": "my value"'
[testenv:integration_bigquery_1_6_0]
changedir = integration_test_project
deps = dbt-bigquery~=1.6.0
commands =
dbt clean
dbt deps
dbt build --target bigquery --vars '"my_var": "my value"'
# Spark integration test (disabled)
[testenv:integration_spark]
changedir = integration_test_project
deps = dbt-spark[ODBC]~=1.4.0
commands =
dbt clean
dbt deps
dbt build --exclude snapshot --target spark
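# The [ODBC] extra pulls in pyodbc for the ODBC connection method; the
# DBT_ENV_SPARK_DRIVER_PATH and DBT_ENV_SPARK_ENDPOINT variables under
# passenv are presumably consumed by the Spark target's profile.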