diff --git a/CHANGELOG.md b/CHANGELOG.md index 8da90756..6966ef66 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,7 @@ +### Release [1.6.0], TBD +#### Improvements +- Compatible with dbt 1.6.x + ### Release [1.5.0], 2023-11-23 #### Improvements - Compatible with dbt 1.5.x diff --git a/dbt/adapters/clickhouse/__version__.py b/dbt/adapters/clickhouse/__version__.py index 4139253f..f7c7de21 100644 --- a/dbt/adapters/clickhouse/__version__.py +++ b/dbt/adapters/clickhouse/__version__.py @@ -1 +1 @@ -version = '1.5.0' +version = '1.6.0' diff --git a/dbt/adapters/clickhouse/connections.py b/dbt/adapters/clickhouse/connections.py index c4098649..dcb411f8 100644 --- a/dbt/adapters/clickhouse/connections.py +++ b/dbt/adapters/clickhouse/connections.py @@ -73,7 +73,7 @@ def get_table_from_response(cls, response, column_names) -> agate.Table: return dbt.clients.agate_helper.table_from_data_flat(data, column_names) def execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False + self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None ) -> Tuple[AdapterResponse, agate.Table]: # Don't try to fetch result of clustered DDL responses, we don't know what to do with them if fetch and ddl_re.match(sql): diff --git a/dbt/adapters/clickhouse/dbclient.py b/dbt/adapters/clickhouse/dbclient.py index ef069ab3..59b4bfed 100644 --- a/dbt/adapters/clickhouse/dbclient.py +++ b/dbt/adapters/clickhouse/dbclient.py @@ -159,7 +159,9 @@ def _ensure_database(self, database_engine, cluster_name) -> None: if cluster_name is not None and cluster_name.strip() != '' else '' ) - self.command(f'CREATE DATABASE {self.database}{cluster_clause}{engine_clause}') + self.command( + f'CREATE DATABASE IF NOT EXISTS {self.database}{cluster_clause}{engine_clause}' + ) db_exists = self.command(check_db) if not db_exists: raise FailedToConnectError( diff --git a/dev_requirements.txt b/dev_requirements.txt index 5e1771ce..8906bfec 100644 --- a/dev_requirements.txt 
+++ b/dev_requirements.txt @@ -1,16 +1,16 @@ -dbt-core~=1.5.8 +dbt-core~=1.6.9 clickhouse-connect>=0.6.21 clickhouse-driver>=0.2.6 pytest>=7.2.0 pytest-dotenv==0.5.2 -dbt-tests-adapter~=1.5.8 -black==22.3.0 +dbt-tests-adapter~=1.6.9 +black==23.11.0 isort==5.10.1 mypy==0.991 yamllint==1.26.3 flake8==4.0.1 types-requests==2.27.29 -agate~=1.6.3 +agate~=1.7.1 requests~=2.27.1 setuptools~=65.3.0 types-setuptools==67.1.0.0 \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 34c3848d..68570715 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [tool.black] line-length = 100 skip-string-normalization = true -target-version = ['py38', 'py39'] +target-version = ['py310', 'py311'] exclude = '(\.eggs|\.git|\.mypy_cache|\.venv|venv|env|_build|build|build|dist|)' [tool.isort] diff --git a/setup.py b/setup.py index 0bb32f68..7a3e4779 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ def _dbt_clickhouse_version(): package_version = _dbt_clickhouse_version() description = '''The Clickhouse plugin for dbt (data build tool)''' -dbt_version = '1.5.0' +dbt_version = '1.6.0' dbt_minor = '.'.join(dbt_version.split('.')[0:2]) if not package_version.startswith(dbt_minor): diff --git a/tests/integration/adapter/constraints/test_constraints.py b/tests/integration/adapter/constraints/test_constraints.py index 2fe35537..f18a7ca9 100644 --- a/tests/integration/adapter/constraints/test_constraints.py +++ b/tests/integration/adapter/constraints/test_constraints.py @@ -60,7 +60,7 @@ def test__contract_wrong_column_names(self, project): assert all([(exp in log_output or exp.upper() in log_output) for exp in expected]) def test__contract_wrong_column_data_types(self, project, data_types): - for (sql_column_value, schema_data_type, error_data_type) in data_types: + for sql_column_value, schema_data_type, error_data_type in data_types: # Write parametrized data_type to sql file write_file( my_model_data_type_sql.format(sql_value=sql_column_value), @@ -91,7 
+91,7 @@ def test__contract_wrong_column_data_types(self, project, data_types): assert all([(exp in log_output or exp.upper() in log_output) for exp in expected]) def test__contract_correct_column_data_types(self, project, data_types): - for (sql_column_value, schema_data_type, _) in data_types: + for sql_column_value, schema_data_type, _ in data_types: # Write parametrized data_type to sql file write_file( my_model_data_type_sql.format(sql_value=sql_column_value),