From c5d0da6e9bea5bd7ad3f033ee2601d403cf1a368 Mon Sep 17 00:00:00 2001
From: Xch1
Date: Thu, 2 Oct 2025 10:36:25 +0800
Subject: [PATCH 1/5] modify test_dingding

Signed-off-by: Xch1
---
 providers/dingding/tests/unit/dingding/hooks/test_dingding.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/providers/dingding/tests/unit/dingding/hooks/test_dingding.py b/providers/dingding/tests/unit/dingding/hooks/test_dingding.py
index 3f5f5da085fc2..d87f1519b4020 100644
--- a/providers/dingding/tests/unit/dingding/hooks/test_dingding.py
+++ b/providers/dingding/tests/unit/dingding/hooks/test_dingding.py
@@ -215,5 +215,5 @@ def test_send_not_support_type(self):
             "message": "Airflow dingding text message remind no one",
         }
         hook = DingdingHook(**config)
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="receive not_support_type"):
             hook.send()

From 08f31f6e62b3797d58489a4b955cacf103be78b7 Mon Sep 17 00:00:00 2001
From: Xch1
Date: Thu, 2 Oct 2025 10:49:14 +0800
Subject: [PATCH 2/5] modify test_dbt

Signed-off-by: Xch1
---
 .../dbt/cloud/tests/unit/dbt/cloud/hooks/test_dbt.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/providers/dbt/cloud/tests/unit/dbt/cloud/hooks/test_dbt.py b/providers/dbt/cloud/tests/unit/dbt/cloud/hooks/test_dbt.py
index d3d126b8f3574..08bc9c6975704 100644
--- a/providers/dbt/cloud/tests/unit/dbt/cloud/hooks/test_dbt.py
+++ b/providers/dbt/cloud/tests/unit/dbt/cloud/hooks/test_dbt.py
@@ -36,7 +36,11 @@
     TokenAuth,
     fallback_to_default_account,
 )
-from airflow.utils import timezone
+
+try:
+    from airflow.sdk import timezone
+except ImportError:
+    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
 
 ACCOUNT_ID_CONN = "account_id_conn"
 NO_ACCOUNT_ID_CONN = "no_account_id_conn"
@@ -57,6 +61,7 @@
 
 BASE_URL = "https://cloud.getdbt.com/"
 SINGLE_TENANT_URL = "https://single.tenant.getdbt.com/"
+NOT_VALID_DBT_STATUS = "not a valid DbtCloudJobRunStatus"
 
 DEFAULT_LIST_PROJECTS_RESPONSE = {
     "data": [
@@ -127,7 +132,7 @@ def test_valid_job_run_status(self, statuses):
         ids=_get_ids(invalid_job_run_statuses),
     )
     def test_invalid_job_run_status(self, statuses):
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match=NOT_VALID_DBT_STATUS):
             DbtCloudJobRunStatus.check_is_valid(statuses)
 
     @pytest.mark.parametrize(
@@ -144,7 +149,7 @@ def test_valid_terminal_job_run_status(self, statuses):
         ids=_get_ids(invalid_job_run_statuses),
     )
     def test_invalid_terminal_job_run_status(self, statuses):
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match=NOT_VALID_DBT_STATUS):
             DbtCloudJobRunStatus.check_is_valid(statuses)

From 1734bc06b9a14c0548ca68654a6dc6d402fa7165 Mon Sep 17 00:00:00 2001
From: Xch1
Date: Thu, 2 Oct 2025 11:04:22 +0800
Subject: [PATCH 3/5] modify test_databrick

Signed-off-by: Xch1
---
 .../databricks/operators/test_databricks.py | 21 ++++++++-----------
 1 file changed, 9 insertions(+), 12 deletions(-)

diff --git a/providers/databricks/tests/unit/databricks/operators/test_databricks.py b/providers/databricks/tests/unit/databricks/operators/test_databricks.py
index 715a0d35f55e6..e1f1eb7e92f9f 100644
--- a/providers/databricks/tests/unit/databricks/operators/test_databricks.py
+++ b/providers/databricks/tests/unit/databricks/operators/test_databricks.py
@@ -2404,10 +2404,10 @@ def test_both_new_and_existing_cluster_set(self):
             existing_cluster_id="existing_cluster_id",
             databricks_conn_id="test_conn_id",
         )
-        with pytest.raises(ValueError) as exc_info:
+        with pytest.raises(
+            ValueError, match="Both new_cluster and existing_cluster_id are set. Only one should be set."
+        ):
             operator._get_run_json()
-        exception_message = "Both new_cluster and existing_cluster_id are set. Only one should be set."
-        assert str(exc_info.value) == exception_message
 
     def test_both_new_and_existing_cluster_unset(self):
         operator = DatabricksNotebookOperator(
@@ -2416,10 +2416,8 @@ def test_both_new_and_existing_cluster_unset(self):
             source="test_source",
             databricks_conn_id="test_conn_id",
         )
-        with pytest.raises(ValueError) as exc_info:
+        with pytest.raises(ValueError, match="Must specify either existing_cluster_id or new_cluster."):
             operator._get_run_json()
-        exception_message = "Must specify either existing_cluster_id or new_cluster."
-        assert str(exc_info.value) == exception_message
 
     def test_job_runs_forever_by_default(self):
         operator = DatabricksNotebookOperator(
@@ -2442,13 +2440,12 @@ def test_zero_execution_timeout_raises_error(self):
             existing_cluster_id="existing_cluster_id",
             execution_timeout=timedelta(seconds=0),
         )
-        with pytest.raises(ValueError) as exc_info:
+        with pytest.raises(
+            ValueError,
+            match="If you've set an `execution_timeout` for the task, ensure it's not `0`. "
+            "Set it instead to `None` if you desire the task to run indefinitely.",
+        ):
             operator._get_run_json()
-        exception_message = (
-            "If you've set an `execution_timeout` for the task, ensure it's not `0`. "
-            "Set it instead to `None` if you desire the task to run indefinitely."
-        )
-        assert str(exc_info.value) == exception_message
 
     def test_extend_workflow_notebook_packages(self):
         """Test that the operator can extend the notebook packages of a Databricks workflow task group."""

From edf397715f94af826d83e744cdcf681beabfdf26 Mon Sep 17 00:00:00 2001
From: Xch1
Date: Thu, 2 Oct 2025 11:06:44 +0800
Subject: [PATCH 4/5] modify hooks/test_databricks_sql

Signed-off-by: Xch1
---
 .../tests/unit/databricks/hooks/test_databricks_sql.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/providers/databricks/tests/unit/databricks/hooks/test_databricks_sql.py b/providers/databricks/tests/unit/databricks/hooks/test_databricks_sql.py
index 38fd5e26bdd4e..2d1b74b4cec13 100644
--- a/providers/databricks/tests/unit/databricks/hooks/test_databricks_sql.py
+++ b/providers/databricks/tests/unit/databricks/hooks/test_databricks_sql.py
@@ -373,9 +373,8 @@ def test_query(
     ],
 )
 def test_no_query(databricks_hook, empty_statement):
-    with pytest.raises(ValueError) as err:
+    with pytest.raises(ValueError, match="List of SQL statements is empty"):
         databricks_hook.run(sql=empty_statement)
-        assert err.value.args[0] == "List of SQL statements is empty"
 
 
 @pytest.mark.parametrize(

From 83d52334358cb7658888ce4a499481593106cea9 Mon Sep 17 00:00:00 2001
From: Xch1
Date: Thu, 2 Oct 2025 11:08:58 +0800
Subject: [PATCH 5/5] modify hooks/test_databricks

Signed-off-by: Xch1
---
 .../databricks/tests/unit/databricks/hooks/test_databricks.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/providers/databricks/tests/unit/databricks/hooks/test_databricks.py b/providers/databricks/tests/unit/databricks/hooks/test_databricks.py
index fb57aacf83596..0b3ad5fbd14c5 100644
--- a/providers/databricks/tests/unit/databricks/hooks/test_databricks.py
+++ b/providers/databricks/tests/unit/databricks/hooks/test_databricks.py
@@ -358,7 +358,7 @@ def test_parse_host_with_scheme(self):
         assert host == HOST
 
     def test_init_bad_retry_limit(self):
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="Retry limit must be greater than or equal to 1"):
             DatabricksHook(retry_limit=0)
 
     def test_do_api_call_retries_with_retryable_error(self):