@@ -28,7 +28,11 @@
 from airflow.providers.edge3.executors.edge_executor import EdgeExecutor
 from airflow.providers.edge3.models.edge_job import EdgeJobModel
 from airflow.providers.edge3.models.edge_worker import EdgeWorkerModel, EdgeWorkerState
-from airflow.utils import timezone
+
+try:
+    from airflow.sdk import timezone
+except ImportError:
+    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
 from airflow.utils.session import create_session
 from airflow.utils.state import TaskInstanceState
 
@@ -62,7 +66,7 @@ def get_test_executor(self, pool_slots=1):
     def test__process_tasks_bad_command(self):
         executor, key = self.get_test_executor()
         task_tuple = (key, ["hello", "world"], None, None)
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="The command must start with "):
             executor._process_tasks([task_tuple])
 
     @pytest.mark.skipif(AIRFLOW_V_3_0_PLUS, reason="_process_tasks is not used in Airflow 3.0+")
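A note on the first hunk in this file: the new try/except import is the usual compatibility shim for providers that must run on Airflow 3, where timezone is importable from airflow.sdk, as well as on Airflow 2.x, where it lives in airflow.utils. A minimal sketch of how such a shim is consumed, assuming both import paths expose the same helpers (utcnow() here is illustrative, not taken from this PR):

try:
    from airflow.sdk import timezone  # Airflow 3: exposed via the task SDK
except ImportError:
    from airflow.utils import timezone  # Airflow 2.x fallback

# Either import path is expected to provide the same timezone helpers,
# e.g. an aware "now":
queued_when = timezone.utcnow()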
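The recurring change across the remaining files relies on pytest treating match= as a regular expression that is searched (via re.search) against str(exc), not compared for equality, so a message prefix such as "The command must start with " is enough. A rough, self-contained sketch of those semantics, using a toy stand-in rather than the provider code:

import re

import pytest

def must_start_with_airflow(command: list[str]) -> None:
    # hypothetical stand-in that raises the way the executor's validation does
    if command[:1] != ["airflow"]:
        raise ValueError(f"The command must start with ['airflow'] but got {command}.")

# match= is applied with re.search against str(exception), so a prefix suffices
with pytest.raises(ValueError, match="The command must start with "):
    must_start_with_airflow(["hello", "world"])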
@@ -162,7 +162,7 @@ def test_format_url(self, host, expected):
         Test the format_url method of the ElasticsearchTaskHandler class.
         """
         if expected == "ValueError":
-            with pytest.raises(ValueError):
+            with pytest.raises(ValueError, match="'https://' is not a valid URL."):
                 assert ElasticsearchTaskHandler.format_url(host) == expected
         else:
             assert ElasticsearchTaskHandler.format_url(host) == expected
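One caveat for match strings like the one above: because the pattern is a regex, the unescaped dot in "'https://' is not a valid URL." matches any character. That is harmless here, but re.escape gives an exact-literal check when a message contains metacharacters. A small illustrative snippet, not part of the PR:

import re

import pytest

message = "'https://' is not a valid URL."

# escaping turns the literal message into an exact regex
with pytest.raises(ValueError, match=re.escape(message)):
    raise ValueError(message)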
providers/exasol/tests/unit/exasol/hooks/test_exasol.py (2 additions, 3 deletions)
@@ -99,7 +99,7 @@ def test_sqlalchemy_scheme_property(self, init_scheme, extra_scheme, expected_re
         if not expect_error:
             assert hook.sqlalchemy_scheme == expected_result
         else:
-            with pytest.raises(ValueError):
+            with pytest.raises(ValueError, match="sqlalchemy_scheme in connection extra should be one of"):
                 _ = hook.sqlalchemy_scheme
 
     @pytest.mark.parametrize(
@@ -213,9 +213,8 @@ def test_run_multi_queries(self):
         self.conn.commit.assert_not_called()
 
     def test_run_no_queries(self):
-        with pytest.raises(ValueError) as err:
+        with pytest.raises(ValueError, match="List of SQL statements is empty"):
             self.db_hook.run(sql=[])
-        assert err.value.args[0] == "List of SQL statements is empty"
 
     def test_no_result_set(self):
         """Queries like DROP and SELECT are of type rowCount (not resultSet),
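The hunk above also shows the second recurring cleanup in this PR: the two-step "capture as err, then assert on err.value.args[0]" idiom collapses into the match= argument. The check becomes a regex search rather than strict equality, which is equivalent here since the full message appears in the pattern. A before/after sketch with a hypothetical run() helper standing in for the hook:

import pytest

def run(sql):
    # hypothetical stand-in for a hook's run() that rejects an empty statement list
    if not sql:
        raise ValueError("List of SQL statements is empty")

# before: capture the exception and compare args[0] exactly
with pytest.raises(ValueError) as err:
    run(sql=[])
assert err.value.args[0] == "List of SQL statements is empty"

# after: one line, regex search against str(exc)
with pytest.raises(ValueError, match="List of SQL statements is empty"):
    run(sql=[])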
providers/exasol/tests/unit/exasol/hooks/test_sql.py (1 addition, 2 deletions)
@@ -285,6 +285,5 @@ def test_query(
 def test_no_query(empty_statement):
     dbapi_hook = ExasolHookForTests()
     dbapi_hook.get_conn.return_value.cursor.rowcount = lambda: 0
-    with pytest.raises(ValueError) as err:
+    with pytest.raises(ValueError, match="List of SQL statements is empty"):
         dbapi_hook.run(sql=empty_statement)
-    assert err.value.args[0] == "List of SQL statements is empty"
providers/ftp/tests/unit/ftp/operators/test_ftp.py (2 additions, 2 deletions)
@@ -168,15 +168,15 @@ def test_arg_checking(self, mock_put):
         task_1.execute(None)
 
     def test_unequal_local_remote_file_paths(self):
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="1 paths in local_filepath != 2 paths in remote_filepath"):
             FTPFileTransmitOperator(
                 task_id="test_ftp_unequal_paths",
                 ftp_conn_id=DEFAULT_CONN_ID,
                 local_filepath="/tmp/test",
                 remote_filepath=["/tmp/test1", "/tmp/test2"],
             ).execute(None)
 
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match="2 paths in local_filepath != 1 paths in remote_filepath"):
             FTPFileTransmitOperator(
                 task_id="test_ftp_unequal_paths",
                 ftp_conn_id=DEFAULT_CONN_ID,