Merged pull request: changes from all commits
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -427,7 +427,7 @@ repos:
types_or: [python, pyi]
args: [--fix]
require_serial: true
additional_dependencies: ['ruff==0.11.13']
additional_dependencies: ['ruff==0.12.1']
exclude: ^airflow-core/tests/unit/dags/test_imports\.py$|^performance/tests/test_.*\.py$
- id: ruff-format
name: Run 'ruff format'
@@ -437,7 +437,7 @@ repos:
types_or: [python, pyi]
args: []
require_serial: true
additional_dependencies: ['ruff==0.11.13']
additional_dependencies: ['ruff==0.12.1']
exclude: ^airflow-core/tests/unit/dags/test_imports\.py$
- id: replace-bad-characters
name: Replace bad characters
@@ -1590,7 +1590,7 @@ repos:
name: Check imports in providers
entry: ./scripts/ci/pre_commit/check_imports_in_providers.py
language: python
additional_dependencies: ['rich>=12.4.4', 'ruff==0.11.13']
additional_dependencies: ['rich>=12.4.4', 'ruff==0.12.1']
files: ^providers/.*/src/airflow/providers/.*version_compat.*\.py$
require_serial: true
## ONLY ADD PRE-COMMITS HERE THAT REQUIRE CI IMAGE
6 changes: 3 additions & 3 deletions airflow-core/src/airflow/utils/log/logging_mixin.py
@@ -24,7 +24,7 @@
import sys
from io import TextIOBase, UnsupportedOperation
from logging import Handler, StreamHandler
from typing import IO, TYPE_CHECKING, Any, Optional, TypeVar, cast
from typing import IO, TYPE_CHECKING, Any, TypeVar, cast

if TYPE_CHECKING:
from logging import Logger
@@ -72,9 +72,9 @@ class LoggingMixin:
# Parent logger used by this class. It should match one of the loggers defined in the
# `logging_config_class`. By default, this attribute is used to create the final name of the logger, and
# will prefix the `_logger_name` with a separating dot.
_log_config_logger_name: Optional[str] = None # noqa: UP007
_log_config_logger_name: str | None = None

_logger_name: Optional[str] = None # noqa: UP007
_logger_name: str | None = None

def __init__(self, context=None):
self._set_context(context)
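The hunk above switches the class attributes from `typing.Optional[str]` to the PEP 604 spelling, which lets both the `Optional` import and the `# noqa: UP007` markers go away. A minimal sketch of the same pattern (illustrative only, not the Airflow class itself):

```python
from __future__ import annotations  # keeps `str | None` usable in annotations on Python < 3.10


class ExampleLoggingMixin:
    # PEP 604 unions replace typing.Optional, so no `from typing import Optional` is needed.
    _log_config_logger_name: str | None = None
    _logger_name: str | None = None
```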
10 changes: 5 additions & 5 deletions airflow-core/tests/unit/always/test_providers_manager.py
@@ -72,12 +72,12 @@ def test_providers_are_loaded(self):
assert self._caplog.records == []

def test_hooks_deprecation_warnings_generated(self):
providers_manager = ProvidersManager()
providers_manager._provider_dict["test-package"] = ProviderInfo(
version="0.0.1",
data={"hook-class-names": ["airflow.providers.sftp.hooks.sftp.SFTPHook"]},
)
with pytest.warns(expected_warning=DeprecationWarning, match="hook-class-names") as warning_records:
providers_manager = ProvidersManager()
providers_manager._provider_dict["test-package"] = ProviderInfo(
version="0.0.1",
data={"hook-class-names": ["airflow.providers.sftp.hooks.sftp.SFTPHook"]},
)
providers_manager._discover_hooks()
assert warning_records

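The hunk above moves the provider setup inside the `pytest.warns` block, so the deprecation warning is raised while the context manager is recording. A self-contained sketch of that shape; the `register_legacy_hooks` helper is hypothetical and stands in for the ProvidersManager machinery:

```python
import warnings

import pytest


def register_legacy_hooks(provider_data: dict) -> list[str]:
    """Hypothetical stand-in that warns when the deprecated key is present."""
    if "hook-class-names" in provider_data:
        warnings.warn("'hook-class-names' is deprecated", DeprecationWarning, stacklevel=2)
        return list(provider_data["hook-class-names"])
    return list(provider_data.get("hooks", []))


def test_hooks_deprecation_warning_generated():
    # Setup and the call that triggers the warning both happen inside the block;
    # the captured records are only inspected after the context manager exits.
    with pytest.warns(DeprecationWarning, match="hook-class-names") as warning_records:
        hooks = register_legacy_hooks(
            {"hook-class-names": ["airflow.providers.sftp.hooks.sftp.SFTPHook"]}
        )
    assert hooks == ["airflow.providers.sftp.hooks.sftp.SFTPHook"]
    assert warning_records
```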
@@ -1909,7 +1909,7 @@ def test_get_task_group_states_with_multiple_task(self, client, session, dag_mak
},
}

def test_get_task_group_states_with_logical_dates(self, client, session, dag_maker, serialized=True):
def test_get_task_group_states_with_logical_dates(self, client, session, dag_maker):
with dag_maker("test_get_task_group_states_with_logical_dates", serialized=True):
with TaskGroup("group1"):
EmptyOperator(task_id="task1")
49 changes: 26 additions & 23 deletions airflow-core/tests/unit/core/test_configuration.py
@@ -1040,11 +1040,14 @@ def test_deprecated_options(self):
# Remove it so we are sure we use the right setting
conf.remove_option("celery", "worker_concurrency")

with pytest.warns(DeprecationWarning):
with pytest.warns(DeprecationWarning, match="celeryd_concurrency"):
with mock.patch.dict("os.environ", AIRFLOW__CELERY__CELERYD_CONCURRENCY="99"):
assert conf.getint("celery", "worker_concurrency") == 99

with pytest.warns(DeprecationWarning), conf_vars({("celery", "celeryd_concurrency"): "99"}):
with (
pytest.warns(DeprecationWarning, match="celeryd_concurrency"),
conf_vars({("celery", "celeryd_concurrency"): "99"}),
):
assert conf.getint("celery", "worker_concurrency") == 99

@pytest.mark.parametrize(
@@ -1109,13 +1112,13 @@ def test_deprecated_options_cmd(self):
):
conf.remove_option("celery", "result_backend")
with conf_vars({("celery", "celery_result_backend_cmd"): "/bin/echo 99"}):
with pytest.warns(DeprecationWarning):
tmp = None
if "AIRFLOW__CELERY__RESULT_BACKEND" in os.environ:
tmp = os.environ.pop("AIRFLOW__CELERY__RESULT_BACKEND")
tmp = None
if "AIRFLOW__CELERY__RESULT_BACKEND" in os.environ:
tmp = os.environ.pop("AIRFLOW__CELERY__RESULT_BACKEND")
with pytest.warns(DeprecationWarning, match="result_backend"):
assert conf.getint("celery", "result_backend") == 99
if tmp:
os.environ["AIRFLOW__CELERY__RESULT_BACKEND"] = tmp
if tmp:
os.environ["AIRFLOW__CELERY__RESULT_BACKEND"] = tmp

def test_deprecated_values_from_conf(self):
test_conf = AirflowConfigParser(
@@ -1135,7 +1138,7 @@ def test_deprecated_values_from_conf(self):

with pytest.warns(FutureWarning):
test_conf.validate()
assert test_conf.get("core", "hostname_callable") == "airflow.utils.net.getfqdn"
assert test_conf.get("core", "hostname_callable") == "airflow.utils.net.getfqdn"

@pytest.mark.parametrize(
"old, new",
@@ -1160,19 +1163,19 @@ def test_deprecated_env_vars_upgraded_and_removed(self, old, new):
old_env_var = test_conf._env_var_name(old_section, old_key)
new_env_var = test_conf._env_var_name(new_section, new_key)

with pytest.warns(FutureWarning):
with mock.patch.dict("os.environ", **{old_env_var: old_value}):
# Can't start with the new env var existing...
os.environ.pop(new_env_var, None)
with mock.patch.dict("os.environ", **{old_env_var: old_value}):
# Can't start with the new env var existing...
os.environ.pop(new_env_var, None)

with pytest.warns(FutureWarning):
test_conf.validate()
assert test_conf.get(new_section, new_key) == new_value
# We also need to make sure the deprecated env var is removed
# so that any subprocesses don't use it in place of our updated
# value.
assert old_env_var not in os.environ
# and make sure we track the old value as well, under the new section/key
assert test_conf.upgraded_values[(new_section, new_key)] == old_value
assert test_conf.get(new_section, new_key) == new_value
# We also need to make sure the deprecated env var is removed
# so that any subprocesses don't use it in place of our updated
# value.
assert old_env_var not in os.environ
# and make sure we track the old value as well, under the new section/key
assert test_conf.upgraded_values[(new_section, new_key)] == old_value

@pytest.mark.parametrize(
"conf_dict",
@@ -1200,10 +1203,10 @@ def make_config():
test_conf.validate()
return test_conf

with pytest.warns(FutureWarning):
with mock.patch.dict("os.environ", AIRFLOW__CORE__HOSTNAME_CALLABLE="airflow.utils.net:getfqdn"):
with mock.patch.dict("os.environ", AIRFLOW__CORE__HOSTNAME_CALLABLE="airflow.utils.net:getfqdn"):
with pytest.warns(FutureWarning):
test_conf = make_config()
assert test_conf.get("core", "hostname_callable") == "airflow.utils.net.getfqdn"
assert test_conf.get("core", "hostname_callable") == "airflow.utils.net.getfqdn"

with reset_warning_registry():
with warnings.catch_warnings(record=True) as warning:
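Two patterns recur in the hunks above: `pytest.warns` gains a `match=` filter so an unrelated warning cannot satisfy the test, and stacked context managers are grouped into one parenthesized `with` statement. A reduced sketch of both, using a stand-in `override_env` helper rather than Airflow's `conf_vars`:

```python
import os
import warnings
from contextlib import contextmanager

import pytest


@contextmanager
def override_env(key: str, value: str):
    """Stand-in for conf_vars(): temporarily set a single environment variable."""
    previous = os.environ.get(key)
    os.environ[key] = value
    try:
        yield
    finally:
        if previous is None:
            os.environ.pop(key, None)
        else:
            os.environ[key] = previous


def read_deprecated_int(key: str) -> int:
    """Hypothetical reader that warns whenever the deprecated key is consulted."""
    warnings.warn(f"{key} is deprecated", DeprecationWarning, stacklevel=2)
    return int(os.environ[key])


def test_parenthesized_context_managers():
    # Parentheses (Python 3.10+) keep each context manager on its own line, and
    # match= ties the expectation to this specific deprecation message.
    with (
        pytest.warns(DeprecationWarning, match="CELERYD_CONCURRENCY"),
        override_env("AIRFLOW__CELERY__CELERYD_CONCURRENCY", "99"),
    ):
        assert read_deprecated_int("AIRFLOW__CELERY__CELERYD_CONCURRENCY") == 99
```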
10 changes: 6 additions & 4 deletions airflow-core/tests/unit/datasets/test_dataset.py
@@ -73,12 +73,14 @@
),
)
def test_backward_compat_import_before_airflow_3_2(module_path, attr_name, expected_value, warning_message):
with pytest.warns() as record:
import importlib
import importlib

with pytest.warns() as record:
mod = importlib.import_module(module_path, __name__)
attr = getattr(mod, attr_name)
assert f"{attr.__module__}.{attr.__name__}" == expected_value

assert f"{attr.__module__}.{attr.__name__}" == expected_value
assert record[0].category is DeprecationWarning
assert str(record[0].message) == warning_message


# ruff: noqa: PT031
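The module-level `# ruff: noqa: PT031` above suppresses what appears to be the ruff check requiring a `pytest.warns` block to contain a single simple statement, which this parametrized import test cannot easily satisfy. Where the rule can be followed, the shape looks roughly like this; `load_legacy_module` is a hypothetical shim, not the Airflow datasets module:

```python
import warnings

import pytest


def load_legacy_module():
    """Hypothetical import shim that emits a deprecation warning."""
    warnings.warn("Import from the new location instead", DeprecationWarning, stacklevel=2)
    return object()


def test_warns_block_holds_one_statement():
    # Only the statement that triggers the warning sits inside the block;
    # everything else is asserted after the records have been captured.
    with pytest.warns(DeprecationWarning) as record:
        load_legacy_module()

    assert record[0].category is DeprecationWarning
    assert "new location" in str(record[0].message)
```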
12 changes: 6 additions & 6 deletions airflow-core/tests/unit/listeners/test_listeners.py
@@ -69,7 +69,7 @@ def clean_listener_manager():


@provide_session
def test_listener_gets_calls(create_task_instance, session=None):
def test_listener_gets_calls(create_task_instance, session):
lm = get_listener_manager()
lm.add_listener(full_listener)

@@ -84,7 +84,7 @@ def test_listener_gets_calls(create_task_instance, session=None):


@provide_session
def test_multiple_listeners(create_task_instance, session=None):
def test_multiple_listeners(create_task_instance, session):
lm = get_listener_manager()
lm.add_listener(full_listener)
lm.add_listener(lifecycle_listener)
@@ -105,7 +105,7 @@ def test_multiple_listeners(create_task_instance, session=None):


@provide_session
def test_listener_gets_only_subscribed_calls(create_task_instance, session=None):
def test_listener_gets_only_subscribed_calls(create_task_instance, session):
lm = get_listener_manager()
lm.add_listener(partial_listener)

@@ -130,7 +130,7 @@ def test_listener_suppresses_exceptions(create_task_instance, session, cap_struc


@provide_session
def test_listener_captures_failed_taskinstances(create_task_instance_of_operator, session=None):
def test_listener_captures_failed_taskinstances(create_task_instance_of_operator, session):
lm = get_listener_manager()
lm.add_listener(full_listener)

@@ -145,7 +145,7 @@ def test_listener_captures_failed_taskinstances(create_task_instance_of_operator


@provide_session
def test_listener_captures_longrunning_taskinstances(create_task_instance_of_operator, session=None):
def test_listener_captures_longrunning_taskinstances(create_task_instance_of_operator, session):
lm = get_listener_manager()
lm.add_listener(full_listener)

@@ -159,7 +159,7 @@ def test_listener_captures_longrunning_taskinstances(create_task_instance_of_ope


@provide_session
def test_class_based_listener(create_task_instance, session=None):
def test_class_based_listener(create_task_instance, session):
lm = get_listener_manager()
listener = class_listener.ClassBasedListener()
lm.add_listener(listener)
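Every `session=None` default in this file (and in the taskinstance and timestamp tests below) is dropped; PT028 appears to be the ruff check that flags default values on test-function parameters, and the argument is always supplied anyway. A small sketch of the fixture-injected pattern, with a stand-in `session` fixture rather than Airflow's real database session:

```python
import pytest


class FakeSession:
    """Stand-in for the SQLAlchemy session that the real `session` fixture yields."""


@pytest.fixture
def session():
    # Airflow's test fixtures provide a real database session; this stand-in only
    # shows that pytest supplies the argument, so the parameter needs no default.
    return FakeSession()


def test_uses_injected_session(session):
    # No `session=None`: the parameter has no default, which keeps the linter quiet
    # and makes it obvious the value always comes from the fixture above.
    assert isinstance(session, FakeSession)
```

Where dropping the default is not practical, the PR instead adds a file-level `# ruff: noqa: PT028` (see the `test_log_groomer_sidecar_container_setting`, EKS, and `test_get_df` modules further down).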
2 changes: 1 addition & 1 deletion airflow-core/tests/unit/models/test_dagbag.py
@@ -1032,7 +1032,7 @@ def test_capture_warnings(self):
with pytest.warns(UserWarning, match="(Foo|Bar|Baz)") as ctx:
with _capture_with_reraise() as cw:
self.raise_warnings()
assert len(cw) == 3
assert len(cw) == 3
assert len(ctx.list) == 3

def test_capture_warnings_with_parent_error_filter(self):
6 changes: 3 additions & 3 deletions airflow-core/tests/unit/models/test_taskinstance.py
@@ -404,7 +404,7 @@ def test_pool_slots_property(self):
)

@provide_session
def test_ti_updates_with_task(self, create_task_instance, session=None):
def test_ti_updates_with_task(self, create_task_instance, session):
"""
test that updating the executor_config propagates to the TaskInstance DB
"""
@@ -1269,7 +1269,7 @@ def test_respects_prev_dagrun_dep(self, create_task_instance):
)
@provide_session
def test_are_dependents_done(
self, downstream_ti_state, expected_are_dependents_done, create_task_instance, session=None
self, downstream_ti_state, expected_are_dependents_done, create_task_instance, session
):
ti = create_task_instance(session=session)
dag = ti.task.dag
@@ -2288,7 +2288,7 @@ def test_template_with_json_variable_missing(self, create_task_instance, session
ti.task.render_template('{{ var.json.get("missing_variable") }}', context)

@provide_session
def test_handle_failure(self, dag_maker, session=None):
def test_handle_failure(self, dag_maker, session):
class CustomOp(BaseOperator):
def execute(self, context): ...

4 changes: 2 additions & 2 deletions airflow-core/tests/unit/models/test_timestamp.py
@@ -54,7 +54,7 @@ def add_log(execdate, session, dag_maker, timezone_override=None):


@provide_session
def test_timestamp_behaviour(dag_maker, session=None):
def test_timestamp_behaviour(dag_maker, session):
execdate = timezone.utcnow()
with time_machine.travel(execdate, tick=False):
current_time = timezone.utcnow()
@@ -66,7 +66,7 @@ def test_timestamp_behaviour(dag_maker, session=None):


@provide_session
def test_timestamp_behaviour_with_timezone(dag_maker, session=None):
def test_timestamp_behaviour_with_timezone(dag_maker, session):
execdate = timezone.utcnow()
with time_machine.travel(execdate, tick=False):
current_time = timezone.utcnow()
2 changes: 1 addition & 1 deletion devel-common/pyproject.toml
@@ -35,7 +35,7 @@ dependencies = [
"kgb>=7.2.0",
"requests_mock>=1.11.0",
"rich>=13.6.0",
"ruff==0.11.13",
"ruff==0.12.1",
"semver>=3.0.2",
"time-machine>=2.13.0",
"wheel>=0.42.0",
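The `ruff==0.12.1` pin updated here mirrors the three pre-commit hook entries at the top of the diff, presumably so that pre-commit, CI, and the devel environment all run the same ruff release. A hypothetical consistency check, with file paths taken from this diff and meant to be run from the repository root:

```python
import re
import sys
from pathlib import Path

# Locations that carry a pinned ruff version in this diff.
PINNED_FILES = [".pre-commit-config.yaml", "devel-common/pyproject.toml"]
PIN_PATTERN = re.compile(r"ruff==([0-9][\w.]*)")


def collect_ruff_pins() -> set[str]:
    """Collect every `ruff==X.Y.Z` pin found in the tracked files."""
    pins: set[str] = set()
    for name in PINNED_FILES:
        pins.update(PIN_PATTERN.findall(Path(name).read_text(encoding="utf-8")))
    return pins


if __name__ == "__main__":
    versions = collect_ruff_pins()
    if len(versions) != 1:
        sys.exit(f"ruff pins disagree: {sorted(versions)}")
    print(f"ruff is pinned consistently at {versions.pop()}")
```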
@@ -223,3 +223,6 @@ def test_log_groomer_sidecar_container_setting(self, hook_type="preStop"):
assert lifecycle_hook_params["lifecycle_parsed"] == jmespath.search(
f"spec.template.spec.containers[1].lifecycle.{hook_type}", doc
)


# ruff: noqa: PT028
3 changes: 3 additions & 0 deletions providers/amazon/tests/unit/amazon/aws/hooks/test_eks.py
@@ -1360,3 +1360,6 @@ def assert_is_valid_uri(value: str) -> None:

assert all([result.scheme, result.netloc, result.path])
assert REGION in value


# ruff: noqa: PT028
@@ -120,7 +120,7 @@ def test_unexpected_aws_connection_type(self, conn_type):
warning_message = f"expected connection type 'aws', got '{conn_type}'"
with pytest.warns(UserWarning, match=warning_message):
wrap_conn = AwsConnectionWrapper(conn=mock_connection_factory(conn_type=conn_type))
assert wrap_conn.conn_type == conn_type
assert wrap_conn.conn_type == conn_type

@pytest.mark.parametrize("aws_session_token", [None, "mock-aws-session-token"])
@pytest.mark.parametrize("aws_secret_access_key", ["mock-aws-secret-access-key"])
@@ -260,7 +260,7 @@ def test_cleanup_stuck_queued_tasks(self, mock_fail):
executor.running = {ti.key}
executor.tasks = {ti.key: AsyncResult("231")}
assert executor.has_task(ti)
with pytest.warns(DeprecationWarning):
with pytest.warns(DeprecationWarning, match="cleanup_stuck_queued_tasks"):
executor.cleanup_stuck_queued_tasks(tis=tis)
executor.sync()
assert executor.tasks == {}
@@ -1250,7 +1250,7 @@ def test_cleanup_stuck_queued_tasks(self, mock_kube_dynamic_client, dag_maker, c
executor.kube_scheduler = mock.MagicMock()
ti.refresh_from_db()
tis = [ti]
with pytest.warns(DeprecationWarning):
with pytest.warns(DeprecationWarning, match="cleanup_stuck_queued_tasks"):
executor.cleanup_stuck_queued_tasks(tis=tis)
executor.kube_scheduler.delete_pod.assert_called_once()
assert executor.running == set()
@@ -573,3 +573,6 @@ def test_get_df(df_type, df_class, description):
assert df.row(1)[0] == result_sets[1][0]

assert isinstance(df, df_class)


# ruff: noqa: PT028