diff --git a/airflow-core/src/airflow/cli/commands/db_command.py b/airflow-core/src/airflow/cli/commands/db_command.py
index a0a02ade87b24..ea3241320fdf8 100644
--- a/airflow-core/src/airflow/cli/commands/db_command.py
+++ b/airflow-core/src/airflow/cli/commands/db_command.py
@@ -81,8 +81,7 @@ def _get_version_revision(version: str, revision_heads_map: dict[str, str] | Non
 
         if current < wanted:
             return head
-    else:
-        return None
+    return None
 
 
 def run_db_migrate_command(args, command, revision_heads_map: dict[str, str]):
diff --git a/airflow-core/src/airflow/configuration.py b/airflow-core/src/airflow/configuration.py
index 042d6c84834bc..18b376ccba520 100644
--- a/airflow-core/src/airflow/configuration.py
+++ b/airflow-core/src/airflow/configuration.py
@@ -1717,8 +1717,7 @@ def _deprecated_value_is_set_in_config(
             deprecated_section_array = config.items(section=deprecated_section, raw=True)
             if any(key == deprecated_key for key, _ in deprecated_section_array):
                 return True
-        else:
-            return False
+        return False
 
     @staticmethod
     def _deprecated_variable_is_set(deprecated_section: str, deprecated_key: str) -> bool:
diff --git a/airflow-core/src/airflow/settings.py b/airflow-core/src/airflow/settings.py
index 5a87384487641..9a54060ea5cfe 100644
--- a/airflow-core/src/airflow/settings.py
+++ b/airflow-core/src/airflow/settings.py
@@ -19,7 +19,7 @@
 
 import atexit
 import functools
-import json
+import json as json_lib
 import logging
 import os
 import sys
@@ -122,7 +122,7 @@ AsyncSession: Callable[..., SAAsyncSession]
 
 
 # The JSON library to use for DAG Serialization and De-Serialization
-json = json
+json = json_lib
 
 # Display alerts on the dashboard
 # Useful for warning about setup issues or announcing changes to end users
diff --git a/dev/breeze/src/airflow_breeze/utils/packages.py b/dev/breeze/src/airflow_breeze/utils/packages.py
index 340831d6b6ef5..1cd6ec9fc7750 100644
--- a/dev/breeze/src/airflow_breeze/utils/packages.py
+++ b/dev/breeze/src/airflow_breeze/utils/packages.py
@@ -185,8 +185,7 @@ def get_provider_id_from_path(file_path: Path) -> str | None:
             for providers_root_candidate in parent.parents:
                 if providers_root_candidate.name == "providers":
                     return parent.relative_to(providers_root_candidate).as_posix().replace("/", ".")
-            else:
-                return None
+            return None
     return None
 
 
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py
index fcf7f4c547ceb..28426270d46f6 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py
@@ -386,8 +386,7 @@ def poll_job_status(self, job_id: str, match_status: list[str]) -> bool:
             )
             if job_status in match_status:
                 return True
-        else:
-            raise AirflowException(f"AWS Batch job ({job_id}) status checks exceed max_retries")
+        raise AirflowException(f"AWS Batch job ({job_id}) status checks exceed max_retries")
 
     def get_job_description(self, job_id: str) -> dict:
         """
@@ -426,10 +425,9 @@ def get_job_description(self, job_id: str) -> dict:
                     "check Amazon Provider AWS Connection documentation for more details.",
                     str(err),
                 )
-        else:
-            raise AirflowException(
-                f"AWS Batch job ({job_id}) description error: exceeded status_retries ({self.status_retries})"
-            )
+        raise AirflowException(
+            f"AWS Batch job ({job_id}) description error: exceeded status_retries ({self.status_retries})"
+        )
 
     @staticmethod
     def parse_job_description(job_id: str, response: dict) -> dict:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/datasync.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/datasync.py
index aca571fa62c11..8074978bbb1a1 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/datasync.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/datasync.py
@@ -319,5 +319,4 @@ def wait_for_task_execution(self, task_execution_arn: str, max_iterations: int =
             else:
                 raise AirflowException(f"Unknown status: {status}")  # Should never happen
             time.sleep(self.wait_interval_seconds)
-        else:
-            raise AirflowTaskTimeout("Max iterations exceeded!")
+        raise AirflowTaskTimeout("Max iterations exceeded!")
diff --git a/providers/common/sql/tests/unit/common/sql/operators/test_sql.py b/providers/common/sql/tests/unit/common/sql/operators/test_sql.py
index a5c1d3819f06b..1524464ffacfc 100644
--- a/providers/common/sql/tests/unit/common/sql/operators/test_sql.py
+++ b/providers/common/sql/tests/unit/common/sql/operators/test_sql.py
@@ -362,7 +362,7 @@ def test_fail_all_checks_check(self, monkeypatch):
             ("X", "min", -1),
             ("X", "max", 20),
         ]
-        operator = operator = self._construct_operator(monkeypatch, self.valid_column_mapping, records)
+        operator = self._construct_operator(monkeypatch, self.valid_column_mapping, records)
         with pytest.raises(AirflowException):
             operator.execute(context=MagicMock())
 
diff --git a/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/__init__.py b/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/__init__.py
index 1393e42e7753a..a612e16630ff8 100644
--- a/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/__init__.py
+++ b/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/__init__.py
@@ -83,8 +83,7 @@ def _normalize_hosts(hosts):
                 h["url_prefix"] = parsed_url.path
 
             out.append(h)
-    else:
-        out.append(host)
+    out.append(host)
 
     return out
diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py b/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py
index 5fec3a31b8ddd..35d7579fd05b3 100644
--- a/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py
@@ -1802,15 +1802,14 @@ def _prepare_query_configuration(
                     "must be a dict with {'projectId':'', "
                     "'datasetId':'', 'tableId':''}"
                 )
-        else:
-            configuration["query"].update(
-                {
-                    "allowLargeResults": allow_large_results,
-                    "flattenResults": flatten_results,
-                    "writeDisposition": write_disposition,
-                    "createDisposition": create_disposition,
-                }
-            )
+        configuration["query"].update(
+            {
+                "allowLargeResults": allow_large_results,
+                "flattenResults": flatten_results,
+                "writeDisposition": write_disposition,
+                "createDisposition": create_disposition,
+            }
+        )
 
         if (
             "useLegacySql" in configuration["query"]
diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py b/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py
index a063b39b1a35f..42dee568eb9e1 100644
--- a/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py
+++ b/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py
@@ -371,8 +371,7 @@ def download(
                         num_max_attempts,
                     )
                     raise
-        else:
-            raise NotImplementedError  # should not reach this, but makes mypy happy
+        raise NotImplementedError  # should not reach this, but makes mypy happy
 
     def download_as_byte_array(
         self,
diff --git a/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py
index 3f9ccddc0ab40..fe399df721f91 100644
--- a/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py
+++ b/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py
@@ -179,11 +179,9 @@ def poll_job_in_queue(self, location: str, jenkins_server: Jenkins) -> int:
                 build_number = json_response["executable"]["number"]
                 self.log.info("Job executed on Jenkins side with the build number %s", build_number)
                 return build_number
-        else:
-            raise AirflowException(
-                f"The job hasn't been executed after polling the queue "
-                f"{self.max_try_before_job_appears} times"
-            )
+        raise AirflowException(
+            f"The job hasn't been executed after polling the queue {self.max_try_before_job_appears} times"
+        )
 
     @cached_property
     def hook(self) -> JenkinsHook:
diff --git a/pyproject.toml b/pyproject.toml
index d0fba5df9a5bd..538f08db153ff 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -587,6 +587,9 @@ extend-select = [
     "TID25", # flake8-tidy-imports rules
     "E", # pycodestyle rules
     "W", # pycodestyle rules
+    # Warning (PLW) re-implemented in ruff from Pylint
+    "PLW0120", # else clause on loop without a break statement; remove the else and dedent its contents
+    "PLW0127", # Self-assignment of variable
     # Per rule enables
     "RUF006", # Checks for asyncio dangling task
     "RUF015", # Checks for unnecessary iterable allocation for first element
diff --git a/scripts/ci/prek/common_prek_utils.py b/scripts/ci/prek/common_prek_utils.py
index 03c22391187fb..1477c35dc9eba 100644
--- a/scripts/ci/prek/common_prek_utils.py
+++ b/scripts/ci/prek/common_prek_utils.py
@@ -317,8 +317,7 @@ def get_provider_id_from_path(file_path: Path) -> str | None:
             for providers_root_candidate in parent.parents:
                 if providers_root_candidate.name == "providers":
                     return parent.relative_to(providers_root_candidate).as_posix().replace("/", ".")
-            else:
-                return None
+            return None
     return None
 
 
diff --git a/scripts/ci/prek/update_providers_dependencies.py b/scripts/ci/prek/update_providers_dependencies.py
index ea4a39e8f7bf5..1fa0363780774 100755
--- a/scripts/ci/prek/update_providers_dependencies.py
+++ b/scripts/ci/prek/update_providers_dependencies.py
@@ -145,8 +145,7 @@ def get_provider_id_from_path(file_path: Path) -> str | None:
             for providers_root_candidate in parent.parents:
                 if providers_root_candidate.name == "providers":
                     return parent.relative_to(providers_root_candidate).as_posix().replace("/", ".")
-            else:
-                return None
+            return None
     return None
 
 
diff --git a/scripts/in_container/in_container_utils.py b/scripts/in_container/in_container_utils.py
index 2083f00c537eb..b6f5bd4d95f5a 100644
--- a/scripts/in_container/in_container_utils.py
+++ b/scripts/in_container/in_container_utils.py
@@ -122,8 +122,7 @@ def get_provider_id_from_path(file_path: Path) -> str | None:
             for providers_root_candidate in parent.parents:
                 if providers_root_candidate.name == "providers":
                     return parent.relative_to(providers_root_candidate).as_posix().replace("/", ".")
-            else:
-                return None
+            return None
     return None
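
For reference, below is a minimal standalone Python sketch of the two Pylint-derived rules that the pyproject.toml hunk enables. It is not part of the patch; the function names and version data in it are illustrative only. PLW0120 fires when an else clause is attached to a loop that contains no break, since the else body then runs unconditionally after the loop and can simply be dedented. PLW0127 fires on no-op self-assignments of a variable.

# Standalone illustration of PLW0120 and PLW0127 (not part of the patch).
# Names and data are made up for the example.


def revision_before(heads: dict[str, str], wanted: str) -> str | None:
    # PLW0120 flags this: the else is attached to the for, but the loop has
    # no break, so the else body always runs after normal loop completion.
    for version, head in heads.items():
        if version == wanted:
            return head
    else:  # noqa: PLW0120 (kept here only to show the flagged form)
        return None


def revision_after(heads: dict[str, str], wanted: str) -> str | None:
    # The fix applied throughout this diff: drop the else and dedent its body.
    for version, head in heads.items():
        if version == wanted:
            return head
    return None


# PLW0127 flags no-op self-assignments, e.g. the duplicated
# "operator = operator = self._construct_operator(...)" removed in test_sql.py,
# and "json = json" in settings.py (resolved there by importing json as
# json_lib so the public settings.json attribute is kept).

if __name__ == "__main__":
    heads = {"2.10.3": "rev_a", "3.0.0": "rev_b"}  # illustrative data
    assert revision_before(heads, "3.0.0") == revision_after(heads, "3.0.0") == "rev_b"
    assert revision_before(heads, "9.9.9") is None
    assert revision_after(heads, "9.9.9") is None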