diff --git a/airflow-core/src/airflow/api_fastapi/auth/tokens.py b/airflow-core/src/airflow/api_fastapi/auth/tokens.py
index f80b7513d8ee8..f653532e29fb1 100644
--- a/airflow-core/src/airflow/api_fastapi/auth/tokens.py
+++ b/airflow-core/src/airflow/api_fastapi/auth/tokens.py
@@ -226,7 +226,7 @@ def _conf_factory(section, key, **kwargs):
     def factory() -> str:
         from airflow.configuration import conf

-        return conf.get(section, key, **kwargs, suppress_warnings=True)  # type: ignore[return-value]
+        return conf.get(section, key, **kwargs, suppress_warnings=True)

     return factory

@@ -538,7 +538,7 @@ def get_signing_key(section: str, key: str, make_secret_key_if_needed: bool = Tr
         raise ValueError(f"The value {section}/{key} must be set!")

     # Mypy can't grock the `if not secret_key`
-    return secret_key  # type: ignore[return-value]
+    return secret_key


 def get_signing_args(make_secret_key_if_needed: bool = True) -> dict[str, Any]:
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_versions.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_versions.py
index 09a18f8a4fc12..a32e315bbee29 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_versions.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dag_versions.py
@@ -37,7 +37,7 @@ class DagVersionResponse(BaseModel):
     dag_display_name: str = Field(validation_alias=AliasPath("dag_model", "dag_display_name"))

     # Mypy issue https://github.com/python/mypy/issues/1362
-    @computed_field  # type: ignore[misc]
+    @computed_field  # type: ignore[prop-decorator]
     @property
     def bundle_url(self) -> str | None:
         if self.bundle_name:
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py
index 499b66c8e98be..9b06657220e83 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/dags.py
@@ -105,7 +105,7 @@ def get_timetable_summary(cls, tts: str | None) -> str | None:
         return str(tts)

     # Mypy issue https://github.com/python/mypy/issues/1362
-    @computed_field  # type: ignore[misc]
+    @computed_field  # type: ignore[prop-decorator]
     @property
     def file_token(self) -> str:
         """Return file token."""
@@ -185,14 +185,14 @@ def get_params(cls, params: abc.MutableMapping | None) -> dict | None:
         return {k: v.dump() for k, v in params.items()}

     # Mypy issue https://github.com/python/mypy/issues/1362
-    @computed_field  # type: ignore[misc]
+    @computed_field  # type: ignore[prop-decorator]
     @property
     def concurrency(self) -> int:
         """Return max_active_tasks as concurrency."""
         return self.max_active_tasks

     # Mypy issue https://github.com/python/mypy/issues/1362
-    @computed_field  # type: ignore[misc]
+    @computed_field  # type: ignore[prop-decorator]
     @property
     def latest_dag_version(self) -> DagVersionResponse | None:
         """Return the latest DagVersion."""
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/tasks.py b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/tasks.py
index 13a9af7043680..d2e8e285fb3ce 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/datamodels/tasks.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/datamodels/tasks.py
@@ -100,7 +100,7 @@ def get_params(cls, params: abc.MutableMapping | None) -> dict | None:
         return {param_name: param_val.dump() for param_name, param_val in params.items()}

     # Mypy issue https://github.com/python/mypy/issues/1362
-    @computed_field  # type: ignore[misc]
+    @computed_field  # type: ignore[prop-decorator]
     @property
     def extra_links(self) -> list[str]:
         """Extract and return extra_links."""
diff --git a/airflow-core/src/airflow/api_fastapi/core_api/services/public/common.py b/airflow-core/src/airflow/api_fastapi/core_api/services/public/common.py
index dc1bd65e95fc0..94e1157d1c0c0 100644
--- a/airflow-core/src/airflow/api_fastapi/core_api/services/public/common.py
+++ b/airflow-core/src/airflow/api_fastapi/core_api/services/public/common.py
@@ -50,11 +50,11 @@ def handle_request(self) -> BulkResponse:
             results[action.action.value] = BulkActionResponse()

             if action.action == BulkAction.CREATE:
-                self.handle_bulk_create(action, results[action.action.value])  # type: ignore
+                self.handle_bulk_create(action, results[action.action.value])
             elif action.action == BulkAction.UPDATE:
-                self.handle_bulk_update(action, results[action.action.value])  # type: ignore
+                self.handle_bulk_update(action, results[action.action.value])
             elif action.action == BulkAction.DELETE:
-                self.handle_bulk_delete(action, results[action.action.value])  # type: ignore
+                self.handle_bulk_delete(action, results[action.action.value])

         return BulkResponse(**results)
diff --git a/airflow-core/src/airflow/api_fastapi/execution_api/app.py b/airflow-core/src/airflow/api_fastapi/execution_api/app.py
index 691853f322a1f..7f7fb87c6f3b9 100644
--- a/airflow-core/src/airflow/api_fastapi/execution_api/app.py
+++ b/airflow-core/src/airflow/api_fastapi/execution_api/app.py
@@ -100,7 +100,7 @@ class CadwynWithOpenAPICustomization(Cadwyn):
     # Workaround lack of customzation https://github.com/zmievsa/cadwyn/issues/255
     async def openapi_jsons(self, req: Request) -> JSONResponse:
         resp = await super().openapi_jsons(req)
-        open_apischema = json.loads(resp.body)  # type: ignore[arg-type]
+        open_apischema = json.loads(resp.body)
         open_apischema = self.customize_openapi(open_apischema)

         resp.body = resp.render(open_apischema)
diff --git a/airflow-core/src/airflow/configuration.py b/airflow-core/src/airflow/configuration.py
index 2fbaa798fd7df..7d381f6fc3728 100644
--- a/airflow-core/src/airflow/configuration.py
+++ b/airflow-core/src/airflow/configuration.py
@@ -961,10 +961,10 @@ def get_mandatory_list_value(self, section: str, key: str, **kwargs) -> list[str
     @overload  # type: ignore[override]
     def get(self, section: str, key: str, fallback: str = ..., **kwargs) -> str: ...

-    @overload  # type: ignore[override]
+    @overload
     def get(self, section: str, key: str, **kwargs) -> str | None: ...

-    def get(  # type: ignore[override,misc]
+    def get(  # type: ignore[misc]
         self,
         section: str,
         key: str,
@@ -2102,7 +2102,7 @@ def load_standard_airflow_configuration(airflow_config_parser: AirflowConfigPars
             )
         else:
             # there
-            AIRFLOW_HOME = airflow_config_parser.get("core", "airflow_home")  # type: ignore[assignment]
+            AIRFLOW_HOME = airflow_config_parser.get("core", "airflow_home")
             warnings.warn(msg, category=DeprecationWarning, stacklevel=1)
diff --git a/airflow-core/src/airflow/dag_processing/manager.py b/airflow-core/src/airflow/dag_processing/manager.py
index 32cb267705f8b..21ebffeb628b2 100644
--- a/airflow-core/src/airflow/dag_processing/manager.py
+++ b/airflow-core/src/airflow/dag_processing/manager.py
@@ -881,7 +881,7 @@ def client(self) -> Client:
         client = Client(base_url=None, token="", dry_run=True, transport=self._api_server.transport)
         # Mypy is wrong -- the setter accepts a string on the property setter! `URLType = URL | str`
-        client.base_url = "http://in-process.invalid./"  # type: ignore[assignment]
+        client.base_url = "http://in-process.invalid./"
         return client

     def _create_process(self, dag_file: DagFileInfo) -> DagFileProcessorProcess:
@@ -1113,7 +1113,7 @@ def reload_configuration_for_dag_processing():
    # iterating on https://github.com/apache/airflow/pull/19860
    # The issue that describes the problem and possible remediation is
    # at https://github.com/apache/airflow/issues/19934
-    importlib.reload(import_module(airflow.settings.LOGGING_CLASS_PATH.rsplit(".", 1)[0]))  # type: ignore
+    importlib.reload(import_module(airflow.settings.LOGGING_CLASS_PATH.rsplit(".", 1)[0]))
    importlib.reload(airflow.settings)
    airflow.settings.initialize()
    del os.environ["CONFIG_PROCESSOR_MANAGER_LOGGER"]
diff --git a/airflow-core/src/airflow/dag_processing/processor.py b/airflow-core/src/airflow/dag_processing/processor.py
index 80ab38b041069..7103f3bb86c52 100644
--- a/airflow-core/src/airflow/dag_processing/processor.py
+++ b/airflow-core/src/airflow/dag_processing/processor.py
@@ -219,7 +219,7 @@ def _execute_dag_callbacks(dagbag: DagBag, request: DagCallbackRequest, log: Fil
    callbacks = callbacks if isinstance(callbacks, list) else [callbacks]

    # TODO:We need a proper context object!
-    context: Context = {  # type: ignore[assignment]
+    context: Context = {
        "dag": dag,
        "run_id": request.run_id,
        "reason": request.msg,
@@ -357,7 +357,7 @@ def _on_child_started(
         )
         self.send_msg(msg, request_id=0)

-    def _handle_request(self, msg: ToManager, log: FilteringBoundLogger, req_id: int) -> None:  # type: ignore[override]
+    def _handle_request(self, msg: ToManager, log: FilteringBoundLogger, req_id: int) -> None:
         from airflow.sdk.api.datamodels._generated import ConnectionResponse, VariableResponse

         resp: BaseModel | None = None
diff --git a/airflow-core/src/airflow/executors/local_executor.py b/airflow-core/src/airflow/executors/local_executor.py
index eabb921136978..6ab00cced3238 100644
--- a/airflow-core/src/airflow/executors/local_executor.py
+++ b/airflow-core/src/airflow/executors/local_executor.py
@@ -162,7 +162,7 @@ def start(self) -> None:

         # Mypy sees this value as `SynchronizedBase[c_uint]`, but that isn't the right runtime type behaviour
         # (it looks like an int to python)
-        self._unread_messages = multiprocessing.Value(ctypes.c_uint)  # type: ignore[assignment]
+        self._unread_messages = multiprocessing.Value(ctypes.c_uint)

     def _check_workers(self):
         # Reap any dead workers
diff --git a/airflow-core/src/airflow/jobs/scheduler_job_runner.py b/airflow-core/src/airflow/jobs/scheduler_job_runner.py
index da269af423a66..4eb99616133f7 100644
--- a/airflow-core/src/airflow/jobs/scheduler_job_runner.py
+++ b/airflow-core/src/airflow/jobs/scheduler_job_runner.py
@@ -1001,7 +1001,7 @@ def _execute(self) -> int | None:

            self._run_scheduler_loop()

-            settings.Session.remove()  # type: ignore
+            settings.Session.remove()
        except Exception:
            self.log.exception("Exception when executing SchedulerJob._run_scheduler_loop")
            raise
diff --git a/airflow-core/src/airflow/jobs/triggerer_job_runner.py b/airflow-core/src/airflow/jobs/triggerer_job_runner.py
index 07966655cfe99..19241a8c54847 100644
--- a/airflow-core/src/airflow/jobs/triggerer_job_runner.py
+++ b/airflow-core/src/airflow/jobs/triggerer_job_runner.py
@@ -371,10 +371,10 @@ def client(self) -> Client:
         client = Client(base_url=None, token="", dry_run=True, transport=in_process_api_server().transport)
         # Mypy is wrong -- the setter accepts a string on the property setter! `URLType = URL | str`
-        client.base_url = "http://in-process.invalid./"  # type: ignore[assignment]
+        client.base_url = "http://in-process.invalid./"
         return client

-    def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger, req_id: int) -> None:  # type: ignore[override]
+    def _handle_request(self, msg: ToTriggerSupervisor, log: FilteringBoundLogger, req_id: int) -> None:
         from airflow.sdk.api.datamodels._generated import (
             ConnectionResponse,
             TaskStatesResponse,
diff --git a/airflow-core/src/airflow/models/dag.py b/airflow-core/src/airflow/models/dag.py
index b50957e667cdb..4e1eac0ff7c8a 100644
--- a/airflow-core/src/airflow/models/dag.py
+++ b/airflow-core/src/airflow/models/dag.py
@@ -1644,7 +1644,7 @@ def bulk_write_to_db(
         log.info("Sync %s DAGs", len(dags))
         dag_op = DagModelOperation(
             bundle_name=bundle_name, bundle_version=bundle_version, dags={d.dag_id: d for d in dags}
-        )  # type: ignore[misc]
+        )

         orm_dags = dag_op.add_dags(session=session)
         dag_op.update_dags(orm_dags, session=session)
diff --git a/airflow-core/src/airflow/models/dagrun.py b/airflow-core/src/airflow/models/dagrun.py
index a9f174875e623..fe6fdfb813612 100644
--- a/airflow-core/src/airflow/models/dagrun.py
+++ b/airflow-core/src/airflow/models/dagrun.py
@@ -1347,7 +1347,7 @@ def notify_dagrun_state_changed(self, msg: str = ""):

     def handle_dag_callback(self, dag: SDKDAG, success: bool = True, reason: str = "success"):
         """Only needed for `dag.test` where `execute_callbacks=True` is passed to `update_state`."""
-        context: Context = {  # type: ignore[assignment]
+        context: Context = {
             "dag": dag,
             "run_id": str(self.run_id),
             "reason": reason,
diff --git a/airflow-core/src/airflow/models/mappedoperator.py b/airflow-core/src/airflow/models/mappedoperator.py
index 8612064264cc1..ec3ec96a218b9 100644
--- a/airflow-core/src/airflow/models/mappedoperator.py
+++ b/airflow-core/src/airflow/models/mappedoperator.py
@@ -58,7 +58,7 @@
     getstate_setstate=False,
     repr=False,
 )
-class MappedOperator(TaskSDKMappedOperator):  # type: ignore[misc] # It complains about weight_rule being different
+class MappedOperator(TaskSDKMappedOperator):
     """Object representing a mapped operator in a DAG."""

     deps: frozenset[BaseTIDep] = attrs.field(init=False, default=DEFAULT_OPERATOR_DEPS)
@@ -145,7 +145,7 @@ def get_extra_links(self, ti: TaskInstance, name: str) -> str | None:
         link = self.operator_extra_link_dict.get(name) or self.global_operator_extra_link_dict.get(name)
         if not link:
             return None
-        return link.get_link(self, ti_key=ti.key)  # type: ignore[arg-type]
+        return link.get_link(self, ti_key=ti.key)


 @functools.singledispatch
diff --git a/airflow-core/src/airflow/models/pool.py b/airflow-core/src/airflow/models/pool.py
index 7fc99740461a6..e022910b5a82a 100644
--- a/airflow-core/src/airflow/models/pool.py
+++ b/airflow-core/src/airflow/models/pool.py
@@ -177,7 +177,7 @@ def slots_stats(
         pool_rows = session.execute(query)
         for pool_name, total_slots, include_deferred in pool_rows:
             if total_slots == -1:
-                total_slots = float("inf")  # type: ignore
+                total_slots = float("inf")
             pools[pool_name] = PoolStats(
                 total=total_slots, running=0, queued=0, open=0, deferred=0, scheduled=0
             )
diff --git a/airflow-core/src/airflow/models/serialized_dag.py b/airflow-core/src/airflow/models/serialized_dag.py
index a05c4e6520415..ade8dca8761d4 100644
--- a/airflow-core/src/airflow/models/serialized_dag.py
+++ b/airflow-core/src/airflow/models/serialized_dag.py
@@ -295,13 +295,13 @@ class SerializedDagModel(Base):

     dag_runs = relationship(
         DagRun,
-        primaryjoin=dag_id == foreign(DagRun.dag_id),  # type: ignore
+        primaryjoin=dag_id == foreign(DagRun.dag_id),
         backref=backref("serialized_dag", uselist=False, innerjoin=True),
     )

     dag_model = relationship(
         DagModel,
-        primaryjoin=dag_id == DagModel.dag_id,  # type: ignore
+        primaryjoin=dag_id == DagModel.dag_id,  # type: ignore[has-type]
         foreign_keys=dag_id,
         uselist=False,
         innerjoin=True,
diff --git a/airflow-core/src/airflow/models/taskinstance.py b/airflow-core/src/airflow/models/taskinstance.py
index 1e22967429d23..a2af30a24d4ec 100644
--- a/airflow-core/src/airflow/models/taskinstance.py
+++ b/airflow-core/src/airflow/models/taskinstance.py
@@ -682,7 +682,7 @@ def from_runtime_ti(cls, runtime_ti: RuntimeTaskInstanceProtocol) -> TaskInstanc
             runtime_ti.map_index = -1
         ti = TaskInstance(
             run_id=runtime_ti.run_id,
-            task=runtime_ti.task,  # type: ignore[arg-type]
+            task=runtime_ti.task,
             map_index=runtime_ti.map_index,
             dag_version_id=runtime_ti.dag_version_id,
         )
@@ -846,7 +846,7 @@ def refresh_from_task(
         self.pool_slots = task.pool_slots
         with contextlib.suppress(Exception):
             # This method is called from the different places, and sometimes the TI is not fully initialized
-            self.priority_weight = self.task.weight_rule.get_weight(self)  # type: ignore[arg-type]
+            self.priority_weight = self.task.weight_rule.get_weight(self)
         self.run_as_user = task.run_as_user
         # Do not set max_tries to task.retries here because max_tries is a cumulative
         # value that needs to be stored in the db.
@@ -1264,7 +1264,7 @@ def _check_and_change_state_before_execution(

         # Closing all pooled connections to prevent
         # "max number of connections reached"
-        settings.engine.dispose()  # type: ignore
+        settings.engine.dispose()
         if verbose:
             if mark_success:
                 cls.logger().info("Marking success for %s on %s", ti.task, ti.logical_date)
@@ -1777,7 +1777,7 @@ def handle_failure(
         if test_mode is None:
             test_mode = self.test_mode
         failure_context = TaskInstance.fetch_handle_failure_context(
-            ti=self,  # type: ignore[arg-type]
+            ti=self,
             error=error,
             test_mode=test_mode,
             session=session,
@@ -1985,7 +1985,7 @@ def render_templates(
         # able to access the unmapped task instead.
         original_task.render_template_fields(context, jinja_env)
         if isinstance(self.task, MappedOperator):
-            self.task = context["ti"].task  # type: ignore[assignment]
+            self.task = context["ti"].task

         return original_task
diff --git a/airflow-core/src/airflow/providers_manager.py b/airflow-core/src/airflow/providers_manager.py
index 9a4ba30397ab7..0c99707a32d45 100644
--- a/airflow-core/src/airflow/providers_manager.py
+++ b/airflow-core/src/airflow/providers_manager.py
@@ -396,7 +396,7 @@ def __init__(self):
         self._asset_uri_handlers: dict[str, Callable[[SplitResult], SplitResult]] = {}
         self._asset_factories: dict[str, Callable[..., Asset]] = {}
         self._asset_to_openlineage_converters: dict[str, Callable] = {}
-        self._taskflow_decorators: dict[str, Callable] = LazyDictWithCache()  # type: ignore[assignment]
+        self._taskflow_decorators: dict[str, Callable] = LazyDictWithCache()
         # keeps mapping between connection_types and hook class, package they come from
         self._hook_provider_dict: dict[str, HookClassProvider] = {}
         self._dialect_provider_dict: dict[str, DialectInfo] = {}
@@ -447,7 +447,7 @@ def _init_airflow_core_hooks(self):
                connection_type=None,
                package_name="apache-airflow-providers-standard",
                hook_class_name=class_name,
-                provider_info=None,  # type: ignore[argument]
+                provider_info=None,
            )

    @provider_info_cache("list")
diff --git a/airflow-core/src/airflow/serialization/serialized_objects.py b/airflow-core/src/airflow/serialization/serialized_objects.py
index 0160ea5bf28eb..8c75249ac9829 100644
--- a/airflow-core/src/airflow/serialization/serialized_objects.py
+++ b/airflow-core/src/airflow/serialization/serialized_objects.py
@@ -164,7 +164,7 @@ def encode_relativedelta(var: relativedelta.relativedelta) -> dict[str, Any]:
 def decode_relativedelta(var: dict[str, Any]) -> relativedelta.relativedelta:
     """Dencode a relativedelta object."""
     if "weekday" in var:
-        var["weekday"] = relativedelta.weekday(*var["weekday"])  # type: ignore
+        var["weekday"] = relativedelta.weekday(*var["weekday"])
     return relativedelta.relativedelta(**var)


@@ -399,14 +399,14 @@ def encode_outlet_event_accessors(var: OutletEventAccessors) -> dict[str, Any]:
         "__type": DAT.ASSET_EVENT_ACCESSORS,
         "_dict": [
             {"key": BaseSerialization.serialize(k), "value": encode_outlet_event_accessor(v)}
-            for k, v in var._dict.items()  # type: ignore[attr-defined]
+            for k, v in var._dict.items()
         ],
     }


 def decode_outlet_event_accessors(var: dict[str, Any]) -> OutletEventAccessors:
-    d = OutletEventAccessors()  # type: ignore[assignment]
-    d._dict = {  # type: ignore[attr-defined]
+    d = OutletEventAccessors()
+    d._dict = {
         BaseSerialization.deserialize(row["key"]): decode_outlet_event_accessor(row["value"])
         for row in var["_dict"]
     }
@@ -1524,7 +1524,7 @@ def populate_operator(
                if v is False:
                    raise RuntimeError("_is_sensor=False should never have been serialized!")

-                object.__setattr__(op, "deps", op.deps | {ReadyToRescheduleDep()})  # type: ignore[union-attr]
+                object.__setattr__(op, "deps", op.deps | {ReadyToRescheduleDep()})
                continue
            elif (
                k in cls._decorated_fields
diff --git a/airflow-core/src/airflow/serialization/serializers/timezone.py b/airflow-core/src/airflow/serialization/serializers/timezone.py
index b719bb9f1ce9b..c4d1c30142c6e 100644
--- a/airflow-core/src/airflow/serialization/serializers/timezone.py
+++ b/airflow-core/src/airflow/serialization/serializers/timezone.py
@@ -97,6 +97,6 @@ def _get_tzinfo_name(tzinfo: datetime.tzinfo | None) -> str | None:
         return tzinfo.name
     if hasattr(tzinfo, "zone"):
         # pytz timezone
-        return tzinfo.zone  # type: ignore[no-any-return]
+        return tzinfo.zone

     return None
diff --git a/airflow-core/src/airflow/settings.py b/airflow-core/src/airflow/settings.py
index ca8de146775e2..b2b7bc36cdcc4 100644
--- a/airflow-core/src/airflow/settings.py
+++ b/airflow-core/src/airflow/settings.py
@@ -165,7 +165,7 @@ def replace_showwarning(replacement):
 original_show_warning = replace_showwarning(custom_show_warning)
 atexit.register(functools.partial(replace_showwarning, original_show_warning))

-POLICY_PLUGIN_MANAGER: Any = None  # type: ignore
+POLICY_PLUGIN_MANAGER: Any = None


 def task_policy(task):
@@ -417,7 +417,7 @@ def prepare_engine_args(disable_connection_pool=False, pool_class=None):
            default_args = default.copy()
            break

-    engine_args: dict = conf.getjson("database", "sql_alchemy_engine_args", fallback=default_args)  # type: ignore
+    engine_args: dict = conf.getjson("database", "sql_alchemy_engine_args", fallback=default_args)

    if pool_class:
        # Don't use separate settings for size etc, only those from sql_alchemy_engine_args
diff --git a/airflow-core/src/airflow/task/priority_strategy.py b/airflow-core/src/airflow/task/priority_strategy.py
index dcef1c865b6e4..f9c4cce15f8d8 100644
--- a/airflow-core/src/airflow/task/priority_strategy.py
+++ b/airflow-core/src/airflow/task/priority_strategy.py
@@ -52,7 +52,7 @@ def deserialize(cls, data: dict[str, Any]) -> PriorityWeightStrategy:
         was returned by ``serialize`` during DAG serialization. The default
         implementation constructs the priority weight strategy without any arguments.
         """
-        return cls(**data)  # type: ignore[call-arg]
+        return cls(**data)

     def serialize(self) -> dict[str, Any]:
         """
diff --git a/airflow-core/src/airflow/utils/deprecation_tools.py b/airflow-core/src/airflow/utils/deprecation_tools.py
index e501a758690d5..f5562adfa6c70 100644
--- a/airflow-core/src/airflow/utils/deprecation_tools.py
+++ b/airflow-core/src/airflow/utils/deprecation_tools.py
@@ -178,7 +178,7 @@ def add_deprecated_classes(
            override_deprecated_classes_for_module = {}

        # Mypy is not able to derive the right function signature https://github.com/python/mypy/issues/2427
-        module_type.__getattr__ = functools.partial(  # type: ignore[assignment]
+        module_type.__getattr__ = functools.partial(  # type: ignore[method-assign]
            getattr_with_deprecation,
            imports,
            full_module_name,
diff --git a/airflow-core/src/airflow/utils/entry_points.py b/airflow-core/src/airflow/utils/entry_points.py
index 55e7dc789dacb..11b0ff2e7b68e 100644
--- a/airflow-core/src/airflow/utils/entry_points.py
+++ b/airflow-core/src/airflow/utils/entry_points.py
@@ -25,7 +25,7 @@
 if sys.version_info >= (3, 12):
     from importlib import metadata
 else:
-    import importlib_metadata as metadata  # type: ignore[no-redef]
+    import importlib_metadata as metadata

 log = logging.getLogger(__name__)
diff --git a/airflow-core/src/airflow/utils/hashlib_wrapper.py b/airflow-core/src/airflow/utils/hashlib_wrapper.py
index 0fdad844be0af..03c250d471fac 100644
--- a/airflow-core/src/airflow/utils/hashlib_wrapper.py
+++ b/airflow-core/src/airflow/utils/hashlib_wrapper.py
@@ -31,4 +31,4 @@ def md5(string: ReadableBuffer = b"", /) -> hashlib._Hash:
     :param string: The data to hash. Default to empty str byte.
     :return: The hashed value.
     """
-    return hashlib.md5(string, usedforsecurity=False)  # type: ignore
+    return hashlib.md5(string, usedforsecurity=False)
diff --git a/airflow-core/src/airflow/utils/log/file_task_handler.py b/airflow-core/src/airflow/utils/log/file_task_handler.py
index 24580d768ff3a..6f69d3c54718c 100644
--- a/airflow-core/src/airflow/utils/log/file_task_handler.py
+++ b/airflow-core/src/airflow/utils/log/file_task_handler.py
@@ -739,11 +739,7 @@ def read(
         try_number = task_instance.try_number

         if try_number == 0 and task_instance.state == TaskInstanceState.SKIPPED:
-            logs = [
-                StructuredLogMessage(  # type: ignore[call-arg]
-                    event="Task was skipped, no logs available."
-                )
-            ]
+            logs = [StructuredLogMessage(event="Task was skipped, no logs available.")]
             return chain(logs), {"end_of_log": True}

         if try_number is None or try_number < 1:
diff --git a/airflow-core/src/airflow/utils/log/logging_mixin.py b/airflow-core/src/airflow/utils/log/logging_mixin.py
index 1b3e24442dfa1..6bc6efcdd7fcb 100644
--- a/airflow-core/src/airflow/utils/log/logging_mixin.py
+++ b/airflow-core/src/airflow/utils/log/logging_mixin.py
@@ -153,7 +153,7 @@ def supports_external_link(self) -> bool:
 # base implementation for IO-implementing classes, it's impossible to make them work with
 # IO generics (and apparently it has not even been intended)
 # See more: https://giters.com/python/typeshed/issues/6077
-class StreamLogWriter(TextIOBase, IO[str]):  # type: ignore[misc]
+class StreamLogWriter(TextIOBase, IO[str]):
     """
     Allows to redirect stdout and stderr to logger.
diff --git a/airflow-core/src/airflow/utils/timezone.py b/airflow-core/src/airflow/utils/timezone.py
index df2567e03db79..3e59169598aab 100644
--- a/airflow-core/src/airflow/utils/timezone.py
+++ b/airflow-core/src/airflow/utils/timezone.py
@@ -284,7 +284,7 @@ def parse_timezone(name: str | int) -> FixedTimezone | Timezone:
     """
     if _PENDULUM3:
         # This only presented in pendulum 3 and code do not reached into the pendulum 2
-        return pendulum.timezone(name)  # type: ignore[operator]
+        return pendulum.timezone(name)
     # In pendulum 2 this refers to the function, in pendulum 3 refers to the module
     return pendulum.tz.timezone(name)  # type: ignore[operator]
diff --git a/airflow-core/src/airflow/utils/yaml.py b/airflow-core/src/airflow/utils/yaml.py
index e1cc4d3400578..2bc5911d10de8 100644
--- a/airflow-core/src/airflow/utils/yaml.py
+++ b/airflow-core/src/airflow/utils/yaml.py
@@ -44,7 +44,7 @@ def safe_load(stream: bytes | str | BinaryIO | TextIO) -> Any:
     try:
         from yaml import CSafeLoader as SafeLoader
     except ImportError:
-        from yaml import SafeLoader  # type: ignore[assignment, no-redef]
+        from yaml import SafeLoader  # type: ignore[assignment]

     return orig(stream, SafeLoader)

@@ -57,7 +57,7 @@ def dump(data: Any, **kwargs) -> str:
     try:
         from yaml import CSafeDumper as SafeDumper
     except ImportError:
-        from yaml import SafeDumper  # type: ignore[assignment, no-redef]
+        from yaml import SafeDumper  # type: ignore[assignment]

     return cast("str", orig(data, Dumper=SafeDumper, **kwargs))
diff --git a/airflow-core/tests/integration/__init__.py b/airflow-core/tests/integration/__init__.py
index e8fd22856438c..5966d6b1d5261 100644
--- a/airflow-core/tests/integration/__init__.py
+++ b/airflow-core/tests/integration/__init__.py
@@ -14,4 +14,4 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/airflow-core/tests/unit/charts/helm_template_generator.py b/airflow-core/tests/unit/charts/helm_template_generator.py
index 7a12acc864758..954ac98d92e54 100644
--- a/airflow-core/tests/unit/charts/helm_template_generator.py
+++ b/airflow-core/tests/unit/charts/helm_template_generator.py
@@ -146,7 +146,7 @@ def render_chart(
        raise HelmFailedError(result.returncode, result.args, result.stdout, result.stderr)
    templates = result.stdout
    k8s_objects = yaml.full_load_all(templates)
-    k8s_objects = [k8s_object for k8s_object in k8s_objects if k8s_object]  # type: ignore
+    k8s_objects = [k8s_object for k8s_object in k8s_objects if k8s_object]
    for k8s_object in k8s_objects:
        validate_k8s_object(k8s_object, kubernetes_version)
    return k8s_objects
diff --git a/airflow-core/tests/unit/dag_processing/test_processor.py b/airflow-core/tests/unit/dag_processing/test_processor.py
index b71e48837d488..66edfb26f6655 100644
--- a/airflow-core/tests/unit/dag_processing/test_processor.py
+++ b/airflow-core/tests/unit/dag_processing/test_processor.py
@@ -81,7 +81,7 @@ def inprocess_client():
     """Provides an in-process Client backed by a single API server."""
     api = InProcessExecutionAPI()
     client = Client(base_url=None, token="", dry_run=True, transport=api.transport)
-    client.base_url = "http://in-process.invalid/"  # type: ignore[assignment]
+    client.base_url = "http://in-process.invalid/"
     return client

diff --git a/airflow-core/tests/unit/jobs/test_scheduler_job.py b/airflow-core/tests/unit/jobs/test_scheduler_job.py
index 0085c20508d68..f9269226e9cdc 100644
--- a/airflow-core/tests/unit/jobs/test_scheduler_job.py
+++ b/airflow-core/tests/unit/jobs/test_scheduler_job.py
@@ -6120,7 +6120,7 @@ def watch_set_state(dr: DagRun, state, **kwargs):
            if state in (DagRunState.SUCCESS, DagRunState.FAILED):
                # Stop the scheduler
                self.job_runner.num_runs = 1  # type: ignore[union-attr]
-            orig_set_state(dr, state, **kwargs)  # type: ignore[call-arg]
+            orig_set_state(dr, state, **kwargs)

        def watch_heartbeat(*args, **kwargs):
            if len(num_queued_tis) < 3 or len(num_finished_events) < 3:
diff --git a/airflow-core/tests/unit/models/test_dag.py b/airflow-core/tests/unit/models/test_dag.py
index 322f23ef067bb..1feb7ebd2eb58 100644
--- a/airflow-core/tests/unit/models/test_dag.py
+++ b/airflow-core/tests/unit/models/test_dag.py
@@ -157,7 +157,7 @@ def _create_dagrun(
     data_interval = DataInterval(*map(timezone.coerce_datetime, data_interval))
     run_id = dag.timetable.generate_run_id(
         run_type=run_type,
-        run_after=logical_date or data_interval.end,  # type: ignore
+        run_after=logical_date or data_interval.end,
         data_interval=data_interval,
     )
     return dag.create_dagrun(
@@ -1622,7 +1622,7 @@ def test_clear_dag(
        ) as dag:
            EmptyOperator(task_id=task_id)

-        session = settings.Session()  # type: ignore
+        session = settings.Session()
        dagrun_1 = dag_maker.create_dagrun(
            run_id="backfill",
            run_type=DagRunType.BACKFILL_JOB,
diff --git a/airflow-core/tests/unit/models/test_dagrun.py b/airflow-core/tests/unit/models/test_dagrun.py
index 7387aca91f04d..ab9f0c17dda39 100644
--- a/airflow-core/tests/unit/models/test_dagrun.py
+++ b/airflow-core/tests/unit/models/test_dagrun.py
@@ -336,7 +336,7 @@ def test_dagrun_deadlock(self, dag_maker, session):
         assert dr.state == DagRunState.RUNNING

         ti_op2.set_state(state=None, session=session)
-        ti_op2.task.trigger_rule = "invalid"  # type: ignore
+        ti_op2.task.trigger_rule = "invalid"

         dr.update_state(session=session)
         assert dr.state == DagRunState.FAILED
diff --git a/airflow-core/tests/unit/serialization/test_serialized_objects.py b/airflow-core/tests/unit/serialization/test_serialized_objects.py
index 1177d093a9a51..402c169cc08b2 100644
--- a/airflow-core/tests/unit/serialization/test_serialized_objects.py
+++ b/airflow-core/tests/unit/serialization/test_serialized_objects.py
@@ -227,9 +227,8 @@ def equal_exception(a: AirflowException, b: AirflowException) -> bool:


 def equal_outlet_event_accessors(a: OutletEventAccessors, b: OutletEventAccessors) -> bool:
-    return a._dict.keys() == b._dict.keys() and all(  # type: ignore[attr-defined]
-        equal_outlet_event_accessor(a._dict[key], b._dict[key])  # type: ignore[attr-defined]
-        for key in a._dict  # type: ignore[attr-defined]
+    return a._dict.keys() == b._dict.keys() and all(
+        equal_outlet_event_accessor(a._dict[key], b._dict[key]) for key in a._dict
     )

diff --git a/airflow-core/tests/unit/utils/test_dot_renderer.py b/airflow-core/tests/unit/utils/test_dot_renderer.py
index 26826f8d50140..d3ba7acaab796 100644
--- a/airflow-core/tests/unit/utils/test_dot_renderer.py
+++ b/airflow-core/tests/unit/utils/test_dot_renderer.py
@@ -35,7 +35,7 @@
 try:
     from airflow.providers.standard.operators.python import PythonOperator
 except ImportError:
-    from airflow.operators.python import PythonOperator  # type: ignore[no-redef,attr-defined]
+    from airflow.operators.python import PythonOperator  # type: ignore[no-redef]

 START_DATE = timezone.utcnow()