diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py
index 6cfbd0872f483..832f3d92e310a 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/base_aws.py
@@ -74,7 +74,7 @@
 # If we change to Union syntax then mypy is not happy with UP007 Use `X | Y` for type annotations
 # The only way to workaround it for now is to keep the union syntax with ignore for mypy
 # We should try to resolve this later.
-BaseAwsConnection = TypeVar("BaseAwsConnection", bound=Union[BaseClient, ServiceResource])  # type: ignore[operator] # noqa: UP007
+BaseAwsConnection = TypeVar("BaseAwsConnection", bound=Union[BaseClient, ServiceResource])  # noqa: UP007
 
 
 if AIRFLOW_V_3_0_PLUS:
@@ -636,7 +636,7 @@ def conn_config(self) -> AwsConnectionWrapper:
                 raise
 
         return AwsConnectionWrapper(
-            conn=connection,  # type: ignore[arg-type]
+            conn=connection,
             region_name=self._region_name,
             botocore_config=self._config,
             verify=self._verify,
@@ -718,10 +718,10 @@ def _get_config(self, config: Config | None = None) -> Config:
         # because the user_agent_extra field is generated at runtime.
         user_agent_config = Config(
             user_agent_extra=self._generate_user_agent_extra_field(
-                existing_user_agent_extra=config.user_agent_extra  # type: ignore[union-attr]
+                existing_user_agent_extra=config.user_agent_extra
             )
         )
-        return config.merge(user_agent_config)  # type: ignore[union-attr]
+        return config.merge(user_agent_config)
 
     def get_client_type(
         self,
@@ -1050,7 +1050,7 @@ def _list_custom_waiters(self) -> list[str]:
         return WaiterModel(model_config).waiter_names
 
 
-class AwsBaseHook(AwsGenericHook[Union[boto3.client, boto3.resource]]):  # type: ignore[operator] # noqa: UP007
+class AwsBaseHook(AwsGenericHook[Union[boto3.client, boto3.resource]]):  # noqa: UP007
     """
     Base class for interact with AWS.
 
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_sql.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_sql.py
index bdd90f1d6baa5..76e71eafb4b7b 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_sql.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/redshift_sql.py
@@ -74,7 +74,7 @@ def get_ui_field_behaviour(cls) -> dict:
 
     @cached_property
     def conn(self):
-        return self.get_connection(self.redshift_conn_id)  # type: ignore[attr-defined]
+        return self.get_connection(self.redshift_conn_id)
 
     def _get_conn_params(self) -> dict[str, str | int]:
         """Retrieve connection parameters."""
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/providers/amazon/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
index 0961b091a580c..543b0d33ceaea 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py
@@ -169,7 +169,7 @@ def read(self, relative_path, ti: RuntimeTI) -> tuple[LogSourceInfo, LogMessages]:
             f"Reading remote log from Cloudwatch log_group: {self.log_group} log_stream: {relative_path}"
         ]
         try:
-            logs = [self.get_cloudwatch_logs(relative_path, ti)]  # type: ignore[arg-value]
+            logs = [self.get_cloudwatch_logs(relative_path, ti)]
         except Exception as e:
             logs = None
             messages.append(str(e))
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/notifications/chime.py b/providers/amazon/src/airflow/providers/amazon/aws/notifications/chime.py
index 226b6fbb64899..1c2ac5b3ff73c 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/notifications/chime.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/notifications/chime.py
@@ -49,7 +49,7 @@ def hook(self):
         """To reduce overhead cache the hook for the notifier."""
         return ChimeWebhookHook(chime_conn_id=self.chime_conn_id)
 
-    def notify(self, context: Context) -> None:  # type: ignore[override]
+    def notify(self, context: Context) -> None:
         """Send a message to a Chime Chat Room."""
         self.hook.send_message(message=self.message)
 
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/operators/appflow.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/appflow.py
index a8f81b28b033d..0a1d799866bb7 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/operators/appflow.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/appflow.py
@@ -468,7 +468,7 @@ def _has_new_records_func(self, **kwargs) -> bool:
         self.log.info("flow_name: %s", flow_name)
         af_client = self.hook.conn
         task_instance = kwargs["task_instance"]
-        execution_id = task_instance.xcom_pull(task_ids=appflow_task_id, key="execution_id")  # type: ignore
+        execution_id = task_instance.xcom_pull(task_ids=appflow_task_id, key="execution_id")
         if not execution_id:
             raise AirflowException(f"No execution_id found from task_id {appflow_task_id}!")
         self.log.info("execution_id: %s", execution_id)
@@ -494,5 +494,5 @@ def _has_new_records_func(self, **kwargs) -> bool:
             raise AirflowException(f"Flow ({execution_id}) without recordsProcessed info!")
         records_processed = execution["recordsProcessed"]
         self.log.info("records_processed: %d", records_processed)
-        task_instance.xcom_push("records_processed", records_processed)  # type: ignore
+        task_instance.xcom_push("records_processed", records_processed)
         return records_processed > 0
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/operators/datasync.py b/providers/amazon/src/airflow/providers/amazon/aws/operators/datasync.py
index 83a6e15de1330..7b2b7282efca7 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/operators/datasync.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/operators/datasync.py
@@ -362,7 +362,7 @@ def _execute_datasync_task(self, context: Context) -> None:
             aws_domain=DataSyncTaskExecutionLink.get_aws_domain(self.hook.conn_partition),
             region_name=self.hook.conn_region_name,
             task_id=self.task_arn.split("/")[-1],
-            task_execution_id=self.task_execution_arn.split("/")[-1],  # type: ignore[union-attr]
+            task_execution_id=self.task_execution_arn.split("/")[-1],
         )
         DataSyncTaskExecutionLink.persist(
             context=context,
@@ -370,7 +370,7 @@ def _execute_datasync_task(self, context: Context) -> None:
             region_name=self.hook.conn_region_name,
             aws_partition=self.hook.conn_partition,
             task_id=self.task_arn.split("/")[-1],
-            task_execution_id=self.task_execution_arn.split("/")[-1],  # type: ignore[union-attr]
+            task_execution_id=self.task_execution_arn.split("/")[-1],
         )
 
         self.log.info("You can view this DataSync task execution at %s", execution_url)
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py b/providers/amazon/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py
index fa4966d0a124a..3b090b1ad916c 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/triggers/opensearch_serverless.py
@@ -54,7 +54,7 @@ def __init__(
             serialized_fields={"collection_id": collection_id, "collection_name": collection_name},
             waiter_name="collection_available",
             # waiter_args is a dict[str, Any], allow a possible list of None (it is caught above)
-            waiter_args={"ids": [collection_id]} if collection_id else {"names": [collection_name]},  # type: ignore[list-item]
+            waiter_args={"ids": [collection_id]} if collection_id else {"names": [collection_name]},
             failure_message="OpenSearch Serverless Collection creation failed.",
             status_message="Status of OpenSearch Serverless Collection is",
             status_queries=["status"],
diff --git a/providers/amazon/src/airflow/providers/amazon/version_compat.py b/providers/amazon/src/airflow/providers/amazon/version_compat.py
index befe89d465737..f2e3fdfc6afc5 100644
--- a/providers/amazon/src/airflow/providers/amazon/version_compat.py
+++ b/providers/amazon/src/airflow/providers/amazon/version_compat.py
@@ -44,7 +44,7 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
     from airflow.sdk import BaseOperator, BaseOperatorLink, BaseSensorOperator
     from airflow.sdk.execution_time.xcom import XCom
 else:
-    from airflow.models import BaseOperator, XCom  # type: ignore[no-redef]
+    from airflow.models import BaseOperator, XCom
     from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
     from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
diff --git a/providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py b/providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py
index 71949cb151a5f..3c424c847c9e1 100644
--- a/providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py
+++ b/providers/amazon/tests/system/amazon/aws/example_sagemaker_pipeline.py
@@ -119,7 +119,7 @@ def delete_pipeline(name: str):
     chain(
         # TEST SETUP
         test_context,
-        create_pipeline,  # type: ignore[arg-type]
+        create_pipeline,
         # TEST BODY
         start_pipeline1,
         start_pipeline2,
diff --git a/providers/amazon/tests/unit/amazon/aws/hooks/test_eks.py b/providers/amazon/tests/unit/amazon/aws/hooks/test_eks.py
index 569fe59928d0c..651345485e789 100644
--- a/providers/amazon/tests/unit/amazon/aws/hooks/test_eks.py
+++ b/providers/amazon/tests/unit/amazon/aws/hooks/test_eks.py
@@ -273,7 +273,7 @@ def test_create_cluster_throws_exception_when_cluster_exists(
         with pytest.raises(ClientError) as raised_exception:
             eks_hook.create_cluster(
                 name=generated_test_data.existing_cluster_name,
-                **dict(ClusterInputs.REQUIRED),  # type: ignore
+                **dict(ClusterInputs.REQUIRED),
             )
 
         assert_client_error_exception_thrown(
@@ -434,7 +434,7 @@ def test_create_nodegroup_throws_exception_when_cluster_not_found(self) -> None:
             eks_hook.create_nodegroup(
                 clusterName=non_existent_cluster_name,
                 nodegroupName=non_existent_nodegroup_name,
-                **dict(NodegroupInputs.REQUIRED),  # type: ignore
+                **dict(NodegroupInputs.REQUIRED),
             )
 
         assert_client_error_exception_thrown(
@@ -458,7 +458,7 @@ def test_create_nodegroup_throws_exception_when_nodegroup_already_exists(
             eks_hook.create_nodegroup(
                 clusterName=generated_test_data.cluster_name,
                 nodegroupName=generated_test_data.existing_nodegroup_name,
-                **dict(NodegroupInputs.REQUIRED),  # type: ignore
+                **dict(NodegroupInputs.REQUIRED),
             )
 
         assert_client_error_exception_thrown(
@@ -488,7 +488,7 @@ def test_create_nodegroup_throws_exception_when_cluster_not_active(
             eks_hook.create_nodegroup(
                 clusterName=generated_test_data.cluster_name,
                 nodegroupName=non_existent_nodegroup_name,
-                **dict(NodegroupInputs.REQUIRED),  # type: ignore
+                **dict(NodegroupInputs.REQUIRED),
             )
 
         assert_client_error_exception_thrown(
@@ -846,7 +846,7 @@ def test_create_fargate_profile_throws_exception_when_cluster_not_found(self) -> None:
             eks_hook.create_fargate_profile(
                 clusterName=non_existent_cluster_name,
                 fargateProfileName=non_existent_fargate_profile_name,
-                **dict(FargateProfileInputs.REQUIRED),  # type: ignore
+                **dict(FargateProfileInputs.REQUIRED),
             )
 
         assert_client_error_exception_thrown(
@@ -867,7 +867,7 @@ def test_create_fargate_profile_throws_exception_when_fargate_profile_already_exists(
             eks_hook.create_fargate_profile(
                 clusterName=generated_test_data.cluster_name,
                 fargateProfileName=generated_test_data.existing_fargate_profile_name,
-                **dict(FargateProfileInputs.REQUIRED),  # type: ignore
+                **dict(FargateProfileInputs.REQUIRED),
             )
 
         assert_client_error_exception_thrown(
@@ -897,7 +897,7 @@ def test_create_fargate_profile_throws_exception_when_cluster_not_active(
            eks_hook.create_fargate_profile(
                 clusterName=generated_test_data.cluster_name,
                 fargateProfileName=non_existent_fargate_profile_name,
-                **dict(FargateProfileInputs.REQUIRED),  # type: ignore
+                **dict(FargateProfileInputs.REQUIRED),
             )
 
         assert_client_error_exception_thrown(
diff --git a/providers/amazon/tests/unit/amazon/aws/operators/test_appflow.py b/providers/amazon/tests/unit/amazon/aws/operators/test_appflow.py
index 6b9d3972eb69d..49c49e8c51458 100644
--- a/providers/amazon/tests/unit/amazon/aws/operators/test_appflow.py
+++ b/providers/amazon/tests/unit/amazon/aws/operators/test_appflow.py
@@ -119,7 +119,7 @@ def test_run(appflow_conn, ctx, waiter_mock):
     args = DUMP_COMMON_ARGS.copy()
     args.pop("source")
     operator = AppflowRunOperator(**args)
-    operator.execute(ctx)  # type: ignore
+    operator.execute(ctx)
     appflow_conn.start_flow.assert_called_once_with(flowName=FLOW_NAME)
     appflow_conn.describe_flow_execution_records.assert_called_once()
 
@@ -127,7 +127,7 @@
 @pytest.mark.db_test
 def test_run_full(appflow_conn, ctx, waiter_mock):
     operator = AppflowRunFullOperator(**DUMP_COMMON_ARGS)
-    operator.execute(ctx)  # type: ignore
+    operator.execute(ctx)
     run_assertions_base(appflow_conn, [])
 
 
@@ -136,7 +136,7 @@ def test_run_after(appflow_conn, ctx, waiter_mock):
     operator = AppflowRunAfterOperator(
         source_field="col0", filter_date="2022-05-26T00:00+00:00", **DUMP_COMMON_ARGS
     )
-    operator.execute(ctx)  # type: ignore
+    operator.execute(ctx)
     run_assertions_base(
         appflow_conn,
         [
@@ -155,7 +155,7 @@ def test_run_before(appflow_conn, ctx, waiter_mock):
     operator = AppflowRunBeforeOperator(
         source_field="col0", filter_date="2022-05-26T00:00+00:00", **DUMP_COMMON_ARGS
     )
-    operator.execute(ctx)  # type: ignore
+    operator.execute(ctx)
     run_assertions_base(
         appflow_conn,
         [
@@ -174,7 +174,7 @@ def test_run_daily(appflow_conn, ctx, waiter_mock):
     operator = AppflowRunDailyOperator(
         source_field="col0", filter_date="2022-05-26T00:00+00:00", **DUMP_COMMON_ARGS
     )
-    operator.execute(ctx)  # type: ignore
+    operator.execute(ctx)
     run_assertions_base(
         appflow_conn,
         [
@@ -202,7 +202,7 @@ def test_short_circuit(appflow_conn, ctx):
         flow_name=FLOW_NAME,
         appflow_run_task_id=TASK_ID,
     )
-    operator.execute(ctx)  # type: ignore
+    operator.execute(ctx)
     appflow_conn.describe_flow_execution_records.assert_called_once_with(
         flowName=FLOW_NAME, maxResults=100
     )
diff --git a/providers/amazon/tests/unit/amazon/aws/operators/test_ecs.py b/providers/amazon/tests/unit/amazon/aws/operators/test_ecs.py
index f55d4b5bd74f5..efc6d33630e40 100644
--- a/providers/amazon/tests/unit/amazon/aws/operators/test_ecs.py
+++ b/providers/amazon/tests/unit/amazon/aws/operators/test_ecs.py
@@ -359,7 +359,7 @@ def test_execute_without_failures(
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti, "task_instance": mock_ti}
 
-        self.ecs.execute(mock_context)  # type: ignore[arg-type]
+        self.ecs.execute(mock_context)
 
         client_mock.run_task.assert_called_once_with(
             cluster="c",
@@ -394,7 +394,7 @@ def test_execute_with_failures(self, client_mock):
         mock_context = {"ti": mock_ti, "task_instance": mock_ti}
 
         with pytest.raises(EcsOperatorError):
-            self.ecs.execute(mock_context)  # type: ignore[arg-type]
+            self.ecs.execute(mock_context)
 
         client_mock.run_task.assert_called_once_with(
             cluster="c",
@@ -715,7 +715,7 @@ def test_execute_xcom_with_log(self, log_fetcher_mock, client_mock):
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti, "task_instance": mock_ti}
 
-        assert self.ecs.execute(mock_context) == "Log output"  # type: ignore[arg-type]
+        assert self.ecs.execute(mock_context) == "Log output"
 
     @mock.patch.object(EcsBaseOperator, "client")
     @mock.patch("airflow.providers.amazon.aws.utils.task_log_fetcher.AwsTaskLogFetcher")
@@ -728,7 +728,7 @@ def test_execute_xcom_with_no_log(self, log_fetcher_mock, client_mock):
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti, "task_instance": mock_ti}
 
-        assert self.ecs.execute(mock_context) is None  # type: ignore[arg-type]
+        assert self.ecs.execute(mock_context) is None
 
     @mock.patch.object(EcsBaseOperator, "client")
     def test_execute_xcom_with_no_log_fetcher(self, client_mock):
@@ -737,7 +737,7 @@ def test_execute_xcom_with_no_log_fetcher(self, client_mock):
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti, "task_instance": mock_ti}
 
-        assert self.ecs.execute(mock_context) is None  # type: ignore[arg-type]
+        assert self.ecs.execute(mock_context) is None
 
     @mock.patch.object(EcsBaseOperator, "client")
     @mock.patch.object(AwsTaskLogFetcher, "get_last_log_message", return_value="Log output")
@@ -747,7 +747,7 @@ def test_execute_xcom_disabled(self, log_fetcher_mock, client_mock):
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti, "task_instance": mock_ti}
 
-        assert self.ecs.execute(mock_context) is None  # type: ignore[arg-type]
+        assert self.ecs.execute(mock_context) is None
 
     @mock.patch.object(EcsRunTaskOperator, "client")
     def test_with_defer(self, client_mock):
@@ -759,7 +759,7 @@ def test_with_defer(self, client_mock):
         mock_context = {"ti": mock_ti, "task_instance": mock_ti}
 
         with pytest.raises(TaskDeferred) as deferred:
-            self.ecs.execute(mock_context)  # type: ignore[arg-type]
+            self.ecs.execute(mock_context)
 
         assert isinstance(deferred.value.trigger, TaskDoneTrigger)
         assert deferred.value.trigger.task_arn == f"arn:aws:ecs:us-east-1:012345678910:task/{TASK_ID}"
@@ -772,7 +772,7 @@ def test_execute_complete(self, client_mock):
         mock_ti = mock.MagicMock()
         mock_context = {"ti": mock_ti, "task_instance": mock_ti}
 
-        self.ecs.execute_complete(mock_context, event)  # type: ignore[arg-type]
+        self.ecs.execute_complete(mock_context, event)
 
         # task gets described to assert its success
         client_mock().describe_tasks.assert_called_once_with(cluster="test_cluster", tasks=["my_arn"])
diff --git a/providers/amazon/tests/unit/amazon/aws/operators/test_eks.py b/providers/amazon/tests/unit/amazon/aws/operators/test_eks.py
index 66cda6292ccef..875401955de6e 100644
--- a/providers/amazon/tests/unit/amazon/aws/operators/test_eks.py
+++ b/providers/amazon/tests/unit/amazon/aws/operators/test_eks.py
@@ -386,7 +386,7 @@ def test_template_fields(self):
 
 class TestEksCreateFargateProfileOperator:
     def setup_method(self) -> None:
-        self.create_fargate_profile_params = CreateFargateProfileParams(  # type: ignore
+        self.create_fargate_profile_params = CreateFargateProfileParams(
             cluster_name=CLUSTER_NAME,
             pod_execution_role_arn=POD_EXECUTION_ROLE_ARN[1],
             selectors=SELECTORS[1],
diff --git a/providers/amazon/tests/unit/amazon/aws/triggers/test_glue.py b/providers/amazon/tests/unit/amazon/aws/triggers/test_glue.py
index d9b8ccaa452c9..4fa6f526835bc 100644
--- a/providers/amazon/tests/unit/amazon/aws/triggers/test_glue.py
+++ b/providers/amazon/tests/unit/amazon/aws/triggers/test_glue.py
@@ -54,7 +54,7 @@ async def test_wait_job(self, mock_async_conn, mock_get_waiter):
             waiter_delay=10,
         )
         generator = trigger.run()
-        event = await generator.asend(None)  # type:ignore[attr-defined]
+        event = await generator.asend(None)
 
         assert_expected_waiter_type(mock_get_waiter, "job_complete")
         mock_get_waiter().wait.assert_called_once()
@@ -87,7 +87,7 @@ async def test_wait_job_failed(self, mock_async_conn, mock_get_waiter):
         generator = trigger.run()
         with pytest.raises(AirflowException):
-            await generator.asend(None)  # type:ignore[attr-defined]
+            await generator.asend(None)
 
         assert_expected_waiter_type(mock_get_waiter, "job_complete")
 
     def test_serialization(self):
diff --git a/providers/amazon/tests/unit/amazon/aws/utils/eks_test_utils.py b/providers/amazon/tests/unit/amazon/aws/utils/eks_test_utils.py
index dbf589572796c..5f93fc61e74da 100644
--- a/providers/amazon/tests/unit/amazon/aws/utils/eks_test_utils.py
+++ b/providers/amazon/tests/unit/amazon/aws/utils/eks_test_utils.py
@@ -58,7 +58,7 @@ def attributes_to_test(
     :param nodegroup_name: The name of the nodegroup under test if applicable.
     :return: Returns a list of tuples containing the keys and values to be validated in testing.
     """
-    result: list[tuple] = deepcopy(inputs.REQUIRED + inputs.OPTIONAL + [STATUS])  # type: ignore
+    result: list[tuple] = deepcopy(inputs.REQUIRED + inputs.OPTIONAL + [STATUS])
     if inputs == ClusterInputs:
         result += [(ClusterAttributes.NAME, cluster_name)]
     elif inputs == FargateProfileInputs:
@@ -178,7 +178,7 @@ def _input_builder(options: InputTypes, minimal: bool) -> dict:
     :param minimal: If True, only the required values are generated; if False all values are generated.
     :return: Returns a dict containing the keys and values to be validated in testing.
     """
-    values: list[tuple] = deepcopy(options.REQUIRED)  # type: ignore
+    values: list[tuple] = deepcopy(options.REQUIRED)
     if not minimal:
         values.extend(deepcopy(options.OPTIONAL))
     return dict(values)  # type: ignore