From 09eff8e024069a2b5e6a88a3882e90c5ad220fee Mon Sep 17 00:00:00 2001
From: Jarek Potiuk
Date: Sun, 22 Jun 2025 12:59:53 +0200
Subject: [PATCH] Remove pytest.mark.db_test: airbyte and amazon providers
 where possible

After #51930, we can now remove "pytest.mark.db_test" in all providers
that used the database only to create connections.

This PR is a "trial" attempt at doing so and verifies that the approach
is viable. It:

* removes all pytest.mark.db_test markers from airbyte
* removes / reshuffles many pytest.mark.db_test markers in amazon
* adds a pre-commit check guarding that no new pytest.mark.db_test
  markers are added in providers that we have already cleaned up
  (for example airbyte)

When merged, this PR will be followed up with an issue asking
contributors to apply the same approach to all the remaining providers,
where I will describe the process of removing the markers in detail.

This is part of #42632, which has the long-term goal of making all
provider tests non-db tests and simplifying our test setup.
---
 .pre-commit-config.yaml | 10 ++++++++++
 contributing-docs/08_static_code_checks.rst | 2 ++
 dev/breeze/doc/images/output_static-checks.svg | 4 ++--
 dev/breeze/doc/images/output_static-checks.txt | 2 +-
 dev/breeze/src/airflow_breeze/pre_commit_ids.py | 1 +
 .../airbyte/tests/unit/airbyte/hooks/test_airbyte.py | 4 ----
 .../tests/unit/airbyte/operators/test_airbyte.py | 2 --
 .../airbyte/tests/unit/airbyte/sensors/test_airbyte.py | 1 -
 .../tests/unit/airbyte/triggers/test_airbyte.py | 1 -
 .../unit/amazon/aws/executors/ecs/test_ecs_executor.py | 2 --
 .../tests/unit/amazon/aws/hooks/test_base_aws.py | 2 --
 .../amazon/tests/unit/amazon/aws/hooks/test_chime.py | 2 --
 .../amazon/tests/unit/amazon/aws/hooks/test_emr.py | 1 -
 .../amazon/tests/unit/amazon/aws/hooks/test_s3.py | 2 --
 .../amazon/aws/hooks/test_sagemaker_unified_studio.py | 4 ++--
 .../amazon/tests/unit/amazon/aws/links/test_athena.py | 4 ++++
 .../tests/unit/amazon/aws/links/test_base_aws.py | 1 -
 .../amazon/tests/unit/amazon/aws/links/test_batch.py | 4 ++++
 .../tests/unit/amazon/aws/links/test_comprehend.py | 4 ++++
 .../tests/unit/amazon/aws/links/test_datasync.py | 5 +++++
 .../amazon/tests/unit/amazon/aws/links/test_ec2.py | 4 ++++
 .../amazon/tests/unit/amazon/aws/links/test_emr.py | 3 +++
 .../amazon/tests/unit/amazon/aws/links/test_glue.py | 4 ++++
 .../amazon/tests/unit/amazon/aws/links/test_logs.py | 4 ++++
 .../tests/unit/amazon/aws/links/test_sagemaker.py | 4 ++++
 .../amazon/aws/links/test_sagemaker_unified_studio.py | 4 ++++
 .../tests/unit/amazon/aws/links/test_step_function.py | 2 ++
 .../tests/unit/amazon/aws/sensors/test_dynamodb.py | 3 ---
 .../unit/amazon/aws/transfers/test_google_api_to_s3.py | 1 -
 .../unit/amazon/aws/triggers/test_serialization.py | 2 --
 .../tests/unit/amazon/aws/utils/test_suppress.py | 1 -
 .../amazon/tests/unit/amazon/aws/waiters/test_eks.py | 2 ++
 32 files changed, 62 insertions(+), 30 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 995e255cf363d..1ae406eb2fd2c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -570,6 +570,16 @@ repos:
         entry: "pydevd.*settrace\\("
         pass_filenames: true
         files: \.py$
+      - id: check-pytest-mark-db-test-in-providers
+        language: pygrep
+        name: Check pytest.mark.db_test use in providers
+        entry: pytest\.mark\.db_test
+        pass_filenames: true
+        # Here we should add providers that are already free from pytest.mark.db_test
+        # and that we want to keep clean, using only non-db tests
+        files: >
+          (?x)
+          ^providers/airbyte/.*\.py$
       - id: check-links-to-example-dags-do-not-use-hardcoded-versions
         name: Verify no hard-coded version in example dags
         description: The links to example dags should use |version| as version specification
diff --git a/contributing-docs/08_static_code_checks.rst b/contributing-docs/08_static_code_checks.rst
index d1b8a7aa4ee62..494fe6a08aa82 100644
--- a/contributing-docs/08_static_code_checks.rst
+++ b/contributing-docs/08_static_code_checks.rst
@@ -221,6 +221,8 @@ require Breeze Docker image to be built locally.
 +-----------------------------------------------------------+--------------------------------------------------------+---------+
 | check-pydevd-left-in-code                                 | Check for pydevd debug statements accidentally left    |         |
 +-----------------------------------------------------------+--------------------------------------------------------+---------+
+| check-pytest-mark-db-test-in-providers                    | Check pytest.mark.db_test use in providers             |         |
++-----------------------------------------------------------+--------------------------------------------------------+---------+
 | check-revision-heads-map                                  | Check that the REVISION_HEADS_MAP is up-to-date        |         |
 +-----------------------------------------------------------+--------------------------------------------------------+---------+
 | check-safe-filter-usage-in-html                           | Don't use safe in templates                            |         |
diff --git a/dev/breeze/doc/images/output_static-checks.svg b/dev/breeze/doc/images/output_static-checks.svg
index beaf498a9e529..cf51d1ab8d4d8 100644
--- a/dev/breeze/doc/images/output_static-checks.svg
+++ b/dev/breeze/doc/images/output_static-checks.svg
@@ -355,8 +355,8 @@
 check-pre-commit-information-consistent | check-provide-create-sessions-imports |
 check-provider-docs-valid | check-provider-yaml-valid |
 check-providers-subpackages-init-file-exist | check-pydevd-left-in-code |
-check-revision-heads-map | check-safe-filter-usage-in-html |
-check-significant-newsfragments-are-valid |
+check-pytest-mark-db-test-in-providers | check-revision-heads-map |
+check-safe-filter-usage-in-html | check-significant-newsfragments-are-valid |
 check-sql-dependency-common-data-structure |
 check-start-date-not-used-in-defaults | check-system-tests-present |
 check-system-tests-tocs | check-taskinstance-tis-attrs |
diff --git a/dev/breeze/doc/images/output_static-checks.txt b/dev/breeze/doc/images/output_static-checks.txt
index a62f2b66d5aa2..0282b0f74e16f 100644
--- a/dev/breeze/doc/images/output_static-checks.txt
+++ b/dev/breeze/doc/images/output_static-checks.txt
@@ -1 +1 @@
-f31561bb0408a8cab278aad420d260b3
+bb6a22bca8cde603acddfad0f073b3c0
diff --git a/dev/breeze/src/airflow_breeze/pre_commit_ids.py b/dev/breeze/src/airflow_breeze/pre_commit_ids.py
index de0227170d7fd..ce4f0ec27fe57 100644
--- a/dev/breeze/src/airflow_breeze/pre_commit_ids.py
+++ b/dev/breeze/src/airflow_breeze/pre_commit_ids.py
@@ -74,6 +74,7 @@
     "check-provider-yaml-valid",
     "check-providers-subpackages-init-file-exist",
     "check-pydevd-left-in-code",
+    "check-pytest-mark-db-test-in-providers",
     "check-revision-heads-map",
     "check-safe-filter-usage-in-html",
     "check-significant-newsfragments-are-valid",
diff --git a/providers/airbyte/tests/unit/airbyte/hooks/test_airbyte.py b/providers/airbyte/tests/unit/airbyte/hooks/test_airbyte.py
index 1672a373bc8b7..6f19b49c51ec9 100644
--- a/providers/airbyte/tests/unit/airbyte/hooks/test_airbyte.py
+++ b/providers/airbyte/tests/unit/airbyte/hooks/test_airbyte.py
@@ -27,11 +27,7 @@
 from airflow.models import Connection
 from airflow.providers.airbyte.hooks.airbyte import AirbyteHook
 
-# those tests will not work with database isolation because they mock requests
-pytestmark = pytest.mark.db_test
-
 
-@pytest.mark.db_test
 class TestAirbyteHook:
     """
     Test all functions from Airbyte Hook
diff --git a/providers/airbyte/tests/unit/airbyte/operators/test_airbyte.py b/providers/airbyte/tests/unit/airbyte/operators/test_airbyte.py
index b7d6e9f6bdb35..0e9fc54c6b883 100644
--- a/providers/airbyte/tests/unit/airbyte/operators/test_airbyte.py
+++ b/providers/airbyte/tests/unit/airbyte/operators/test_airbyte.py
@@ -19,14 +19,12 @@
 
 from unittest import mock
 
-import pytest
 from airbyte_api.models import JobCreateRequest, JobResponse, JobStatusEnum, JobTypeEnum
 
 from airflow.models import Connection
 from airflow.providers.airbyte.operators.airbyte import AirbyteTriggerSyncOperator
 
 
-@pytest.mark.db_test
 class TestAirbyteTriggerSyncOp:
     """
     Test execute function from Airbyte Operator
diff --git a/providers/airbyte/tests/unit/airbyte/sensors/test_airbyte.py b/providers/airbyte/tests/unit/airbyte/sensors/test_airbyte.py
index bae1d7c2bc8a2..abec7b447f8ba 100644
--- a/providers/airbyte/tests/unit/airbyte/sensors/test_airbyte.py
+++ b/providers/airbyte/tests/unit/airbyte/sensors/test_airbyte.py
@@ -27,7 +27,6 @@
 from airflow.providers.airbyte.sensors.airbyte import AirbyteJobSensor
 
 
-@pytest.mark.db_test
 class TestAirbyteJobSensor:
     task_id = "task-id"
     airbyte_conn_id = "airbyte-conn-test"
diff --git a/providers/airbyte/tests/unit/airbyte/triggers/test_airbyte.py b/providers/airbyte/tests/unit/airbyte/triggers/test_airbyte.py
index 995da3fc9108e..55985e971db1c 100644
--- a/providers/airbyte/tests/unit/airbyte/triggers/test_airbyte.py
+++ b/providers/airbyte/tests/unit/airbyte/triggers/test_airbyte.py
@@ -29,7 +29,6 @@
 from airflow.triggers.base import TriggerEvent
 
 
-@pytest.mark.db_test
 class TestAirbyteSyncTrigger:
     DAG_ID = "airbyte_sync_run"
     TASK_ID = "airbyte_sync_run_task_op"
diff --git a/providers/amazon/tests/unit/amazon/aws/executors/ecs/test_ecs_executor.py b/providers/amazon/tests/unit/amazon/aws/executors/ecs/test_ecs_executor.py
index a29cb883a7b42..ff7b9699c98a0 100644
--- a/providers/amazon/tests/unit/amazon/aws/executors/ecs/test_ecs_executor.py
+++ b/providers/amazon/tests/unit/amazon/aws/executors/ecs/test_ecs_executor.py
@@ -61,8 +61,6 @@
 from tests_common.test_utils.config import conf_vars
 from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
-pytestmark = pytest.mark.db_test
-
 airflow_version = VersionInfo(*map(int, airflow_version_str.split(".")[:3]))
 
 ARN1 = "arn1"
diff --git a/providers/amazon/tests/unit/amazon/aws/hooks/test_base_aws.py b/providers/amazon/tests/unit/amazon/aws/hooks/test_base_aws.py
index 3970a766ade64..5aa436903ce9c 100644
--- a/providers/amazon/tests/unit/amazon/aws/hooks/test_base_aws.py
+++ b/providers/amazon/tests/unit/amazon/aws/hooks/test_base_aws.py
@@ -883,7 +883,6 @@ def test_hook_connection_test(self):
         assert result
         assert hook.client_type == "s3"  # Same client_type which defined during initialisation
 
-    @pytest.mark.db_test
     @mock.patch("boto3.session.Session")
     def test_hook_connection_test_failed(self, mock_boto3_session):
         """Test ``test_connection`` failure."""
@@ -1135,7 +1134,6 @@ def test_waiter_config_params_not_provided(waiter_path_mock: MagicMock, caplog):
         assert "PARAM_1" in str(ae.value)
 
 
-@pytest.mark.db_test
 @mock.patch.object(AwsGenericHook, "waiter_path", new_callable=PropertyMock)
 def test_waiter_config_no_params_needed(waiter_path_mock: MagicMock, caplog):
     waiter_path_mock.return_value = TEST_WAITER_CONFIG_LOCATION
diff --git a/providers/amazon/tests/unit/amazon/aws/hooks/test_chime.py b/providers/amazon/tests/unit/amazon/aws/hooks/test_chime.py
index 83ccf2055182a..a44140d21b88c 100644
--- a/providers/amazon/tests/unit/amazon/aws/hooks/test_chime.py
+++ b/providers/amazon/tests/unit/amazon/aws/hooks/test_chime.py
@@ -25,8 +25,6 @@
 from airflow.models import Connection
 from airflow.providers.amazon.aws.hooks.chime import ChimeWebhookHook
 
-pytestmark = pytest.mark.db_test
-
 
 class TestChimeWebhookHook:
     _config = {
diff --git a/providers/amazon/tests/unit/amazon/aws/hooks/test_emr.py b/providers/amazon/tests/unit/amazon/aws/hooks/test_emr.py
index aaa572cc5e4f2..63668a227c04b 100644
--- a/providers/amazon/tests/unit/amazon/aws/hooks/test_emr.py
+++ b/providers/amazon/tests/unit/amazon/aws/hooks/test_emr.py
@@ -154,7 +154,6 @@ def test_add_job_flow_steps_raises_exception_on_failure(self, mock_conn, mock_sl
         assert "test failure details" in caplog.messages[-1]
         mock_conn.get_waiter.assert_called_with("step_complete")
 
-    @pytest.mark.db_test
     @mock_aws
     def test_create_job_flow_extra_args(self):
         """
diff --git a/providers/amazon/tests/unit/amazon/aws/hooks/test_s3.py b/providers/amazon/tests/unit/amazon/aws/hooks/test_s3.py
index 2d69d2c539c6c..31b3a625a3971 100644
--- a/providers/amazon/tests/unit/amazon/aws/hooks/test_s3.py
+++ b/providers/amazon/tests/unit/amazon/aws/hooks/test_s3.py
@@ -1272,7 +1272,6 @@ def test_delete_bucket_if_bucket_not_exist(self, s3_bucket):
         assert mock_hook.delete_bucket(bucket_name="not-exists-bucket-name", force_delete=True)
         assert ctx.value.response["Error"]["Code"] == "NoSuchBucket"
 
-    @pytest.mark.db_test
     def test_provide_bucket_name(self):
         with mock.patch.object(
             S3Hook,
@@ -1728,7 +1727,6 @@ def test_delete_bucket_tagging_with_no_tags(self):
         hook.get_bucket_tagging(bucket_name="new_bucket")
 
 
-@pytest.mark.db_test
 @pytest.mark.parametrize(
     "key_kind, has_conn, has_bucket, precedence, expected",
     [
diff --git a/providers/amazon/tests/unit/amazon/aws/hooks/test_sagemaker_unified_studio.py b/providers/amazon/tests/unit/amazon/aws/hooks/test_sagemaker_unified_studio.py
index 179d997740c58..4d8c7f1995b03 100644
--- a/providers/amazon/tests/unit/amazon/aws/hooks/test_sagemaker_unified_studio.py
+++ b/providers/amazon/tests/unit/amazon/aws/hooks/test_sagemaker_unified_studio.py
@@ -28,8 +28,6 @@
 )
 from airflow.utils.session import create_session
 
-pytestmark = pytest.mark.db_test
-
 
 class TestSageMakerNotebookHook:
     @pytest.fixture(autouse=True)
@@ -174,6 +172,7 @@ def test_handle_unexpected_state(self):
         with pytest.raises(AirflowException, match=error_message):
             self.hook._handle_state(execution_id, status, error_message)
 
+    @pytest.mark.db_test
     @patch(
         "airflow.providers.amazon.aws.hooks.sagemaker_unified_studio.SageMakerNotebookHook._set_xcom_files"
     )
@@ -187,6 +186,7 @@ def test_set_xcom_files_negative_missing_context(self):
         with pytest.raises(AirflowException, match="context is required"):
             self.hook._set_xcom_files(self.files, {})
 
+    @pytest.mark.db_test
     @patch(
         "airflow.providers.amazon.aws.hooks.sagemaker_unified_studio.SageMakerNotebookHook._set_xcom_s3_path"
     )
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_athena.py b/providers/amazon/tests/unit/amazon/aws/links/test_athena.py
index 99a3536d17838..e1d9ba41b5f40 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_athena.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_athena.py
@@ -16,6 +16,8 @@
 # under the License.
 from __future__ import annotations
 
+import pytest
+
 from airflow.providers.amazon.aws.links.athena import AthenaQueryResultsLink
 from airflow.providers.amazon.version_compat import AIRFLOW_V_3_0_PLUS
 
@@ -24,6 +26,8 @@
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
+pytestmark = pytest.mark.db_test
+
 
 class TestAthenaQueryResultsLink(BaseAwsLinksTestCase):
     link_class = AthenaQueryResultsLink
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_base_aws.py b/providers/amazon/tests/unit/amazon/aws/links/test_base_aws.py
index 98ebcde8c92da..6155f1747bff6 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_base_aws.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_base_aws.py
@@ -132,7 +132,6 @@ class OperatorAndTi(NamedTuple):
     task_instance: TaskInstance
 
 
-@pytest.mark.db_test
 @pytest.mark.need_serialized_dag
 class BaseAwsLinksTestCase:
     """Base class for AWS Provider links tests."""
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_batch.py b/providers/amazon/tests/unit/amazon/aws/links/test_batch.py
index 70cd65655bfec..38e6d573d1373 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_batch.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_batch.py
@@ -16,6 +16,8 @@
 # under the License.
 from __future__ import annotations
 
+import pytest
+
 from airflow.providers.amazon.aws.links.batch import (
     BatchJobDefinitionLink,
     BatchJobDetailsLink,
@@ -28,6 +30,8 @@
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
+pytestmark = pytest.mark.db_test
+
 
 class TestBatchJobDefinitionLink(BaseAwsLinksTestCase):
     link_class = BatchJobDefinitionLink
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_comprehend.py b/providers/amazon/tests/unit/amazon/aws/links/test_comprehend.py
index 9b88270b5bd30..c945f672949cd 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_comprehend.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_comprehend.py
@@ -27,6 +27,10 @@
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
+import pytest
+
+pytestmark = pytest.mark.db_test
+
 
 class TestComprehendPiiEntitiesDetectionLink(BaseAwsLinksTestCase):
     link_class = ComprehendPiiEntitiesDetectionLink
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_datasync.py b/providers/amazon/tests/unit/amazon/aws/links/test_datasync.py
index 79c8469b701f7..983de3b2299bd 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_datasync.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_datasync.py
@@ -24,6 +24,11 @@
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
+import pytest
+
+pytestmark = pytest.mark.db_test
+
+
 TASK_ID = "task-0b36221bf94ad2bdd"
 EXECUTION_ID = "exec-00000000000000004"
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_ec2.py b/providers/amazon/tests/unit/amazon/aws/links/test_ec2.py
index f451c910058cf..8d5c745230274 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_ec2.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_ec2.py
@@ -24,6 +24,10 @@
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
+import pytest
+
+pytestmark = pytest.mark.db_test
+
 
 class TestEC2InstanceLink(BaseAwsLinksTestCase):
     link_class = EC2InstanceLink
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_emr.py b/providers/amazon/tests/unit/amazon/aws/links/test_emr.py
index feda067f7cc7a..fc121d4fff062 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_emr.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_emr.py
@@ -36,6 +36,9 @@
 from unit.amazon.aws.links.test_base_aws import BaseAwsLinksTestCase
 
+pytestmark = pytest.mark.db_test
+
+
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
 
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_glue.py b/providers/amazon/tests/unit/amazon/aws/links/test_glue.py
index 2b1f076e149df..bf49f23ae122a 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_glue.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_glue.py
@@ -24,6 +24,10 @@
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
+import pytest
+
+pytestmark = pytest.mark.db_test
+
 
 class TestGlueJobRunDetailsLink(BaseAwsLinksTestCase):
     link_class = GlueJobRunDetailsLink
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_logs.py b/providers/amazon/tests/unit/amazon/aws/links/test_logs.py
index 2c90eecd232ad..bb7a3277d3994 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_logs.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_logs.py
@@ -24,6 +24,10 @@
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
+import pytest
+
+pytestmark = pytest.mark.db_test
+
 
 class TestCloudWatchEventsLink(BaseAwsLinksTestCase):
     link_class = CloudWatchEventsLink
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_sagemaker.py b/providers/amazon/tests/unit/amazon/aws/links/test_sagemaker.py
index f08d7df93d509..7d79d931206ce 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_sagemaker.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_sagemaker.py
@@ -25,6 +25,10 @@
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
+import pytest
+
+pytestmark = pytest.mark.db_test
+
 
 class TestSageMakerTransformDetailsLink(BaseAwsLinksTestCase):
     link_class = SageMakerTransformJobLink
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_sagemaker_unified_studio.py b/providers/amazon/tests/unit/amazon/aws/links/test_sagemaker_unified_studio.py
index bb749727323e2..487e116e0c795 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_sagemaker_unified_studio.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_sagemaker_unified_studio.py
@@ -24,6 +24,10 @@
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
+import pytest
+
+pytestmark = pytest.mark.db_test
+
 
 class TestSageMakerUnifiedStudioLink(BaseAwsLinksTestCase):
     link_class = SageMakerUnifiedStudioLink
diff --git a/providers/amazon/tests/unit/amazon/aws/links/test_step_function.py b/providers/amazon/tests/unit/amazon/aws/links/test_step_function.py
index acfad7e98e96c..6526a69011baf 100644
--- a/providers/amazon/tests/unit/amazon/aws/links/test_step_function.py
+++ b/providers/amazon/tests/unit/amazon/aws/links/test_step_function.py
@@ -29,6 +29,8 @@
 if AIRFLOW_V_3_0_PLUS:
     from airflow.sdk.execution_time.comms import XComResult
 
+pytestmark = pytest.mark.db_test
+
 
 class TestStateMachineDetailsLink(BaseAwsLinksTestCase):
     link_class = StateMachineDetailsLink
diff --git a/providers/amazon/tests/unit/amazon/aws/sensors/test_dynamodb.py b/providers/amazon/tests/unit/amazon/aws/sensors/test_dynamodb.py
index 93ca01d26275d..f34acf3aac400 100644
--- a/providers/amazon/tests/unit/amazon/aws/sensors/test_dynamodb.py
+++ b/providers/amazon/tests/unit/amazon/aws/sensors/test_dynamodb.py
@@ -17,14 +17,11 @@
 
 from __future__ import annotations
 
-import pytest
 from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
 from airflow.providers.amazon.aws.sensors.dynamodb import DynamoDBValueSensor
 
-pytestmark = pytest.mark.db_test
-
 
 class TestDynamoDBValueSensor:
     def setup_method(self):
diff --git a/providers/amazon/tests/unit/amazon/aws/transfers/test_google_api_to_s3.py b/providers/amazon/tests/unit/amazon/aws/transfers/test_google_api_to_s3.py
index e8f97104f392f..52a9937344b74 100644
--- a/providers/amazon/tests/unit/amazon/aws/transfers/test_google_api_to_s3.py
+++ b/providers/amazon/tests/unit/amazon/aws/transfers/test_google_api_to_s3.py
@@ -26,7 +26,6 @@
 from airflow.providers.amazon.aws.transfers.google_api_to_s3 import GoogleApiToS3Operator
 
 # This test mocks json.dumps so it won't work for database isolation mode
-pytestmark = pytest.mark.db_test
 
 
 class TestGoogleApiToS3:
diff --git a/providers/amazon/tests/unit/amazon/aws/triggers/test_serialization.py b/providers/amazon/tests/unit/amazon/aws/triggers/test_serialization.py
index 446a799c482ad..8b5149ed343b6 100644
--- a/providers/amazon/tests/unit/amazon/aws/triggers/test_serialization.py
+++ b/providers/amazon/tests/unit/amazon/aws/triggers/test_serialization.py
@@ -64,8 +64,6 @@
 from airflow.providers.amazon.aws.utils.rds import RdsDbType
 from airflow.serialization.serialized_objects import BaseSerialization
 
-pytestmark = pytest.mark.db_test
-
 BATCH_JOB_ID = "job_id"
 
 TEST_CLUSTER_IDENTIFIER = "test-cluster"
diff --git a/providers/amazon/tests/unit/amazon/aws/utils/test_suppress.py b/providers/amazon/tests/unit/amazon/aws/utils/test_suppress.py
index 7323527f65559..744df42b6e6bb 100644
--- a/providers/amazon/tests/unit/amazon/aws/utils/test_suppress.py
+++ b/providers/amazon/tests/unit/amazon/aws/utils/test_suppress.py
@@ -22,7 +22,6 @@
 from airflow.providers.amazon.aws.utils.suppress import return_on_error
 
 
-@pytest.mark.db_test
 def test_suppress_function(caplog):
     @return_on_error("error")
     def fn(value: str, exc: Exception | None = None) -> str:
diff --git a/providers/amazon/tests/unit/amazon/aws/waiters/test_eks.py b/providers/amazon/tests/unit/amazon/aws/waiters/test_eks.py
index 6aad471868e6a..1b70fff4784df 100644
--- a/providers/amazon/tests/unit/amazon/aws/waiters/test_eks.py
+++ b/providers/amazon/tests/unit/amazon/aws/waiters/test_eks.py
@@ -19,6 +19,7 @@
 import json
 
 import boto3
+import pytest
 from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.eks import EksHook
@@ -36,6 +37,7 @@ def test_service_waiters(self):
         assert waiter in hook.list_waiters()
         assert waiter in hook._list_custom_waiters()
 
+    @pytest.mark.db_test
     @mock_aws
     def test_existing_waiter_inherited(self):
         """