This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 7ea40fe972 Make Amazon Provider tests compatible with `moto>=5` 
(#37060)
7ea40fe972 is described below

commit 7ea40fe972fd0488c4a514759c2ae23f35a6b6ff
Author: Andrey Anshin <andrey.ans...@taragol.is>
AuthorDate: Tue Jan 30 14:59:34 2024 +0400

    Make Amazon Provider tests compatible with `moto>=5` (#37060)
    
    * Make Amazon Provider tests compatible with `moto>=5`
    
    * Fix loggers task handlers tests
    
    * Change apache hive provider tests
    
    * Fix 'test_delete_bucket_if_bucket_not_exist'
    
    * update pyproject.toml
---
 airflow/providers/amazon/provider.yaml             |  5 +-
 generated/provider_dependencies.json               |  2 +-
 pyproject.toml                                     |  4 +-
 tests/providers/amazon/aws/hooks/test_base_aws.py  | 28 +++++-----
 .../amazon/aws/hooks/test_batch_waiters.py         |  4 +-
 .../amazon/aws/hooks/test_cloud_formation.py       |  9 ++--
 tests/providers/amazon/aws/hooks/test_datasync.py  |  6 +--
 tests/providers/amazon/aws/hooks/test_dynamodb.py  |  6 +--
 tests/providers/amazon/aws/hooks/test_ec2.py       | 28 +++++-----
 tests/providers/amazon/aws/hooks/test_ecr.py       |  4 +-
 tests/providers/amazon/aws/hooks/test_eks.py       | 20 +++----
 tests/providers/amazon/aws/hooks/test_emr.py       | 16 +++---
 .../providers/amazon/aws/hooks/test_eventbridge.py |  4 +-
 tests/providers/amazon/aws/hooks/test_glue.py      |  6 +--
 .../amazon/aws/hooks/test_glue_catalog.py          |  4 +-
 .../amazon/aws/hooks/test_glue_crawler.py          | 16 +++---
 tests/providers/amazon/aws/hooks/test_kinesis.py   |  5 +-
 tests/providers/amazon/aws/hooks/test_logs.py      |  4 +-
 tests/providers/amazon/aws/hooks/test_neptune.py   |  4 +-
 tests/providers/amazon/aws/hooks/test_rds.py       |  4 +-
 .../amazon/aws/hooks/test_redshift_cluster.py      | 14 ++---
 tests/providers/amazon/aws/hooks/test_s3.py        | 52 +++++++++---------
 tests/providers/amazon/aws/hooks/test_sagemaker.py |  6 +--
 .../amazon/aws/hooks/test_secrets_manager.py       |  4 +-
 tests/providers/amazon/aws/hooks/test_ses.py       |  6 +--
 tests/providers/amazon/aws/hooks/test_sns.py       |  4 +-
 tests/providers/amazon/aws/hooks/test_sqs.py       |  4 +-
 tests/providers/amazon/aws/hooks/test_ssm.py       |  4 +-
 .../amazon/aws/hooks/test_step_function.py         |  4 +-
 .../amazon/aws/log/test_cloudwatch_task_handler.py |  8 ++-
 .../amazon/aws/log/test_s3_task_handler.py         |  9 ++--
 .../amazon/aws/operators/test_datasync.py          | 14 ++---
 tests/providers/amazon/aws/operators/test_ec2.py   | 26 ++++-----
 .../amazon/aws/operators/test_glue_databrew.py     |  4 +-
 .../providers/amazon/aws/operators/test_neptune.py |  4 +-
 tests/providers/amazon/aws/operators/test_rds.py   | 62 +++++++++++-----------
 tests/providers/amazon/aws/operators/test_s3.py    | 36 ++++++-------
 .../aws/operators/test_sagemaker_notebook.py       |  4 +-
 tests/providers/amazon/aws/operators/test_sqs.py   |  8 +--
 .../amazon/aws/secrets/test_secrets_manager.py     | 20 +++----
 .../amazon/aws/secrets/test_systems_manager.py     | 14 ++---
 .../amazon/aws/sensors/test_cloud_formation.py     | 12 ++---
 .../providers/amazon/aws/sensors/test_dynamodb.py  | 10 ++--
 tests/providers/amazon/aws/sensors/test_ec2.py     | 10 ++--
 .../aws/sensors/test_glue_catalog_partition.py     | 12 ++---
 tests/providers/amazon/aws/sensors/test_rds.py     | 22 ++++----
 .../amazon/aws/sensors/test_redshift_cluster.py    |  8 +--
 tests/providers/amazon/aws/sensors/test_sqs.py     | 10 ++--
 .../amazon/aws/system/utils/test_helpers.py        |  4 +-
 .../amazon/aws/transfers/test_azure_blob_to_s3.py  |  4 +-
 .../amazon/aws/transfers/test_gcs_to_s3.py         |  4 +-
 .../amazon/aws/transfers/test_hive_to_dynamodb.py  |  8 +--
 .../amazon/aws/transfers/test_http_to_s3.py        |  4 +-
 .../amazon/aws/transfers/test_local_to_s3.py       |  6 +--
 .../amazon/aws/transfers/test_s3_to_sftp.py        |  4 +-
 .../amazon/aws/transfers/test_sftp_to_s3.py        |  4 +-
 .../amazon/aws/waiters/test_custom_waiters.py      |  4 +-
 .../apache/hive/transfers/test_s3_to_hive.py       |  4 +-
 58 files changed, 300 insertions(+), 316 deletions(-)

diff --git a/airflow/providers/amazon/provider.yaml 
b/airflow/providers/amazon/provider.yaml
index 981890661c..aeb85956c1 100644
--- a/airflow/providers/amazon/provider.yaml
+++ b/airflow/providers/amazon/provider.yaml
@@ -118,10 +118,7 @@ additional-extras:
 devel-dependencies:
   - aiobotocore>=2.7.0
   - aws_xray_sdk>=2.12.0
-  # Moto 5 replaced all decorators with single mock_aws decorator and we need 
to
-  # Replace the usage of mock decorators in our tests to be able to use moto 5
-  # See https://github.com/apache/airflow/issues/37053
-  - moto[cloudformation,glue]>=4.2.12,<5.0.0
+  - moto[cloudformation,glue]>=5.0.0
   - mypy-boto3-appflow>=1.33.0
   - mypy-boto3-rds>=1.33.0
   - mypy-boto3-redshift-data>=1.33.0
diff --git a/generated/provider_dependencies.json 
b/generated/provider_dependencies.json
index b4f9e53d8e..12b6ec2147 100644
--- a/generated/provider_dependencies.json
+++ b/generated/provider_dependencies.json
@@ -41,7 +41,7 @@
     "devel-deps": [
       "aiobotocore>=2.7.0",
       "aws_xray_sdk>=2.12.0",
-      "moto[cloudformation,glue]>=4.2.12,<5.0.0",
+      "moto[cloudformation,glue]>=5.0.0",
       "mypy-boto3-appflow>=1.33.0",
       "mypy-boto3-rds>=1.33.0",
       "mypy-boto3-redshift-data>=1.33.0",
diff --git a/pyproject.toml b/pyproject.toml
index 204fc9b697..e9a81d58c9 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -524,7 +524,7 @@ winrm = [
 # If you want to modify these - modify the corresponding provider.yaml instead.
 
#############################################################################################################
 # START OF GENERATED DEPENDENCIES
-# Hash of dependencies: ee123f98f7863f010cb52314e9c76927
+# Hash of dependencies: b724779455591b6a7542cf2d456e5366
 airbyte = [ # source: airflow/providers/airbyte/provider.yaml
   "apache-airflow[http]",
 ]
@@ -548,7 +548,7 @@ amazon = [ # source: airflow/providers/amazon/provider.yaml
   # Devel dependencies for the amazon provider
   "aiobotocore>=2.7.0",
   "aws_xray_sdk>=2.12.0",
-  "moto[cloudformation,glue]>=4.2.12,<5.0.0",
+  "moto[cloudformation,glue]>=5.0.0",
   "mypy-boto3-appflow>=1.33.0",
   "mypy-boto3-rds>=1.33.0",
   "mypy-boto3-redshift-data>=1.33.0",
diff --git a/tests/providers/amazon/aws/hooks/test_base_aws.py 
b/tests/providers/amazon/aws/hooks/test_base_aws.py
index c87aaa98fd..fc02ebb845 100644
--- a/tests/providers/amazon/aws/hooks/test_base_aws.py
+++ b/tests/providers/amazon/aws/hooks/test_base_aws.py
@@ -36,7 +36,7 @@ from botocore.config import Config
 from botocore.credentials import ReadOnlyCredentials
 from botocore.exceptions import NoCredentialsError
 from botocore.utils import FileWebIdentityTokenLoader
-from moto import mock_dynamodb, mock_emr, mock_iam, mock_sts
+from moto import mock_aws
 from moto.core import DEFAULT_ACCOUNT_ID
 
 from airflow.exceptions import AirflowException, 
AirflowProviderDeprecationWarning
@@ -279,7 +279,7 @@ class TestSessionFactory:
         ("assume-without-initial-creds", {}),
     ]
 
-    @mock_sts
+    @mock_aws
     @pytest.mark.parametrize(
         "conn_id, conn_extra",
         config_for_credentials_test,
@@ -341,7 +341,7 @@ class TestSessionFactory:
 
 
 class TestAwsBaseHook:
-    @mock_emr
+    @mock_aws
     def test_get_client_type_set_in_class_attribute(self):
         client = boto3.client("emr", region_name="us-east-1")
         if client.list_clusters()["Clusters"]:
@@ -351,7 +351,7 @@ class TestAwsBaseHook:
 
         assert client_from_hook.list_clusters()["Clusters"] == []
 
-    @mock_dynamodb
+    @mock_aws
     def test_get_resource_type_set_in_class_attribute(self):
         hook = AwsBaseHook(aws_conn_id="aws_default", resource_type="dynamodb")
         resource_from_hook = hook.get_resource_type()
@@ -370,7 +370,7 @@ class TestAwsBaseHook:
 
         assert table.item_count == 0
 
-    @mock_dynamodb
+    @mock_aws
     def test_get_session_returns_a_boto3_session(self):
         hook = AwsBaseHook(aws_conn_id="aws_default", resource_type="dynamodb")
         session_from_hook = hook.get_session()
@@ -465,7 +465,7 @@ class TestAwsBaseHook:
         ],
     )
     @mock.patch.object(AwsBaseHook, "get_connection")
-    @mock_sts
+    @mock_aws
     def test_assume_role(self, mock_get_connection, sts_endpoint):
         aws_conn_id = "aws/test"
         role_arn = "arn:aws:iam::123456:role/role_arn"
@@ -631,7 +631,7 @@ class TestAwsBaseHook:
         ],
     )
     @mock.patch.object(AwsBaseHook, "get_connection")
-    @mock_sts
+    @mock_aws
     def test_assume_role_with_saml(self, mock_get_connection, sts_endpoint):
         idp_url = "https://my-idp.local.corp";
         principal_arn = "principal_arn_1234567890"
@@ -718,7 +718,7 @@ class TestAwsBaseHook:
             ),
         ]
 
-    @mock_iam
+    @mock_aws
     def test_expand_role(self):
         conn = boto3.client("iam", region_name="us-east-1")
         conn.create_role(RoleName="test-role", AssumeRolePolicyDocument="some 
policy")
@@ -734,7 +734,7 @@ class TestAwsBaseHook:
             hook.get_client_type("s3")
 
     @mock.patch.object(AwsBaseHook, "get_connection")
-    @mock_sts
+    @mock_aws
     def test_refreshable_credentials(self, mock_get_connection):
         role_arn = "arn:aws:iam::123456:role/role_arn"
         conn_id = "F5"
@@ -786,7 +786,7 @@ class TestAwsBaseHook:
             assert mock_refresh.call_count == 2
             assert len(expire_on_calls) == 0
 
-    @mock_dynamodb
+    @mock_aws
     @pytest.mark.parametrize("conn_type", ["client", "resource"])
     @pytest.mark.parametrize(
         "connection_uri,region_name,env_region,expected_region_name",
@@ -814,7 +814,7 @@ class TestAwsBaseHook:
 
             assert hook.conn_region_name == expected_region_name
 
-    @mock_dynamodb
+    @mock_aws
     @pytest.mark.parametrize("conn_type", ["client", "resource"])
     @pytest.mark.parametrize(
         "connection_uri,expected_partition",
@@ -835,7 +835,7 @@ class TestAwsBaseHook:
 
             assert hook.conn_partition == expected_partition
 
-    @mock_dynamodb
+    @mock_aws
     def test_service_name(self):
         client_hook = AwsBaseHook(aws_conn_id=None, client_type="dynamodb")
         resource_hook = AwsBaseHook(aws_conn_id=None, resource_type="dynamodb")
@@ -869,7 +869,7 @@ class TestAwsBaseHook:
         with pytest.raises(ValueError, match="Either client_type=.* or 
resource_type=.* must be provided"):
             hook.get_conn()
 
-    @mock_sts
+    @mock_aws
     def test_hook_connection_test(self):
         hook = AwsBaseHook(client_type="s3")
         result, message = hook.test_connection()
@@ -1031,7 +1031,7 @@ class TestAwsBaseHook:
         assert mock_mask_secret.mock_calls == expected_calls
         assert credentials == expected_credentials
 
-    @mock_sts
+    @mock_aws
     def test_account_id(self):
         assert AwsBaseHook(aws_conn_id=None).account_id == DEFAULT_ACCOUNT_ID
 
diff --git a/tests/providers/amazon/aws/hooks/test_batch_waiters.py 
b/tests/providers/amazon/aws/hooks/test_batch_waiters.py
index 4fc894090c..93bd77d0ee 100644
--- a/tests/providers/amazon/aws/hooks/test_batch_waiters.py
+++ b/tests/providers/amazon/aws/hooks/test_batch_waiters.py
@@ -26,7 +26,7 @@ import boto3
 import pytest
 from botocore.exceptions import ClientError, WaiterError
 from botocore.waiter import SingleWaiterConfig, WaiterModel
-from moto import mock_batch
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.batch_waiters import BatchWaitersHook
@@ -45,7 +45,7 @@ def aws_region():
     return AWS_REGION
 
 
-@mock_batch
+@mock_aws
 @pytest.fixture
 def patch_hook(monkeypatch, aws_region):
     """Patch hook object by dummy boto3 Batch client."""
diff --git a/tests/providers/amazon/aws/hooks/test_cloud_formation.py 
b/tests/providers/amazon/aws/hooks/test_cloud_formation.py
index ce9eed6e6d..8dcea8b8f9 100644
--- a/tests/providers/amazon/aws/hooks/test_cloud_formation.py
+++ b/tests/providers/amazon/aws/hooks/test_cloud_formation.py
@@ -19,13 +19,14 @@ from __future__ import annotations
 
 import json
 
-from moto import mock_cloudformation
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.cloud_formation import 
CloudFormationHook
 
 
+@mock_aws
 class TestCloudFormationHook:
-    def setup_method(self):
+    def setup_method(self, _):
         self.hook = CloudFormationHook(aws_conn_id="aws_default")
 
     def create_stack(self, stack_name):
@@ -60,11 +61,9 @@ class TestCloudFormationHook:
             },
         )
 
-    @mock_cloudformation
     def test_get_conn_returns_a_boto3_connection(self):
         assert self.hook.get_conn().describe_stacks() is not None
 
-    @mock_cloudformation
     def test_get_stack_status(self):
         stack_name = "my_test_get_stack_status_stack"
 
@@ -75,7 +74,6 @@ class TestCloudFormationHook:
         stack_status = self.hook.get_stack_status(stack_name=stack_name)
         assert stack_status == "CREATE_COMPLETE", "Incorrect stack status 
returned."
 
-    @mock_cloudformation
     def test_create_stack(self):
         stack_name = "my_test_create_stack_stack"
         self.create_stack(stack_name)
@@ -89,7 +87,6 @@ class TestCloudFormationHook:
         stack = matching_stacks[0]
         assert stack["StackStatus"] == "CREATE_COMPLETE", "Stack should be in 
status CREATE_COMPLETE"
 
-    @mock_cloudformation
     def test_delete_stack(self):
         stack_name = "my_test_delete_stack_stack"
         self.create_stack(stack_name)
diff --git a/tests/providers/amazon/aws/hooks/test_datasync.py 
b/tests/providers/amazon/aws/hooks/test_datasync.py
index f7631a73b2..f2eb89c04f 100644
--- a/tests/providers/amazon/aws/hooks/test_datasync.py
+++ b/tests/providers/amazon/aws/hooks/test_datasync.py
@@ -21,13 +21,13 @@ from unittest import mock
 
 import boto3
 import pytest
-from moto import mock_datasync
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException, AirflowTaskTimeout
 from airflow.providers.amazon.aws.hooks.datasync import DataSyncHook
 
 
-@mock_datasync
+@mock_aws
 class TestDataSyncHook:
     def test_get_conn(self):
         hook = DataSyncHook(aws_conn_id="aws_default")
@@ -47,7 +47,7 @@ class TestDataSyncHook:
 # separate class above
 
 
-@mock_datasync
+@mock_aws
 @mock.patch.object(DataSyncHook, "get_conn")
 class TestDataSyncHookMocked:
     source_server_hostname = "host"
diff --git a/tests/providers/amazon/aws/hooks/test_dynamodb.py 
b/tests/providers/amazon/aws/hooks/test_dynamodb.py
index 0fb96fc3b1..f3baba8b69 100644
--- a/tests/providers/amazon/aws/hooks/test_dynamodb.py
+++ b/tests/providers/amazon/aws/hooks/test_dynamodb.py
@@ -20,18 +20,18 @@ from __future__ import annotations
 import uuid
 from unittest import mock
 
-from moto import mock_dynamodb
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
 
 
 class TestDynamoDBHook:
-    @mock_dynamodb
+    @mock_aws
     def test_get_conn_returns_a_boto3_connection(self):
         hook = DynamoDBHook(aws_conn_id="aws_default")
         assert hook.get_conn() is not None
 
-    @mock_dynamodb
+    @mock_aws
     def test_insert_batch_items_dynamodb_table(self):
         hook = DynamoDBHook(
             aws_conn_id="aws_default", table_name="test_airflow", 
table_keys=["id"], region_name="us-east-1"
diff --git a/tests/providers/amazon/aws/hooks/test_ec2.py 
b/tests/providers/amazon/aws/hooks/test_ec2.py
index b989a57a65..8974570a97 100644
--- a/tests/providers/amazon/aws/hooks/test_ec2.py
+++ b/tests/providers/amazon/aws/hooks/test_ec2.py
@@ -18,7 +18,7 @@
 from __future__ import annotations
 
 import pytest
-from moto import mock_ec2
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook
@@ -57,19 +57,19 @@ class TestEC2Hook:
         """Create Instance and return instance id."""
         return cls._create_instances(hook)[0]
 
-    @mock_ec2
+    @mock_aws
     def test_get_conn_returns_boto3_resource(self):
         ec2_hook = EC2Hook()
         instances = list(ec2_hook.conn.instances.all())
         assert instances is not None
 
-    @mock_ec2
+    @mock_aws
     def test_client_type_get_conn_returns_boto3_resource(self):
         ec2_hook = EC2Hook(api_type="client_type")
         instances = list(ec2_hook.get_instances())
         assert instances is not None
 
-    @mock_ec2
+    @mock_aws
     def test_get_instance(self):
         ec2_hook = EC2Hook()
         created_instance_id = self._create_instance(ec2_hook)
@@ -77,7 +77,7 @@ class TestEC2Hook:
         existing_instance = 
ec2_hook.get_instance(instance_id=created_instance_id)
         assert created_instance_id == existing_instance.instance_id
 
-    @mock_ec2
+    @mock_aws
     def test_get_instance_client_type(self):
         ec2_hook = EC2Hook(api_type="client_type")
         created_instance_id = self._create_instance(ec2_hook)
@@ -85,7 +85,7 @@ class TestEC2Hook:
         existing_instance = 
ec2_hook.get_instance(instance_id=created_instance_id)
         assert created_instance_id == existing_instance["InstanceId"]
 
-    @mock_ec2
+    @mock_aws
     def test_get_instance_state(self):
         ec2_hook = EC2Hook()
         created_instance_id = self._create_instance(ec2_hook)
@@ -95,7 +95,7 @@ class TestEC2Hook:
         existing_instance_state = 
ec2_hook.get_instance_state(instance_id=created_instance_id)
         assert created_instance_state == existing_instance_state
 
-    @mock_ec2
+    @mock_aws
     def test_client_type_get_instance_state(self):
         ec2_hook = EC2Hook(api_type="client_type")
         created_instance_id = self._create_instance(ec2_hook)
@@ -105,7 +105,7 @@ class TestEC2Hook:
         existing_instance_state = 
ec2_hook.get_instance_state(instance_id=created_instance_id)
         assert created_instance_state == existing_instance_state
 
-    @mock_ec2
+    @mock_aws
     def test_client_type_start_instances(self):
         ec2_hook = EC2Hook(api_type="client_type")
         created_instance_id = self._create_instance(ec2_hook)
@@ -114,7 +114,7 @@ class TestEC2Hook:
         assert response["StartingInstances"][0]["InstanceId"] == 
created_instance_id
         assert ec2_hook.get_instance_state(created_instance_id) == "running"
 
-    @mock_ec2
+    @mock_aws
     def test_client_type_stop_instances(self):
         ec2_hook = EC2Hook(api_type="client_type")
         created_instance_id = self._create_instance(ec2_hook)
@@ -123,7 +123,7 @@ class TestEC2Hook:
         assert response["StoppingInstances"][0]["InstanceId"] == 
created_instance_id
         assert ec2_hook.get_instance_state(created_instance_id) == "stopped"
 
-    @mock_ec2
+    @mock_aws
     def test_client_type_terminate_instances(self):
         ec2_hook = EC2Hook(api_type="client_type")
         created_instance_id = self._create_instance(ec2_hook)
@@ -132,7 +132,7 @@ class TestEC2Hook:
         assert response["TerminatingInstances"][0]["InstanceId"] == 
created_instance_id
         assert ec2_hook.get_instance_state(created_instance_id) == "terminated"
 
-    @mock_ec2
+    @mock_aws
     def test_client_type_describe_instances(self):
         ec2_hook = EC2Hook(api_type="client_type")
         created_instance_id = self._create_instance(ec2_hook)
@@ -159,7 +159,7 @@ class TestEC2Hook:
 
         assert len(response["Reservations"]) == 0
 
-    @mock_ec2
+    @mock_aws
     def test_client_type_get_instances(self):
         ec2_hook = EC2Hook(api_type="client_type")
         created_instances = self._create_instances(ec2_hook, max_count=2, 
min_count=2)
@@ -196,7 +196,7 @@ class TestEC2Hook:
 
         assert len(response) == 0
 
-    @mock_ec2
+    @mock_aws
     def test_client_type_get_instance_ids(self):
         ec2_hook = EC2Hook(api_type="client_type")
         created_instances = self._create_instances(ec2_hook, max_count=2, 
min_count=2)
@@ -223,7 +223,7 @@ class TestEC2Hook:
 
         assert len(response) == 0
 
-    @mock_ec2
+    @mock_aws
     def test_decorator_only_client_type(self):
         ec2_hook = EC2Hook()
 
diff --git a/tests/providers/amazon/aws/hooks/test_ecr.py 
b/tests/providers/amazon/aws/hooks/test_ecr.py
index ccc624bbe6..730b6e11d4 100644
--- a/tests/providers/amazon/aws/hooks/test_ecr.py
+++ b/tests/providers/amazon/aws/hooks/test_ecr.py
@@ -21,7 +21,7 @@ from unittest import mock
 
 import boto3
 import pytest
-from moto import mock_ecr
+from moto import mock_aws
 from moto.core import DEFAULT_ACCOUNT_ID
 
 from airflow.providers.amazon.aws.hooks.ecr import EcrHook
@@ -35,7 +35,7 @@ def patch_hook(monkeypatch):
     yield
 
 
-@mock_ecr
+@mock_aws
 class TestEcrHook:
     def test_service_type(self):
         """Test expected boto3 client type."""
diff --git a/tests/providers/amazon/aws/hooks/test_eks.py 
b/tests/providers/amazon/aws/hooks/test_eks.py
index e35c0eba1f..9d8e1f57f1 100644
--- a/tests/providers/amazon/aws/hooks/test_eks.py
+++ b/tests/providers/amazon/aws/hooks/test_eks.py
@@ -28,7 +28,7 @@ import pytest
 import time_machine
 import yaml
 from botocore.exceptions import ClientError
-from moto import mock_eks
+from moto import mock_aws
 from moto.core import DEFAULT_ACCOUNT_ID
 from moto.eks.exceptions import (
     InvalidParameterException,
@@ -128,13 +128,9 @@ def cluster_builder():
     def _execute(count: int = 1, minimal: bool = True) -> tuple[EksHook, 
ClusterTestDataFactory]:
         return eks_hook, ClusterTestDataFactory(count=count, minimal=minimal)
 
-    mock_eks().start()
-    eks_hook = EksHook(
-        aws_conn_id=DEFAULT_CONN_ID,
-        region_name=REGION,
-    )
-    yield _execute
-    mock_eks().stop()
+    with mock_aws():
+        eks_hook = EksHook(aws_conn_id=DEFAULT_CONN_ID, region_name=REGION)
+        yield _execute
 
 
 @pytest.fixture(scope="function")
@@ -234,7 +230,7 @@ class TestEksHooks:
     # in the list at initialization, which means the mock
     # decorator must be used manually in this one case.
     ###
-    @mock_eks
+    @mock_aws
     def test_list_clusters_returns_empty_by_default(self) -> None:
         eks_hook: EksHook = EksHook(aws_conn_id=DEFAULT_CONN_ID, 
region_name=REGION)
 
@@ -422,7 +418,7 @@ class TestEksHooks:
 
         assert_result_matches_expected_list(result, expected_result)
 
-    @mock_eks
+    @mock_aws
     def test_create_nodegroup_throws_exception_when_cluster_not_found(self) -> 
None:
         eks_hook: EksHook = EksHook(aws_conn_id=DEFAULT_CONN_ID, 
region_name=REGION)
         non_existent_cluster_name: str = NON_EXISTING_CLUSTER_NAME
@@ -829,7 +825,7 @@ class TestEksHooks:
 
         assert_result_matches_expected_list(result, expected_result)
 
-    @mock_eks
+    @mock_aws
     def 
test_create_fargate_profile_throws_exception_when_cluster_not_found(self) -> 
None:
         eks_hook: EksHook = EksHook(aws_conn_id=DEFAULT_CONN_ID, 
region_name=REGION)
         non_existent_cluster_name: str = NON_EXISTING_CLUSTER_NAME
@@ -1161,7 +1157,7 @@ class TestEksHooks:
         "selectors, expected_message, expected_result",
         selector_formatting_test_cases,
     )
-    @mock_eks
+    @mock_aws
     def test_create_fargate_selectors(self, cluster_builder, selectors, 
expected_message, expected_result):
         client, generated_test_data = cluster_builder()
         cluster_name: str = generated_test_data.existing_cluster_name
diff --git a/tests/providers/amazon/aws/hooks/test_emr.py 
b/tests/providers/amazon/aws/hooks/test_emr.py
index 4de79967f8..cf3caafafb 100644
--- a/tests/providers/amazon/aws/hooks/test_emr.py
+++ b/tests/providers/amazon/aws/hooks/test_emr.py
@@ -23,7 +23,7 @@ from unittest import mock
 import boto3
 import pytest
 from botocore.exceptions import WaiterError
-from moto import mock_emr
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.emr import EmrHook
@@ -44,12 +44,12 @@ class TestEmrHook:
 
         assert sorted(hook.list_waiters()) == sorted([*official_waiters, 
*custom_waiters])
 
-    @mock_emr
+    @mock_aws
     def test_get_conn_returns_a_boto3_connection(self):
         hook = EmrHook(aws_conn_id="aws_default", region_name="ap-southeast-2")
         assert hook.get_conn().list_clusters() is not None
 
-    @mock_emr
+    @mock_aws
     def test_create_job_flow_uses_the_emr_config_to_create_a_cluster(self):
         client = boto3.client("emr", region_name="us-east-1")
 
@@ -60,7 +60,7 @@ class TestEmrHook:
 
         assert client.list_clusters()["Clusters"][0]["Id"] == 
cluster["JobFlowId"]
 
-    @mock_emr
+    @mock_aws
     @pytest.mark.parametrize("num_steps", [1, 2, 3, 4])
     def test_add_job_flow_steps_one_step(self, num_steps):
         hook = EmrHook(aws_conn_id="aws_default", emr_conn_id="emr_default", 
region_name="us-east-1")
@@ -153,7 +153,7 @@ class TestEmrHook:
         mock_conn.get_waiter.assert_called_with("step_complete")
 
     @pytest.mark.db_test
-    @mock_emr
+    @mock_aws
     def test_create_job_flow_extra_args(self):
         """
         Test that we can add extra arguments to the launch call.
@@ -224,7 +224,7 @@ class TestEmrHook:
         assert not result
         assert message.startswith("'Amazon Elastic MapReduce' Airflow 
Connection cannot be tested")
 
-    @mock_emr
+    @mock_aws
     def test_get_cluster_id_by_name(self):
         """
         Test that we can resolve cluster id by cluster name.
@@ -245,7 +245,7 @@ class TestEmrHook:
 
         assert no_match is None
 
-    @mock_emr
+    @mock_aws
     def test_get_cluster_id_by_name_duplicate(self):
         """
         Test that we get an exception when there are duplicate clusters
@@ -259,7 +259,7 @@ class TestEmrHook:
         with pytest.raises(AirflowException):
             hook.get_cluster_id_by_name("test_cluster", ["RUNNING", "WAITING", 
"BOOTSTRAPPING"])
 
-    @mock_emr
+    @mock_aws
     def test_get_cluster_id_by_name_pagination(self):
         """
         Test that we can resolve cluster id by cluster name when there are
diff --git a/tests/providers/amazon/aws/hooks/test_eventbridge.py 
b/tests/providers/amazon/aws/hooks/test_eventbridge.py
index 1659ba1d8d..62fbb5ef55 100644
--- a/tests/providers/amazon/aws/hooks/test_eventbridge.py
+++ b/tests/providers/amazon/aws/hooks/test_eventbridge.py
@@ -17,12 +17,12 @@
 from __future__ import annotations
 
 import pytest
-from moto import mock_events
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.eventbridge import EventBridgeHook
 
 
-@mock_events
+@mock_aws
 class TestEventBridgeHook:
     def test_conn_returns_a_boto3_connection(self):
         hook = EventBridgeHook(aws_conn_id="aws_default")
diff --git a/tests/providers/amazon/aws/hooks/test_glue.py 
b/tests/providers/amazon/aws/hooks/test_glue.py
index 799547dd57..42834b7090 100644
--- a/tests/providers/amazon/aws/hooks/test_glue.py
+++ b/tests/providers/amazon/aws/hooks/test_glue.py
@@ -24,7 +24,7 @@ from unittest import mock
 import boto3
 import pytest
 from botocore.exceptions import ClientError
-from moto import mock_glue, mock_iam
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
@@ -38,7 +38,7 @@ class TestGlueJobHook:
     def setup_method(self):
         self.some_aws_region = "us-west-2"
 
-    @mock_iam
+    @mock_aws
     @pytest.mark.parametrize("role_path", ["/", "/custom-path/"])
     def test_get_iam_execution_role(self, role_path):
         expected_role = "my_test_role"
@@ -308,7 +308,7 @@ class TestGlueJobHook:
         )
         assert result == job_name
 
-    @mock_glue
+    @mock_aws
     @mock.patch.object(GlueJobHook, "get_iam_execution_role")
     def test_create_or_update_glue_job_worker_type(self, 
mock_get_iam_execution_role):
         mock_get_iam_execution_role.return_value = {"Role": {"RoleName": 
"my_test_role", "Arn": "test_role"}}
diff --git a/tests/providers/amazon/aws/hooks/test_glue_catalog.py 
b/tests/providers/amazon/aws/hooks/test_glue_catalog.py
index c021eb19e0..d3add4fc0a 100644
--- a/tests/providers/amazon/aws/hooks/test_glue_catalog.py
+++ b/tests/providers/amazon/aws/hooks/test_glue_catalog.py
@@ -22,7 +22,7 @@ from unittest import mock
 import boto3
 import pytest
 from botocore.exceptions import ClientError
-from moto import mock_glue
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.glue_catalog import GlueCatalogHook
@@ -41,7 +41,7 @@ PARTITION_INPUT: dict = {
 }
 
 
-@mock_glue
+@mock_aws
 class TestGlueCatalogHook:
     def setup_method(self, method):
         self.client = boto3.client("glue", region_name="us-east-1")
diff --git a/tests/providers/amazon/aws/hooks/test_glue_crawler.py 
b/tests/providers/amazon/aws/hooks/test_glue_crawler.py
index 167ffd0d3d..69990769ed 100644
--- a/tests/providers/amazon/aws/hooks/test_glue_crawler.py
+++ b/tests/providers/amazon/aws/hooks/test_glue_crawler.py
@@ -21,7 +21,7 @@ from copy import deepcopy
 from typing import TYPE_CHECKING
 from unittest import mock
 
-from moto import mock_sts
+from moto import mock_aws
 from moto.core import DEFAULT_ACCOUNT_ID
 
 from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook
@@ -114,7 +114,7 @@ class TestGlueCrawlerHook:
         assert self.hook.has_crawler(mock_crawler_name) is False
         
mock_get_conn.return_value.get_crawler.assert_called_once_with(Name=mock_crawler_name)
 
-    @mock_sts
+    @mock_aws
     @mock.patch.object(GlueCrawlerHook, "get_conn")
     def test_update_crawler_needed(self, mock_get_conn):
         mock_get_conn.return_value.get_crawler.return_value = {"Crawler": 
mock_config}
@@ -126,7 +126,7 @@ class TestGlueCrawlerHook:
         
mock_get_conn.return_value.get_crawler.assert_called_once_with(Name=mock_crawler_name)
         
mock_get_conn.return_value.update_crawler.assert_called_once_with(**mock_config_two)
 
-    @mock_sts
+    @mock_aws
     @mock.patch.object(GlueCrawlerHook, "get_conn")
     def test_update_crawler_missing_keys(self, mock_get_conn):
         mock_config_missing_configuration = deepcopy(mock_config)
@@ -139,7 +139,7 @@ class TestGlueCrawlerHook:
         
mock_get_conn.return_value.get_crawler.assert_called_once_with(Name=mock_crawler_name)
         
mock_get_conn.return_value.update_crawler.assert_called_once_with(**mock_config_two)
 
-    @mock_sts
+    @mock_aws
     @mock.patch.object(GlueCrawlerHook, "get_conn")
     def test_update_tags_not_needed(self, mock_get_conn):
         mock_get_conn.return_value.get_crawler.return_value = {"Crawler": 
mock_config}
@@ -150,7 +150,7 @@ class TestGlueCrawlerHook:
         mock_get_conn.return_value.tag_resource.assert_not_called()
         mock_get_conn.return_value.untag_resource.assert_not_called()
 
-    @mock_sts
+    @mock_aws
     @mock.patch.object(GlueCrawlerHook, "get_conn")
     def test_remove_all_tags(self, mock_get_conn):
         mock_get_conn.return_value.get_crawler.return_value = {"Crawler": 
mock_config}
@@ -163,7 +163,7 @@ class TestGlueCrawlerHook:
             ResourceArn=self.crawler_arn, TagsToRemove=["test", "bar"]
         )
 
-    @mock_sts
+    @mock_aws
     @mock.patch.object(GlueCrawlerHook, "get_conn")
     def test_update_missing_tags(self, mock_get_conn):
         mock_config_missing_tags = deepcopy(mock_config)
@@ -175,7 +175,7 @@ class TestGlueCrawlerHook:
         mock_get_conn.return_value.tag_resource.assert_not_called()
         mock_get_conn.return_value.untag_resource.assert_not_called()
 
-    @mock_sts
+    @mock_aws
     @mock.patch.object(GlueCrawlerHook, "get_conn")
     def test_replace_tag(self, mock_get_conn):
         mock_get_conn.return_value.get_crawler.return_value = {"Crawler": 
mock_config}
@@ -188,7 +188,7 @@ class TestGlueCrawlerHook:
             ResourceArn=self.crawler_arn, TagsToAdd={"test": "bla"}
         )
 
-    @mock_sts
+    @mock_aws
     @mock.patch.object(GlueCrawlerHook, "get_conn")
     def test_update_crawler_not_needed(self, mock_get_conn):
         mock_get_conn.return_value.get_crawler.return_value = {"Crawler": 
mock_config}
diff --git a/tests/providers/amazon/aws/hooks/test_kinesis.py 
b/tests/providers/amazon/aws/hooks/test_kinesis.py
index e77f11126a..1080aa5379 100644
--- a/tests/providers/amazon/aws/hooks/test_kinesis.py
+++ b/tests/providers/amazon/aws/hooks/test_kinesis.py
@@ -20,12 +20,12 @@ from __future__ import annotations
 import uuid
 
 import boto3
-from moto import mock_firehose, mock_s3
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.kinesis import FirehoseHook
 
 
-@mock_firehose
+@mock_aws
 class TestFirehoseHook:
     def test_get_conn_returns_a_boto3_connection(self):
         hook = FirehoseHook(
@@ -33,7 +33,6 @@ class TestFirehoseHook:
         )
         assert hook.get_conn() is not None
 
-    @mock_s3
     def test_insert_batch_records_kinesis_firehose(self):
         boto3.client("s3").create_bucket(Bucket="kinesis-test")
         hook = FirehoseHook(
diff --git a/tests/providers/amazon/aws/hooks/test_logs.py 
b/tests/providers/amazon/aws/hooks/test_logs.py
index 4e2ec3a595..d892dc646e 100644
--- a/tests/providers/amazon/aws/hooks/test_logs.py
+++ b/tests/providers/amazon/aws/hooks/test_logs.py
@@ -21,12 +21,12 @@ from unittest import mock
 from unittest.mock import ANY, patch
 
 import pytest
-from moto import mock_logs
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
 
 
-@mock_logs
+@mock_aws
 class TestAwsLogsHook:
     @pytest.mark.parametrize(
         "get_log_events_response, num_skip_events, expected_num_events, 
end_time",
diff --git a/tests/providers/amazon/aws/hooks/test_neptune.py 
b/tests/providers/amazon/aws/hooks/test_neptune.py
index bf8372190b..b07074428b 100644
--- a/tests/providers/amazon/aws/hooks/test_neptune.py
+++ b/tests/providers/amazon/aws/hooks/test_neptune.py
@@ -20,7 +20,7 @@ from __future__ import annotations
 from typing import Generator
 
 import pytest
-from moto import mock_neptune
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.neptune import NeptuneHook
 
@@ -28,7 +28,7 @@ from airflow.providers.amazon.aws.hooks.neptune import 
NeptuneHook
 @pytest.fixture
 def neptune_hook() -> Generator[NeptuneHook, None, None]:
     """Returns a NeptuneHook mocked with moto"""
-    with mock_neptune():
+    with mock_aws():
         yield NeptuneHook(aws_conn_id="aws_default")
 
 
diff --git a/tests/providers/amazon/aws/hooks/test_rds.py 
b/tests/providers/amazon/aws/hooks/test_rds.py
index a167567c9f..b2668febfb 100644
--- a/tests/providers/amazon/aws/hooks/test_rds.py
+++ b/tests/providers/amazon/aws/hooks/test_rds.py
@@ -21,7 +21,7 @@ from typing import TYPE_CHECKING, Generator
 from unittest.mock import patch
 
 import pytest
-from moto import mock_rds
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException, AirflowNotFoundException
 from airflow.providers.amazon.aws.hooks.rds import RdsHook
@@ -33,7 +33,7 @@ if TYPE_CHECKING:
 @pytest.fixture
 def rds_hook() -> Generator[RdsHook, None, None]:
     """Returns an RdsHook whose underlying connection is mocked with moto"""
-    with mock_rds():
+    with mock_aws():
         yield RdsHook(aws_conn_id="aws_default", region_name="us-east-1")
 
 
diff --git a/tests/providers/amazon/aws/hooks/test_redshift_cluster.py 
b/tests/providers/amazon/aws/hooks/test_redshift_cluster.py
index 083c78eead..6ebaab56c9 100644
--- a/tests/providers/amazon/aws/hooks/test_redshift_cluster.py
+++ b/tests/providers/amazon/aws/hooks/test_redshift_cluster.py
@@ -17,7 +17,7 @@
 from __future__ import annotations
 
 import boto3
-from moto import mock_redshift
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook
@@ -42,7 +42,7 @@ class TestRedshiftHook:
         if not client.describe_clusters()["Clusters"]:
             raise ValueError("AWS not properly mocked")
 
-    @mock_redshift
+    @mock_aws
     def 
test_get_client_type_returns_a_boto3_client_of_the_requested_type(self):
         self._create_clusters()
         hook = AwsBaseHook(aws_conn_id="aws_default", client_type="redshift")
@@ -51,7 +51,7 @@ class TestRedshiftHook:
         clusters = client_from_hook.describe_clusters()["Clusters"]
         assert len(clusters) == 2
 
-    @mock_redshift
+    @mock_aws
     def 
test_restore_from_cluster_snapshot_returns_dict_with_cluster_data(self):
         self._create_clusters()
         hook = RedshiftHook(aws_conn_id="aws_default")
@@ -61,7 +61,7 @@ class TestRedshiftHook:
             == "test_cluster_3"
         )
 
-    @mock_redshift
+    @mock_aws
     def test_delete_cluster_returns_a_dict_with_cluster_data(self):
         self._create_clusters()
         hook = RedshiftHook(aws_conn_id="aws_default")
@@ -69,7 +69,7 @@ class TestRedshiftHook:
         cluster = hook.delete_cluster("test_cluster_2")
         assert cluster is not None
 
-    @mock_redshift
+    @mock_aws
     def test_create_cluster_snapshot_returns_snapshot_data(self):
         self._create_clusters()
         hook = RedshiftHook(aws_conn_id="aws_default")
@@ -77,14 +77,14 @@ class TestRedshiftHook:
         snapshot = hook.create_cluster_snapshot("test_snapshot_2", 
"test_cluster")
         assert snapshot is not None
 
-    @mock_redshift
+    @mock_aws
     def test_cluster_status_returns_cluster_not_found(self):
         self._create_clusters()
         hook = RedshiftHook(aws_conn_id="aws_default")
         status = hook.cluster_status("test_cluster_not_here")
         assert status == "cluster_not_found"
 
-    @mock_redshift
+    @mock_aws
     def test_cluster_status_returns_available_cluster(self):
         self._create_clusters()
         hook = RedshiftHook(aws_conn_id="aws_default")
diff --git a/tests/providers/amazon/aws/hooks/test_s3.py 
b/tests/providers/amazon/aws/hooks/test_s3.py
index 21bcdcabd7..bea5828e67 100644
--- a/tests/providers/amazon/aws/hooks/test_s3.py
+++ b/tests/providers/amazon/aws/hooks/test_s3.py
@@ -30,7 +30,7 @@ from urllib.parse import parse_qs
 import boto3
 import pytest
 from botocore.exceptions import ClientError
-from moto import mock_s3
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException
 from airflow.models import Connection
@@ -45,7 +45,7 @@ from airflow.utils.timezone import datetime
 
 @pytest.fixture
 def mocked_s3_res():
-    with mock_s3():
+    with mock_aws():
         yield boto3.resource("s3")
 
 
@@ -57,7 +57,7 @@ def s3_bucket(mocked_s3_res):
 
 
 class TestAwsS3Hook:
-    @mock_s3
+    @mock_aws
     def test_get_conn(self):
         hook = S3Hook()
         assert hook.get_conn() is not None
@@ -129,18 +129,18 @@ class TestAwsS3Hook:
         assert hook.check_for_bucket(s3_bucket) is True
         assert hook.check_for_bucket("not-a-bucket") is False
 
-    @mock_s3
+    @mock_aws
     def test_get_bucket(self):
         hook = S3Hook()
         assert hook.get_bucket("bucket") is not None
 
-    @mock_s3
+    @mock_aws
     def test_create_bucket_default_region(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
         assert hook.get_bucket("new_bucket") is not None
 
-    @mock_s3
+    @mock_aws
     def test_create_bucket_us_standard_region(self, monkeypatch):
         monkeypatch.delenv("AWS_DEFAULT_REGION", raising=False)
 
@@ -153,7 +153,7 @@ class TestAwsS3Hook:
         # If location is "us-east-1", LocationConstraint should be None
         assert region is None
 
-    @mock_s3
+    @mock_aws
     def test_create_bucket_other_region(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket", region_name="us-east-2")
@@ -162,7 +162,7 @@ class TestAwsS3Hook:
         region = 
bucket.meta.client.get_bucket_location(Bucket=bucket.name).get("LocationConstraint")
         assert region == "us-east-2"
 
-    @mock_s3
+    @mock_aws
     @pytest.mark.parametrize("region_name", ["eu-west-1", "us-east-1"])
     def test_create_bucket_regional_endpoint(self, region_name, monkeypatch):
         conn = Connection(
@@ -959,7 +959,7 @@ class TestAwsS3Hook:
         )
         assert (response["Grants"][0]["Permission"] == "FULL_CONTROL") and 
(len(response["Grants"]) == 1)
 
-    @mock_s3
+    @mock_aws
     def test_delete_bucket_if_bucket_exist(self, s3_bucket):
         # assert if the bucket is created
         mock_hook = S3Hook()
@@ -968,12 +968,12 @@ class TestAwsS3Hook:
         mock_hook.delete_bucket(bucket_name=s3_bucket, force_delete=True)
         assert not mock_hook.check_for_bucket(s3_bucket)
 
-    @mock_s3
-    def test_delete_bucket_if_not_bucket_exist(self, s3_bucket):
+    @mock_aws
+    def test_delete_bucket_if_bucket_not_exist(self, s3_bucket):
         # assert if exception is raised if bucket not present
-        mock_hook = S3Hook()
+        mock_hook = S3Hook(aws_conn_id=None)
         with pytest.raises(ClientError) as ctx:
-            assert mock_hook.delete_bucket(bucket_name=s3_bucket, 
force_delete=True)
+            assert 
mock_hook.delete_bucket(bucket_name="not-exists-bucket-name", force_delete=True)
         assert ctx.value.response["Error"]["Code"] == "NoSuchBucket"
 
     @pytest.mark.db_test
@@ -1241,7 +1241,7 @@ class TestAwsS3Hook:
             second_call_extra_args,
         ]
 
-    @mock_s3
+    @mock_aws
     def test_get_bucket_tagging_no_tags_raises_error(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
@@ -1249,14 +1249,14 @@ class TestAwsS3Hook:
         with pytest.raises(ClientError, match=r".*NoSuchTagSet.*"):
             hook.get_bucket_tagging(bucket_name="new_bucket")
 
-    @mock_s3
+    @mock_aws
     def test_get_bucket_tagging_no_bucket_raises_error(self):
         hook = S3Hook()
 
         with pytest.raises(ClientError, match=r".*NoSuchBucket.*"):
             hook.get_bucket_tagging(bucket_name="new_bucket")
 
-    @mock_s3
+    @mock_aws
     def test_put_bucket_tagging_with_valid_set(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
@@ -1265,7 +1265,7 @@ class TestAwsS3Hook:
 
         assert hook.get_bucket_tagging(bucket_name="new_bucket") == tag_set
 
-    @mock_s3
+    @mock_aws
     def test_put_bucket_tagging_with_dict(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
@@ -1274,7 +1274,7 @@ class TestAwsS3Hook:
 
         assert hook.get_bucket_tagging(bucket_name="new_bucket") == [{"Key": 
"Color", "Value": "Green"}]
 
-    @mock_s3
+    @mock_aws
     def test_put_bucket_tagging_with_pair(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
@@ -1285,7 +1285,7 @@ class TestAwsS3Hook:
 
         assert hook.get_bucket_tagging(bucket_name="new_bucket") == tag_set
 
-    @mock_s3
+    @mock_aws
     def test_put_bucket_tagging_with_pair_and_set(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
@@ -1299,7 +1299,7 @@ class TestAwsS3Hook:
         assert len(result) == 2
         assert result == expected
 
-    @mock_s3
+    @mock_aws
     def test_put_bucket_tagging_with_key_but_no_value_raises_error(self):
         hook = S3Hook()
 
@@ -1308,7 +1308,7 @@ class TestAwsS3Hook:
         with pytest.raises(ValueError):
             hook.put_bucket_tagging(bucket_name="new_bucket", key=key)
 
-    @mock_s3
+    @mock_aws
     def test_put_bucket_tagging_with_value_but_no_key_raises_error(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
@@ -1316,7 +1316,7 @@ class TestAwsS3Hook:
         with pytest.raises(ValueError):
             hook.put_bucket_tagging(bucket_name="new_bucket", value=value)
 
-    @mock_s3
+    @mock_aws
     def test_put_bucket_tagging_with_key_and_set_raises_error(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
@@ -1325,7 +1325,7 @@ class TestAwsS3Hook:
         with pytest.raises(ValueError):
             hook.put_bucket_tagging(bucket_name="new_bucket", key=key, 
tag_set=tag_set)
 
-    @mock_s3
+    @mock_aws
     def test_put_bucket_tagging_with_value_and_set_raises_error(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
@@ -1334,7 +1334,7 @@ class TestAwsS3Hook:
         with pytest.raises(ValueError):
             hook.put_bucket_tagging(bucket_name="new_bucket", value=value, 
tag_set=tag_set)
 
-    @mock_s3
+    @mock_aws
     def test_put_bucket_tagging_when_tags_exist_overwrites(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
@@ -1350,7 +1350,7 @@ class TestAwsS3Hook:
         assert len(result) == 1
         assert result == new_tag_set
 
-    @mock_s3
+    @mock_aws
     def test_delete_bucket_tagging(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
@@ -1362,7 +1362,7 @@ class TestAwsS3Hook:
         with pytest.raises(ClientError, match=r".*NoSuchTagSet.*"):
             hook.get_bucket_tagging(bucket_name="new_bucket")
 
-    @mock_s3
+    @mock_aws
     def test_delete_bucket_tagging_with_no_tags(self):
         hook = S3Hook()
         hook.create_bucket(bucket_name="new_bucket")
diff --git a/tests/providers/amazon/aws/hooks/test_sagemaker.py 
b/tests/providers/amazon/aws/hooks/test_sagemaker.py
index 39c9ed3c79..7c3549c9b7 100644
--- a/tests/providers/amazon/aws/hooks/test_sagemaker.py
+++ b/tests/providers/amazon/aws/hooks/test_sagemaker.py
@@ -25,7 +25,7 @@ from unittest.mock import patch
 import pytest
 from botocore.exceptions import ClientError
 from dateutil.tz import tzlocal
-from moto import mock_sagemaker
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
@@ -689,14 +689,14 @@ class TestSageMakerHook:
         ret = hook.count_processing_jobs_by_name("existing_job")
         assert ret == 0
 
-    @mock_sagemaker
+    @mock_aws
     def test_delete_model(self):
         hook = SageMakerHook()
         with patch.object(hook.conn, "delete_model") as mock_delete:
             hook.delete_model(model_name="test")
         mock_delete.assert_called_once_with(ModelName="test")
 
-    @mock_sagemaker
+    @mock_aws
     def test_delete_model_when_not_exist(self):
         hook = SageMakerHook()
         with pytest.raises(ClientError) as raised_exception:
diff --git a/tests/providers/amazon/aws/hooks/test_secrets_manager.py 
b/tests/providers/amazon/aws/hooks/test_secrets_manager.py
index c5af2001ec..df63385752 100644
--- a/tests/providers/amazon/aws/hooks/test_secrets_manager.py
+++ b/tests/providers/amazon/aws/hooks/test_secrets_manager.py
@@ -20,12 +20,12 @@ from __future__ import annotations
 import base64
 import json
 
-from moto import mock_secretsmanager
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.secrets_manager import 
SecretsManagerHook
 
 
-@mock_secretsmanager
+@mock_aws
 class TestSecretsManagerHook:
     def test_get_conn_returns_a_boto3_connection(self):
         hook = SecretsManagerHook(aws_conn_id="aws_default")
diff --git a/tests/providers/amazon/aws/hooks/test_ses.py 
b/tests/providers/amazon/aws/hooks/test_ses.py
index ba0bff7475..023b1f58fa 100644
--- a/tests/providers/amazon/aws/hooks/test_ses.py
+++ b/tests/providers/amazon/aws/hooks/test_ses.py
@@ -18,20 +18,20 @@ from __future__ import annotations
 
 import boto3
 import pytest
-from moto import mock_ses
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.ses import SesHook
 
 boto3.setup_default_session()
 
 
-@mock_ses
+@mock_aws
 def test_get_conn():
     hook = SesHook(aws_conn_id="aws_default")
     assert hook.get_conn() is not None
 
 
-@mock_ses
+@mock_aws
 @pytest.mark.parametrize(
     "to", ["t...@domain.com", ["t...@domain.com", "t...@domain.com"], 
"t...@domain.com,t...@domain.com"]
 )
diff --git a/tests/providers/amazon/aws/hooks/test_sns.py 
b/tests/providers/amazon/aws/hooks/test_sns.py
index 16bfa2489a..d204a56a5d 100644
--- a/tests/providers/amazon/aws/hooks/test_sns.py
+++ b/tests/providers/amazon/aws/hooks/test_sns.py
@@ -18,12 +18,12 @@
 from __future__ import annotations
 
 import pytest
-from moto import mock_sns
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.sns import SnsHook
 
 
-@mock_sns
+@mock_aws
 class TestSnsHook:
     def test_get_conn_returns_a_boto3_connection(self):
         hook = SnsHook(aws_conn_id="aws_default")
diff --git a/tests/providers/amazon/aws/hooks/test_sqs.py 
b/tests/providers/amazon/aws/hooks/test_sqs.py
index f2aaec16a4..218e1fdc68 100644
--- a/tests/providers/amazon/aws/hooks/test_sqs.py
+++ b/tests/providers/amazon/aws/hooks/test_sqs.py
@@ -17,13 +17,13 @@
 # under the License.
 from __future__ import annotations
 
-from moto import mock_sqs
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
 
 
 class TestSqsHook:
-    @mock_sqs
+    @mock_aws
     def test_get_conn(self):
         hook = SqsHook(aws_conn_id="aws_default")
         assert hook.get_conn() is not None
diff --git a/tests/providers/amazon/aws/hooks/test_ssm.py 
b/tests/providers/amazon/aws/hooks/test_ssm.py
index f47e0a5d71..a083f36b9d 100644
--- a/tests/providers/amazon/aws/hooks/test_ssm.py
+++ b/tests/providers/amazon/aws/hooks/test_ssm.py
@@ -21,7 +21,7 @@ from unittest import mock
 
 import botocore.exceptions
 import pytest
-from moto import mock_ssm
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.ssm import SsmHook
 
@@ -43,7 +43,7 @@ class TestSsmHook:
         ],
     )
     def setup_tests(self, request):
-        with mock_ssm():
+        with mock_aws():
             self.hook = SsmHook(region_name=REGION)
             self.param_type = request.param
             self.hook.conn.put_parameter(
diff --git a/tests/providers/amazon/aws/hooks/test_step_function.py 
b/tests/providers/amazon/aws/hooks/test_step_function.py
index a2a9e7b004..393d27715b 100644
--- a/tests/providers/amazon/aws/hooks/test_step_function.py
+++ b/tests/providers/amazon/aws/hooks/test_step_function.py
@@ -17,12 +17,12 @@
 # under the License.
 from __future__ import annotations
 
-from moto import mock_stepfunctions
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook
 
 
-@mock_stepfunctions
+@mock_aws
 class TestStepFunctionHook:
     def test_get_conn_returns_a_boto3_connection(self):
         hook = StepFunctionHook(aws_conn_id="aws_default")
diff --git a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py 
b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py
index d9cea48579..7ceb423481 100644
--- a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py
+++ b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py
@@ -24,8 +24,8 @@ from unittest import mock
 from unittest.mock import ANY, Mock, call
 
 import boto3
-import moto
 import pytest
+from moto import mock_aws
 from watchtower import CloudWatchLogHandler
 
 from airflow.models import DAG, DagRun, TaskInstance
@@ -44,9 +44,9 @@ def get_time_str(time_in_milliseconds):
     return dt_time.strftime("%Y-%m-%d %H:%M:%S,000")
 
 
-@pytest.fixture(autouse=True, scope="module")
+@pytest.fixture(autouse=True)
 def logmock():
-    with moto.mock_logs():
+    with mock_aws():
         yield
 
 
@@ -83,8 +83,6 @@ class TestCloudwatchTaskHandler:
         self.remote_log_stream = 
(f"{dag_id}/{task_id}/{date.isoformat()}/{self.ti.try_number}.log").replace(
             ":", "_"
         )
-
-        moto.moto_api._internal.models.moto_api_backend.reset()
         self.conn = boto3.client("logs", region_name=self.region_name)
 
         yield
diff --git a/tests/providers/amazon/aws/log/test_s3_task_handler.py 
b/tests/providers/amazon/aws/log/test_s3_task_handler.py
index 67d5d3257f..a0dbe646af 100644
--- a/tests/providers/amazon/aws/log/test_s3_task_handler.py
+++ b/tests/providers/amazon/aws/log/test_s3_task_handler.py
@@ -23,9 +23,9 @@ import os
 from unittest import mock
 
 import boto3
-import moto
 import pytest
 from botocore.exceptions import ClientError
+from moto import mock_aws
 
 from airflow.models import DAG, DagRun, TaskInstance
 from airflow.operators.empty import EmptyOperator
@@ -37,9 +37,9 @@ from airflow.utils.timezone import datetime
 from tests.test_utils.config import conf_vars
 
 
-@pytest.fixture(autouse=True, scope="module")
+@pytest.fixture(autouse=True)
 def s3mock():
-    with moto.mock_s3():
+    with mock_aws():
         yield
 
 
@@ -72,9 +72,6 @@ class TestS3TaskHandler:
         self.ti.state = State.RUNNING
 
         self.conn = boto3.client("s3")
-        # We need to create the bucket since this is all in Moto's 'virtual'
-        # AWS account
-        moto.moto_api._internal.models.moto_api_backend.reset()
         self.conn.create_bucket(Bucket="bucket")
 
         yield
diff --git a/tests/providers/amazon/aws/operators/test_datasync.py 
b/tests/providers/amazon/aws/operators/test_datasync.py
index fa666dd476..d4b30d073a 100644
--- a/tests/providers/amazon/aws/operators/test_datasync.py
+++ b/tests/providers/amazon/aws/operators/test_datasync.py
@@ -20,7 +20,7 @@ from unittest import mock
 
 import boto3
 import pytest
-from moto import mock_datasync
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException
 from airflow.models import DAG, DagRun, TaskInstance
@@ -66,7 +66,7 @@ MOCK_DATA = {
 }
 
 
-@mock_datasync
+@mock_aws
 @mock.patch.object(DataSyncHook, "get_conn")
 class DataSyncTestCaseBase:
     # Runs once for each test
@@ -144,7 +144,7 @@ def test_generic_params():
     assert op.hook.wait_interval_seconds is not None
 
 
-@mock_datasync
+@mock_aws
 @mock.patch.object(DataSyncHook, "get_conn")
 class TestDataSyncOperatorCreate(DataSyncTestCaseBase):
     def set_up_operator(
@@ -356,7 +356,7 @@ class TestDataSyncOperatorCreate(DataSyncTestCaseBase):
         mock_get_conn.assert_called()
 
 
-@mock_datasync
+@mock_aws
 @mock.patch.object(DataSyncHook, "get_conn")
 class TestDataSyncOperatorGetTasks(DataSyncTestCaseBase):
     def set_up_operator(
@@ -550,7 +550,7 @@ class TestDataSyncOperatorGetTasks(DataSyncTestCaseBase):
         mock_get_conn.assert_called()
 
 
-@mock_datasync
+@mock_aws
 @mock.patch.object(DataSyncHook, "get_conn")
 class TestDataSyncOperatorUpdate(DataSyncTestCaseBase):
     def set_up_operator(
@@ -649,7 +649,7 @@ class TestDataSyncOperatorUpdate(DataSyncTestCaseBase):
         mock_get_conn.assert_called()
 
 
-@mock_datasync
+@mock_aws
 @mock.patch.object(DataSyncHook, "get_conn")
 class TestDataSyncOperator(DataSyncTestCaseBase):
     def set_up_operator(
@@ -819,7 +819,7 @@ class TestDataSyncOperator(DataSyncTestCaseBase):
         mock_get_conn.assert_called()
 
 
-@mock_datasync
+@mock_aws
 @mock.patch.object(DataSyncHook, "get_conn")
 class TestDataSyncOperatorDelete(DataSyncTestCaseBase):
     def set_up_operator(self, task_id="test_datasync_delete_task_operator", 
task_arn="self"):
diff --git a/tests/providers/amazon/aws/operators/test_ec2.py 
b/tests/providers/amazon/aws/operators/test_ec2.py
index b11d72b714..8f8a755a84 100644
--- a/tests/providers/amazon/aws/operators/test_ec2.py
+++ b/tests/providers/amazon/aws/operators/test_ec2.py
@@ -18,7 +18,7 @@
 from __future__ import annotations
 
 import pytest
-from moto import mock_ec2
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook
@@ -61,7 +61,7 @@ class TestEC2CreateInstanceOperator(BaseEc2TestClass):
         assert ec2_operator.max_attempts == 20
         assert ec2_operator.poll_interval == 20
 
-    @mock_ec2
+    @mock_aws
     def test_create_instance(self):
         ec2_hook = EC2Hook()
         create_instance = EC2CreateInstanceOperator(
@@ -72,7 +72,7 @@ class TestEC2CreateInstanceOperator(BaseEc2TestClass):
 
         assert ec2_hook.get_instance_state(instance_id=instance_id[0]) == 
"running"
 
-    @mock_ec2
+    @mock_aws
     def test_create_multiple_instances(self):
         ec2_hook = EC2Hook()
         create_instances = EC2CreateInstanceOperator(
@@ -99,7 +99,7 @@ class TestEC2TerminateInstanceOperator(BaseEc2TestClass):
         assert ec2_operator.max_attempts == 20
         assert ec2_operator.poll_interval == 20
 
-    @mock_ec2
+    @mock_aws
     def test_terminate_instance(self):
         ec2_hook = EC2Hook()
 
@@ -118,7 +118,7 @@ class TestEC2TerminateInstanceOperator(BaseEc2TestClass):
 
         assert ec2_hook.get_instance_state(instance_id=instance_id[0]) == 
"terminated"
 
-    @mock_ec2
+    @mock_aws
     def test_terminate_multiple_instances(self):
         ec2_hook = EC2Hook()
         create_instances = EC2CreateInstanceOperator(
@@ -156,7 +156,7 @@ class TestEC2StartInstanceOperator(BaseEc2TestClass):
         assert ec2_operator.region_name == "region-test"
         assert ec2_operator.check_interval == 3
 
-    @mock_ec2
+    @mock_aws
     def test_start_instance(self):
         # create instance
         ec2_hook = EC2Hook()
@@ -191,7 +191,7 @@ class TestEC2StopInstanceOperator(BaseEc2TestClass):
         assert ec2_operator.region_name == "region-test"
         assert ec2_operator.check_interval == 3
 
-    @mock_ec2
+    @mock_aws
     def test_stop_instance(self):
         # create instance
         ec2_hook = EC2Hook()
@@ -220,7 +220,7 @@ class TestEC2HibernateInstanceOperator(BaseEc2TestClass):
         assert ec2_operator.task_id == "task_test"
         assert ec2_operator.instance_ids == "i-123abc"
 
-    @mock_ec2
+    @mock_aws
     def test_hibernate_instance(self):
         # create instance
         ec2_hook = EC2Hook()
@@ -240,7 +240,7 @@ class TestEC2HibernateInstanceOperator(BaseEc2TestClass):
         # assert instance state is stopped
         assert ec2_hook.get_instance_state(instance_id=instance_id[0]) == 
"stopped"
 
-    @mock_ec2
+    @mock_aws
     def test_hibernate_multiple_instances(self):
         ec2_hook = EC2Hook()
         create_instances = EC2CreateInstanceOperator(
@@ -263,7 +263,7 @@ class TestEC2HibernateInstanceOperator(BaseEc2TestClass):
         for id in instance_ids:
             assert ec2_hook.get_instance_state(instance_id=id) == "stopped"
 
-    @mock_ec2
+    @mock_aws
     def test_cannot_hibernate_instance(self):
         # create instance
         ec2_hook = EC2Hook()
@@ -289,7 +289,7 @@ class TestEC2HibernateInstanceOperator(BaseEc2TestClass):
         # assert instance state is running
         assert ec2_hook.get_instance_state(instance_id=instance_id[0]) == 
"running"
 
-    @mock_ec2
+    @mock_aws
     def test_cannot_hibernate_some_instances(self):
         # create instance
         ec2_hook = EC2Hook()
@@ -332,7 +332,7 @@ class TestEC2RebootInstanceOperator(BaseEc2TestClass):
         assert ec2_operator.task_id == "task_test"
         assert ec2_operator.instance_ids == "i-123abc"
 
-    @mock_ec2
+    @mock_aws
     def test_reboot_instance(self):
         # create instance
         ec2_hook = EC2Hook()
@@ -351,7 +351,7 @@ class TestEC2RebootInstanceOperator(BaseEc2TestClass):
         # assert instance state is running
         assert ec2_hook.get_instance_state(instance_id=instance_id[0]) == 
"running"
 
-    @mock_ec2
+    @mock_aws
     def test_reboot_multiple_instances(self):
         ec2_hook = EC2Hook()
         create_instances = EC2CreateInstanceOperator(
diff --git a/tests/providers/amazon/aws/operators/test_glue_databrew.py 
b/tests/providers/amazon/aws/operators/test_glue_databrew.py
index 3bc5a9c9e6..53a323e6f0 100644
--- a/tests/providers/amazon/aws/operators/test_glue_databrew.py
+++ b/tests/providers/amazon/aws/operators/test_glue_databrew.py
@@ -21,7 +21,7 @@ from typing import Generator
 from unittest import mock
 
 import pytest
-from moto import mock_databrew
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.glue_databrew import GlueDataBrewHook
 from airflow.providers.amazon.aws.operators.glue_databrew import 
GlueDataBrewStartJobOperator
@@ -31,7 +31,7 @@ JOB_NAME = "test_job"
 
 @pytest.fixture
 def hook() -> Generator[GlueDataBrewHook, None, None]:
-    with mock_databrew():
+    with mock_aws():
         yield GlueDataBrewHook(aws_conn_id="aws_default")
 
 
diff --git a/tests/providers/amazon/aws/operators/test_neptune.py 
b/tests/providers/amazon/aws/operators/test_neptune.py
index af7dc289d4..69b3d1e09a 100644
--- a/tests/providers/amazon/aws/operators/test_neptune.py
+++ b/tests/providers/amazon/aws/operators/test_neptune.py
@@ -21,7 +21,7 @@ from typing import Generator
 from unittest import mock
 
 import pytest
-from moto import mock_neptune
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.neptune import NeptuneHook
 from airflow.providers.amazon.aws.operators.neptune import (
@@ -36,7 +36,7 @@ EXPECTED_RESPONSE = {"db_cluster_id": CLUSTER_ID}
 
 @pytest.fixture
 def hook() -> Generator[NeptuneHook, None, None]:
-    with mock_neptune():
+    with mock_aws():
         yield NeptuneHook(aws_conn_id="aws_default")
 
 
diff --git a/tests/providers/amazon/aws/operators/test_rds.py 
b/tests/providers/amazon/aws/operators/test_rds.py
index 7d7770fd9f..fd464019dd 100644
--- a/tests/providers/amazon/aws/operators/test_rds.py
+++ b/tests/providers/amazon/aws/operators/test_rds.py
@@ -23,7 +23,7 @@ from unittest import mock
 from unittest.mock import patch
 
 import pytest
-from moto import mock_rds
+from moto import mock_aws
 
 from airflow.exceptions import TaskDeferred
 from airflow.models import DAG
@@ -171,7 +171,7 @@ class TestRdsCreateDbSnapshotOperator:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_create_db_instance_snapshot(self):
         _create_db_instance(self.hook)
         instance_snapshot_operator = RdsCreateDbSnapshotOperator(
@@ -191,7 +191,7 @@ class TestRdsCreateDbSnapshotOperator:
         assert instance_snapshots
         assert len(instance_snapshots) == 1
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_db_snapshot_state")
     def test_create_db_instance_snapshot_no_wait(self, mock_wait):
         _create_db_instance(self.hook)
@@ -214,7 +214,7 @@ class TestRdsCreateDbSnapshotOperator:
         assert len(instance_snapshots) == 1
         mock_wait.assert_not_called()
 
-    @mock_rds
+    @mock_aws
     def test_create_db_cluster_snapshot(self):
         _create_db_cluster(self.hook)
         cluster_snapshot_operator = RdsCreateDbSnapshotOperator(
@@ -234,7 +234,7 @@ class TestRdsCreateDbSnapshotOperator:
         assert cluster_snapshots
         assert len(cluster_snapshots) == 1
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_db_cluster_snapshot_state")
     def test_create_db_cluster_snapshot_no_wait(self, mock_wait):
         _create_db_cluster(self.hook)
@@ -270,7 +270,7 @@ class TestRdsCopyDbSnapshotOperator:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_copy_db_instance_snapshot(self):
         _create_db_instance(self.hook)
         _create_db_instance_snapshot(self.hook)
@@ -291,7 +291,7 @@ class TestRdsCopyDbSnapshotOperator:
         assert instance_snapshots
         assert len(instance_snapshots) == 1
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_db_snapshot_state")
     def test_copy_db_instance_snapshot_no_wait(self, mock_await_status):
         _create_db_instance(self.hook)
@@ -315,7 +315,7 @@ class TestRdsCopyDbSnapshotOperator:
         assert len(instance_snapshots) == 1
         mock_await_status.assert_not_called()
 
-    @mock_rds
+    @mock_aws
     def test_copy_db_cluster_snapshot(self):
         _create_db_cluster(self.hook)
         _create_db_cluster_snapshot(self.hook)
@@ -338,7 +338,7 @@ class TestRdsCopyDbSnapshotOperator:
         assert cluster_snapshots
         assert len(cluster_snapshots) == 1
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_db_snapshot_state")
     def test_copy_db_cluster_snapshot_no_wait(self, mock_await_status):
         _create_db_cluster(self.hook)
@@ -376,7 +376,7 @@ class TestRdsDeleteDbSnapshotOperator:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_delete_db_instance_snapshot(self):
         _create_db_instance(self.hook)
         _create_db_instance_snapshot(self.hook)
@@ -396,7 +396,7 @@ class TestRdsDeleteDbSnapshotOperator:
         with pytest.raises(self.hook.conn.exceptions.ClientError):
             self.hook.conn.describe_db_snapshots(DBSnapshotIdentifier=DB_INSTANCE_SNAPSHOT)
 
-    @mock_rds
+    @mock_aws
     def test_delete_db_instance_snapshot_no_wait(self):
         """
         Check that the operator does not wait for the DB instance snapshot delete operation to complete when
@@ -421,7 +421,7 @@ class TestRdsDeleteDbSnapshotOperator:
         with pytest.raises(self.hook.conn.exceptions.ClientError):
             self.hook.conn.describe_db_snapshots(DBSnapshotIdentifier=DB_INSTANCE_SNAPSHOT)
 
-    @mock_rds
+    @mock_aws
     def test_delete_db_cluster_snapshot(self):
         _create_db_cluster(self.hook)
         _create_db_cluster_snapshot(self.hook)
@@ -441,7 +441,7 @@ class TestRdsDeleteDbSnapshotOperator:
         with pytest.raises(self.hook.conn.exceptions.ClientError):
             self.hook.conn.describe_db_cluster_snapshots(DBClusterSnapshotIdentifier=DB_CLUSTER_SNAPSHOT)
 
-    @mock_rds
+    @mock_aws
     def test_delete_db_cluster_snapshot_no_wait(self):
         """
         Check that the operator does not wait for the DB cluster snapshot 
delete operation to complete when
@@ -479,7 +479,7 @@ class TestRdsStartExportTaskOperator:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_start_export_task(self):
         _create_db_instance(self.hook)
         _create_db_instance_snapshot(self.hook)
@@ -504,7 +504,7 @@ class TestRdsStartExportTaskOperator:
         assert len(export_tasks) == 1
         assert export_tasks[0]["Status"] == "complete"
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_export_task_state")
     def test_start_export_task_no_wait(self, mock_await_status):
         _create_db_instance(self.hook)
@@ -545,7 +545,7 @@ class TestRdsCancelExportTaskOperator:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_cancel_export_task(self):
         _create_db_instance(self.hook)
         _create_db_instance_snapshot(self.hook)
@@ -567,7 +567,7 @@ class TestRdsCancelExportTaskOperator:
         assert len(export_tasks) == 1
         assert export_tasks[0]["Status"] == "canceled"
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_export_task_state")
     def test_cancel_export_task_no_wait(self, mock_await_status):
         _create_db_instance(self.hook)
@@ -605,7 +605,7 @@ class TestRdsCreateEventSubscriptionOperator:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_create_event_subscription(self):
         _create_db_instance(self.hook)
 
@@ -628,7 +628,7 @@ class TestRdsCreateEventSubscriptionOperator:
         assert len(subscriptions) == 1
         assert subscriptions[0]["Status"] == "active"
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_event_subscription_state")
     def test_create_event_subscription_no_wait(self, mock_await_status):
         _create_db_instance(self.hook)
@@ -667,7 +667,7 @@ class TestRdsDeleteEventSubscriptionOperator:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_delete_event_subscription(self):
         _create_event_subscription(self.hook)
 
@@ -696,7 +696,7 @@ class TestRdsCreateDbInstanceOperator:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_create_db_instance(self):
         create_db_instance_operator = RdsCreateDbInstanceOperator(
             task_id="test_create_db_instance",
@@ -719,7 +719,7 @@ class TestRdsCreateDbInstanceOperator:
         assert len(db_instances) == 1
         assert db_instances[0]["DBInstanceStatus"] == "available"
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_db_instance_state")
     def test_create_db_instance_no_wait(self, mock_await_status):
         create_db_instance_operator = RdsCreateDbInstanceOperator(
@@ -758,7 +758,7 @@ class TestRdsDeleteDbInstanceOperator:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_delete_db_instance(self):
         _create_db_instance(self.hook)
 
@@ -777,7 +777,7 @@ class TestRdsDeleteDbInstanceOperator:
         with pytest.raises(self.hook.conn.exceptions.ClientError):
             self.hook.conn.describe_db_instances(DBInstanceIdentifier=DB_INSTANCE_NAME)
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_db_instance_state")
     def test_delete_db_instance_no_wait(self, mock_await_status):
         _create_db_instance(self.hook)
@@ -812,7 +812,7 @@ class TestRdsStopDbOperator:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_db_instance_state")
     def test_stop_db_instance(self, mock_await_status):
         _create_db_instance(self.hook)
@@ -824,7 +824,7 @@ class TestRdsStopDbOperator:
         assert status == "stopped"
         mock_await_status.assert_called()
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_db_instance_state")
     def test_stop_db_instance_no_wait(self, mock_await_status):
         _create_db_instance(self.hook)
@@ -851,7 +851,7 @@ class TestRdsStopDbOperator:
 
         assert isinstance(defer.value.trigger, RdsDbStoppedTrigger)
 
-    @mock_rds
+    @mock_aws
     def test_stop_db_instance_create_snapshot(self):
         _create_db_instance(self.hook)
         stop_db_instance = RdsStopDbOperator(
@@ -871,7 +871,7 @@ class TestRdsStopDbOperator:
         assert instance_snapshots
         assert len(instance_snapshots) == 1
 
-    @mock_rds
+    @mock_aws
     @patch.object(RdsHook, "wait_for_db_cluster_state")
     def test_stop_db_cluster(self, mock_await_status):
         _create_db_cluster(self.hook)
@@ -886,7 +886,7 @@ class TestRdsStopDbOperator:
         assert status == "stopped"
         mock_await_status.assert_called()
 
-    @mock_rds
+    @mock_aws
     def test_stop_db_cluster_create_snapshot_logs_warning_message(self, caplog):
         _create_db_cluster(self.hook)
         stop_db_cluster = RdsStopDbOperator(
@@ -916,7 +916,7 @@ class TestRdsStartDbOperator:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_start_db_instance(self):
         _create_db_instance(self.hook)
         self.hook.conn.stop_db_instance(DBInstanceIdentifier=DB_INSTANCE_NAME)
@@ -934,7 +934,7 @@ class TestRdsStartDbOperator:
         status_after = result_after["DBInstances"][0]["DBInstanceStatus"]
         assert status_after == "available"
 
-    @mock_rds
+    @mock_aws
     def test_start_db_cluster(self):
         _create_db_cluster(self.hook)
         self.hook.conn.stop_db_cluster(DBClusterIdentifier=DB_CLUSTER_NAME)
diff --git a/tests/providers/amazon/aws/operators/test_s3.py b/tests/providers/amazon/aws/operators/test_s3.py
index 0ec3de1fb4..a2b381e50b 100644
--- a/tests/providers/amazon/aws/operators/test_s3.py
+++ b/tests/providers/amazon/aws/operators/test_s3.py
@@ -27,7 +27,7 @@ from unittest import mock
 
 import boto3
 import pytest
-from moto import mock_s3
+from moto import mock_aws
 from openlineage.client.facet import (
     LifecycleStateChange,
     LifecycleStateChangeDatasetFacet,
@@ -64,7 +64,7 @@ class TestS3CreateBucketOperator:
             bucket_name=BUCKET_NAME,
         )
 
-    @mock_s3
+    @mock_aws
     @mock.patch.object(S3Hook, "create_bucket")
     @mock.patch.object(S3Hook, "check_for_bucket")
     def test_execute_if_bucket_exist(self, mock_check_for_bucket, mock_create_bucket):
@@ -74,7 +74,7 @@ class TestS3CreateBucketOperator:
         mock_check_for_bucket.assert_called_once_with(BUCKET_NAME)
         mock_create_bucket.assert_not_called()
 
-    @mock_s3
+    @mock_aws
     @mock.patch.object(S3Hook, "create_bucket")
     @mock.patch.object(S3Hook, "check_for_bucket")
     def test_execute_if_not_bucket_exist(self, mock_check_for_bucket, mock_create_bucket):
@@ -92,7 +92,7 @@ class TestS3DeleteBucketOperator:
             bucket_name=BUCKET_NAME,
         )
 
-    @mock_s3
+    @mock_aws
     @mock.patch.object(S3Hook, "delete_bucket")
     @mock.patch.object(S3Hook, "check_for_bucket")
     def test_execute_if_bucket_exist(self, mock_check_for_bucket, mock_delete_bucket):
@@ -102,7 +102,7 @@ class TestS3DeleteBucketOperator:
         mock_check_for_bucket.assert_called_once_with(BUCKET_NAME)
         mock_delete_bucket.assert_called_once_with(bucket_name=BUCKET_NAME, force_delete=False)
 
-    @mock_s3
+    @mock_aws
     @mock.patch.object(S3Hook, "delete_bucket")
     @mock.patch.object(S3Hook, "check_for_bucket")
     def test_execute_if_not_bucket_exist(self, mock_check_for_bucket, mock_delete_bucket):
@@ -120,7 +120,7 @@ class TestS3GetBucketTaggingOperator:
             bucket_name=BUCKET_NAME,
         )
 
-    @mock_s3
+    @mock_aws
     @mock.patch.object(S3Hook, "get_bucket_tagging")
     @mock.patch.object(S3Hook, "check_for_bucket")
     def test_execute_if_bucket_exist(self, mock_check_for_bucket, get_bucket_tagging):
@@ -130,7 +130,7 @@ class TestS3GetBucketTaggingOperator:
         mock_check_for_bucket.assert_called_once_with(BUCKET_NAME)
         get_bucket_tagging.assert_called_once_with(BUCKET_NAME)
 
-    @mock_s3
+    @mock_aws
     @mock.patch.object(S3Hook, "get_bucket_tagging")
     @mock.patch.object(S3Hook, "check_for_bucket")
     def test_execute_if_not_bucket_exist(self, mock_check_for_bucket, get_bucket_tagging):
@@ -149,7 +149,7 @@ class TestS3PutBucketTaggingOperator:
             bucket_name=BUCKET_NAME,
         )
 
-    @mock_s3
+    @mock_aws
     @mock.patch.object(S3Hook, "put_bucket_tagging")
     @mock.patch.object(S3Hook, "check_for_bucket")
     def test_execute_if_bucket_exist(self, mock_check_for_bucket, put_bucket_tagging):
@@ -161,7 +161,7 @@ class TestS3PutBucketTaggingOperator:
             key=None, value=None, tag_set=TAG_SET, bucket_name=BUCKET_NAME
         )
 
-    @mock_s3
+    @mock_aws
     @mock.patch.object(S3Hook, "put_bucket_tagging")
     @mock.patch.object(S3Hook, "check_for_bucket")
     def test_execute_if_not_bucket_exist(self, mock_check_for_bucket, put_bucket_tagging):
@@ -179,7 +179,7 @@ class TestS3DeleteBucketTaggingOperator:
             bucket_name=BUCKET_NAME,
         )
 
-    @mock_s3
+    @mock_aws
     @mock.patch.object(S3Hook, "delete_bucket_tagging")
     @mock.patch.object(S3Hook, "check_for_bucket")
     def test_execute_if_bucket_exist(self, mock_check_for_bucket, delete_bucket_tagging):
@@ -189,7 +189,7 @@ class TestS3DeleteBucketTaggingOperator:
         mock_check_for_bucket.assert_called_once_with(BUCKET_NAME)
         delete_bucket_tagging.assert_called_once_with(BUCKET_NAME)
 
-    @mock_s3
+    @mock_aws
     @mock.patch.object(S3Hook, "delete_bucket_tagging")
     @mock.patch.object(S3Hook, "check_for_bucket")
     def test_execute_if_not_bucket_exist(self, mock_check_for_bucket, delete_bucket_tagging):
@@ -221,7 +221,7 @@ class TestS3FileTransformOperator:
 
     @mock.patch("subprocess.Popen")
     @mock.patch.object(S3FileTransformOperator, "log")
-    @mock_s3
+    @mock_aws
     def test_execute_with_transform_script(self, mock_log, mock_popen):
         process_output = [b"Foo", b"Bar", b"Baz"]
         self.mock_process(mock_popen, process_output=process_output)
@@ -241,7 +241,7 @@ class TestS3FileTransformOperator:
         )
 
     @mock.patch("subprocess.Popen")
-    @mock_s3
+    @mock_aws
     def test_execute_with_failing_transform_script(self, mock_popen):
         self.mock_process(mock_popen, return_code=42)
         input_path, output_path = self.s3_paths()
@@ -260,7 +260,7 @@ class TestS3FileTransformOperator:
         assert "Transform script failed: 42" == str(ctx.value)
 
     @mock.patch("subprocess.Popen")
-    @mock_s3
+    @mock_aws
     def test_execute_with_transform_script_args(self, mock_popen):
         self.mock_process(mock_popen, process_output=[b"Foo", b"Bar", b"Baz"])
         input_path, output_path = self.s3_paths()
@@ -279,7 +279,7 @@ class TestS3FileTransformOperator:
         assert script_args == mock_popen.call_args.args[0][3:]
 
     @mock.patch("airflow.providers.amazon.aws.hooks.s3.S3Hook.select_key", return_value="input")
-    @mock_s3
+    @mock_aws
     def test_execute_with_select_expression(self, mock_select_key):
         input_path, output_path = self.s3_paths()
         select_expression = "SELECT * FROM s3object s"
@@ -388,7 +388,7 @@ class TestS3CopyObjectOperator:
         self.dest_bucket = "bucket2"
         self.dest_key = "path2/data_copy.txt"
 
-    @mock_s3
+    @mock_aws
     def test_s3_copy_object_arg_combination_1(self):
         conn = boto3.client("s3")
         conn.create_bucket(Bucket=self.source_bucket)
@@ -413,7 +413,7 @@ class TestS3CopyObjectOperator:
         # the object found should be consistent with dest_key specified earlier
         assert objects_in_dest_bucket["Contents"][0]["Key"] == self.dest_key
 
-    @mock_s3
+    @mock_aws
     def test_s3_copy_object_arg_combination_2(self):
         conn = boto3.client("s3")
         conn.create_bucket(Bucket=self.source_bucket)
@@ -488,7 +488,7 @@ class TestS3CopyObjectOperator:
         assert lineage.outputs[0] == expected_output
 
 
-@mock_s3
+@mock_aws
 class TestS3DeleteObjectsOperator:
     def test_s3_delete_single_object(self):
         bucket = "testbucket"
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py b/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py
index cb44b459b4..cde4944440 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py
+++ b/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py
@@ -21,7 +21,7 @@ from typing import Generator
 from unittest import mock
 
 import pytest
-from moto import mock_sagemaker
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.sagemaker import SageMakerHook
 from airflow.providers.amazon.aws.operators.sagemaker import (
@@ -38,7 +38,7 @@ ROLE_ARN = "arn:aws:iam:role/role"
 
 @pytest.fixture
 def hook() -> Generator[SageMakerHook, None, None]:
-    with mock_sagemaker():
+    with mock_aws():
         yield SageMakerHook(aws_conn_id="aws_default")
 
 
diff --git a/tests/providers/amazon/aws/operators/test_sqs.py b/tests/providers/amazon/aws/operators/test_sqs.py
index 5f05c827be..ffca3110ce 100644
--- a/tests/providers/amazon/aws/operators/test_sqs.py
+++ b/tests/providers/amazon/aws/operators/test_sqs.py
@@ -21,7 +21,7 @@ from unittest import mock
 
 import pytest
 from botocore.exceptions import ClientError
-from moto import mock_sqs
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
 from airflow.providers.amazon.aws.operators.sqs import SqsPublishOperator
@@ -73,7 +73,7 @@ class TestSqsPublishOperator:
         assert op.hook._config is not None
         assert op.hook._config.read_timeout == 42
 
-    @mock_sqs
+    @mock_aws
     def test_execute_success(self, mocked_context):
         self.sqs_client.create_queue(QueueName=QUEUE_NAME)
 
@@ -89,7 +89,7 @@ class TestSqsPublishOperator:
         assert message["Messages"][0]["MessageId"] == result["MessageId"]
         assert message["Messages"][0]["Body"] == "hello"
 
-    @mock_sqs
+    @mock_aws
     def test_execute_failure_fifo_queue(self, mocked_context):
         self.sqs_client.create_queue(QueueName=FIFO_QUEUE_NAME, Attributes={"FifoQueue": "true"})
 
@@ -101,7 +101,7 @@ class TestSqsPublishOperator:
         with pytest.raises(ClientError, match=error_message):
             op.execute(mocked_context)
 
-    @mock_sqs
+    @mock_aws
     def test_execute_success_fifo_queue(self, mocked_context):
         self.sqs_client.create_queue(
             QueueName=FIFO_QUEUE_NAME, Attributes={"FifoQueue": "true", "ContentBasedDeduplication": "true"}
diff --git a/tests/providers/amazon/aws/secrets/test_secrets_manager.py b/tests/providers/amazon/aws/secrets/test_secrets_manager.py
index d23ce639ed..39f2d4710a 100644
--- a/tests/providers/amazon/aws/secrets/test_secrets_manager.py
+++ b/tests/providers/amazon/aws/secrets/test_secrets_manager.py
@@ -20,7 +20,7 @@ import json
 from unittest import mock
 
 import pytest
-from moto import mock_secretsmanager
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.secrets.secrets_manager import SecretsManagerBackend
 
@@ -32,7 +32,7 @@ class TestSecretsManagerBackend:
         conn = SecretsManagerBackend().get_connection("fake_conn")
         assert conn.host == "host"
 
-    @mock_secretsmanager
+    @mock_aws
     def test_get_conn_value_full_url_mode(self):
         secret_id = "airflow/connections/test_postgres"
         create_param = {
@@ -53,7 +53,7 @@ class TestSecretsManagerBackend:
             (False, "is%20url%20encoded", "not%2520idempotent"),
         ],
     )
-    @mock_secretsmanager
+    @mock_aws
     def test_get_connection_broken_field_mode_url_encoding(self, are_secret_values_urlencoded, login, host):
         secret_id = "airflow/connections/test_postgres"
         create_param = {
@@ -82,7 +82,7 @@ class TestSecretsManagerBackend:
         assert conn.conn_id == "test_postgres"
         assert conn.extra_dejson["foo"] == "bar"
 
-    @mock_secretsmanager
+    @mock_aws
     def test_get_connection_broken_field_mode_extra_allows_nested_json(self):
         secret_id = "airflow/connections/test_postgres"
         create_param = {
@@ -104,7 +104,7 @@ class TestSecretsManagerBackend:
         conn = secrets_manager_backend.get_connection(conn_id="test_postgres")
         assert conn.extra_dejson["foo"] == "bar"
 
-    @mock_secretsmanager
+    @mock_aws
     def test_get_conn_value_broken_field_mode(self):
         secret_id = "airflow/connections/test_postgres"
         create_param = {
@@ -122,7 +122,7 @@ class TestSecretsManagerBackend:
         returned_uri = conn.get_uri()
         assert "postgres://airflow:airflow@host:5432/airflow" == returned_uri
 
-    @mock_secretsmanager
+    @mock_aws
     def test_get_conn_value_broken_field_mode_extra_words_added(self):
         secret_id = "airflow/connections/test_postgres"
         create_param = {
@@ -142,7 +142,7 @@ class TestSecretsManagerBackend:
         returned_uri = conn.get_uri()
         assert "postgres://airflow:airflow@host:5432/airflow" == returned_uri
 
-    @mock_secretsmanager
+    @mock_aws
     def test_get_conn_value_non_existent_key(self):
         """
         Test that if the key with connection ID is not present,
@@ -162,7 +162,7 @@ class TestSecretsManagerBackend:
         assert secrets_manager_backend.get_conn_value(conn_id=conn_id) is None
         assert secrets_manager_backend.get_connection(conn_id=conn_id) is None
 
-    @mock_secretsmanager
+    @mock_aws
     def test_get_variable(self):
         secret_id = "airflow/variables/hello"
         create_param = {"Name": secret_id, "SecretString": "world"}
@@ -173,7 +173,7 @@ class TestSecretsManagerBackend:
         returned_uri = secrets_manager_backend.get_variable("hello")
         assert "world" == returned_uri
 
-    @mock_secretsmanager
+    @mock_aws
     def test_get_variable_non_existent_key(self):
         """
         Test that if Variable key is not present,
@@ -187,7 +187,7 @@ class TestSecretsManagerBackend:
 
         assert secrets_manager_backend.get_variable("test_mysql") is None
 
-    @mock_secretsmanager
+    @mock_aws
     def test_get_config_non_existent_key(self):
         """
         Test that if Config key is not present,
diff --git a/tests/providers/amazon/aws/secrets/test_systems_manager.py b/tests/providers/amazon/aws/secrets/test_systems_manager.py
index 1f40b560b3..d4c9c0f9bc 100644
--- a/tests/providers/amazon/aws/secrets/test_systems_manager.py
+++ b/tests/providers/amazon/aws/secrets/test_systems_manager.py
@@ -20,7 +20,7 @@ import json
 from unittest import mock
 
 import pytest
-from moto import mock_ssm
+from moto import mock_aws
 
 from airflow.configuration import initialize_secrets_backends
 from airflow.providers.amazon.aws.secrets.systems_manager import SystemsManagerParameterStoreBackend
@@ -55,7 +55,7 @@ class TestSsmSecrets:
         conn = SystemsManagerParameterStoreBackend().get_connection("fake_conn")
         assert conn.host == "host"
 
-    @mock_ssm
+    @mock_aws
     @pytest.mark.parametrize("ssm_value", [JSON_CONNECTION, URI_CONNECTION])
     def test_get_conn_value(self, ssm_value):
         param = {
@@ -80,7 +80,7 @@ class TestSsmSecrets:
         assert test_conn.schema == "my-schema"
         assert test_conn.extra_dejson == {"param1": "val1", "param2": "val2"}
 
-    @mock_ssm
+    @mock_aws
     def test_get_conn_value_non_existent_key(self):
         """
         Test that if the key with connection ID is not present in SSM,
@@ -99,7 +99,7 @@ class TestSsmSecrets:
         assert ssm_backend.get_conn_value(conn_id=conn_id) is None
         assert ssm_backend.get_connection(conn_id=conn_id) is None
 
-    @mock_ssm
+    @mock_aws
     def test_get_variable(self):
         param = {"Name": "/airflow/variables/hello", "Type": "String", "Value": "world"}
 
@@ -109,7 +109,7 @@ class TestSsmSecrets:
         returned_uri = ssm_backend.get_variable("hello")
         assert "world" == returned_uri
 
-    @mock_ssm
+    @mock_aws
     def test_get_config(self):
         param = {
             "Name": "/airflow/config/sql_alchemy_conn",
@@ -123,7 +123,7 @@ class TestSsmSecrets:
         returned_uri = ssm_backend.get_config("sql_alchemy_conn")
         assert "sqlite:///Users/test_user/airflow.db" == returned_uri
 
-    @mock_ssm
+    @mock_aws
     def test_get_variable_secret_string(self):
         param = {"Name": "/airflow/variables/hello", "Type": "SecureString", "Value": "world"}
         ssm_backend = SystemsManagerParameterStoreBackend()
@@ -131,7 +131,7 @@ class TestSsmSecrets:
         returned_uri = ssm_backend.get_variable("hello")
         assert "world" == returned_uri
 
-    @mock_ssm
+    @mock_aws
     def test_get_variable_non_existent_key(self):
         """
         Test that if Variable key is not present in SSM,
diff --git a/tests/providers/amazon/aws/sensors/test_cloud_formation.py b/tests/providers/amazon/aws/sensors/test_cloud_formation.py
index ca41774411..7edc252864 100644
--- a/tests/providers/amazon/aws/sensors/test_cloud_formation.py
+++ b/tests/providers/amazon/aws/sensors/test_cloud_formation.py
@@ -21,7 +21,7 @@ from unittest.mock import patch
 
 import boto3
 import pytest
-from moto import mock_cloudformation
+from moto import mock_aws
 
 from airflow.exceptions import AirflowSkipException
 from airflow.providers.amazon.aws.sensors.cloud_formation import (
@@ -37,7 +37,7 @@ def mocked_hook_client():
 
 
 class TestCloudFormationCreateStackSensor:
-    @mock_cloudformation
+    @mock_aws
     def setup_method(self, method):
         self.client = boto3.client("cloudformation", region_name="us-east-1")
 
@@ -65,7 +65,7 @@ class TestCloudFormationCreateStackSensor:
         assert sensor.hook._verify is None
         assert sensor.hook._config is None
 
-    @mock_cloudformation
+    @mock_aws
     def test_poke(self):
         self.client.create_stack(StackName="foobar", TemplateBody='{"Resources": {}}')
         op = CloudFormationCreateStackSensor(task_id="task", 
stack_name="foobar")
@@ -91,7 +91,7 @@ class TestCloudFormationCreateStackSensor:
 
 
 class TestCloudFormationDeleteStackSensor:
-    @mock_cloudformation
+    @mock_aws
     def setup_method(self, method):
         self.client = boto3.client("cloudformation", region_name="us-east-1")
 
@@ -119,7 +119,7 @@ class TestCloudFormationDeleteStackSensor:
         assert sensor.hook._verify is None
         assert sensor.hook._config is None
 
-    @mock_cloudformation
+    @mock_aws
     def test_poke(self):
         stack_name = "foobar"
         self.client.create_stack(StackName=stack_name, TemplateBody='{"Resources": {}}')
@@ -145,7 +145,7 @@ class TestCloudFormationDeleteStackSensor:
         with pytest.raises(expected_exception, match="Stack foo in bad state: bar"):
             op.poke({})
 
-    @mock_cloudformation
+    @mock_aws
     def test_poke_stack_does_not_exist(self):
         op = CloudFormationDeleteStackSensor(task_id="task", stack_name="foo")
         assert op.poke({})
diff --git a/tests/providers/amazon/aws/sensors/test_dynamodb.py b/tests/providers/amazon/aws/sensors/test_dynamodb.py
index 1508354657..d0a7eab8fc 100644
--- a/tests/providers/amazon/aws/sensors/test_dynamodb.py
+++ b/tests/providers/amazon/aws/sensors/test_dynamodb.py
@@ -18,7 +18,7 @@
 from __future__ import annotations
 
 import pytest
-from moto import mock_dynamodb
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
 from airflow.providers.amazon.aws.sensors.dynamodb import DynamoDBValueSensor
@@ -47,7 +47,7 @@ class TestDynamoDBValueSensor:
             sort_key_value=self.sk_value,
         )
 
-    @mock_dynamodb
+    @mock_aws
     def test_sensor_with_pk(self):
         hook = DynamoDBHook(table_name=self.table_name, table_keys=[self.pk_name])
 
@@ -65,7 +65,7 @@ class TestDynamoDBValueSensor:
 
         assert self.sensor.poke(None)
 
-    @mock_dynamodb
+    @mock_aws
     def test_sensor_with_pk_and_sk(self):
         hook = DynamoDBHook(table_name=self.table_name, table_keys=[self.pk_name, self.sk_name])
 
@@ -156,7 +156,7 @@ class TestDynamoDBMultipleValuesSensor:
         assert sensor.hook._verify is None
         assert sensor.hook._config is None
 
-    @mock_dynamodb
+    @mock_aws
     def test_sensor_with_pk(self):
         hook = DynamoDBHook(table_name=self.table_name, table_keys=[self.pk_name])
 
@@ -174,7 +174,7 @@ class TestDynamoDBMultipleValuesSensor:
 
         assert self.sensor.poke(None)
 
-    @mock_dynamodb
+    @mock_aws
     def test_sensor_with_pk_and_sk(self):
         hook = DynamoDBHook(table_name=self.table_name, table_keys=[self.pk_name, self.sk_name])
 
diff --git a/tests/providers/amazon/aws/sensors/test_ec2.py b/tests/providers/amazon/aws/sensors/test_ec2.py
index 3f5a6f09f7..228791b491 100644
--- a/tests/providers/amazon/aws/sensors/test_ec2.py
+++ b/tests/providers/amazon/aws/sensors/test_ec2.py
@@ -18,7 +18,7 @@
 from __future__ import annotations
 
 import pytest
-from moto import mock_ec2
+from moto import mock_aws
 
 from airflow.exceptions import TaskDeferred
 from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook
@@ -65,7 +65,7 @@ class TestEC2InstanceStateSensor:
         response = ec2_client.run_instances(MaxCount=1, MinCount=1, ImageId=images[0]["ImageId"])
         return response["Instances"][0]["InstanceId"]
 
-    @mock_ec2
+    @mock_aws
     def test_running(self):
         # create instance
         ec2_hook = EC2Hook()
@@ -86,7 +86,7 @@ class TestEC2InstanceStateSensor:
         # assert instance state is running
         assert start_sensor.poke(None)
 
-    @mock_ec2
+    @mock_aws
     def test_stopped(self):
         # create instance
         ec2_hook = EC2Hook()
@@ -107,7 +107,7 @@ class TestEC2InstanceStateSensor:
         # assert instance state is stopped
         assert stop_sensor.poke(None)
 
-    @mock_ec2
+    @mock_aws
     def test_terminated(self):
         # create instance
         ec2_hook = EC2Hook()
@@ -128,7 +128,7 @@ class TestEC2InstanceStateSensor:
         # assert instance state is terminated
         assert stop_sensor.poke(None)
 
-    @mock_ec2
+    @mock_aws
     def test_deferrable(self):
         # create instance
         ec2_hook = EC2Hook()
diff --git a/tests/providers/amazon/aws/sensors/test_glue_catalog_partition.py b/tests/providers/amazon/aws/sensors/test_glue_catalog_partition.py
index 2364a6021f..6dc4397056 100644
--- a/tests/providers/amazon/aws/sensors/test_glue_catalog_partition.py
+++ b/tests/providers/amazon/aws/sensors/test_glue_catalog_partition.py
@@ -20,7 +20,7 @@ from __future__ import annotations
 from unittest import mock
 
 import pytest
-from moto import mock_glue
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException, AirflowSkipException, TaskDeferred
 from airflow.providers.amazon.aws.hooks.glue_catalog import GlueCatalogHook
@@ -30,21 +30,21 @@ from airflow.providers.amazon.aws.sensors.glue_catalog_partition import GlueCata
 class TestGlueCatalogPartitionSensor:
     task_id = "test_glue_catalog_partition_sensor"
 
-    @mock_glue
+    @mock_aws
     @mock.patch.object(GlueCatalogHook, "check_for_partition")
     def test_poke(self, mock_check_for_partition):
         mock_check_for_partition.return_value = True
         op = GlueCatalogPartitionSensor(task_id=self.task_id, table_name="tbl")
         assert op.poke({})
 
-    @mock_glue
+    @mock_aws
     @mock.patch.object(GlueCatalogHook, "check_for_partition")
     def test_poke_false(self, mock_check_for_partition):
         mock_check_for_partition.return_value = False
         op = GlueCatalogPartitionSensor(task_id=self.task_id, table_name="tbl")
         assert not op.poke({})
 
-    @mock_glue
+    @mock_aws
     @mock.patch.object(GlueCatalogHook, "check_for_partition")
     def test_poke_default_args(self, mock_check_for_partition):
         table_name = "test_glue_catalog_partition_sensor_tbl"
@@ -55,7 +55,7 @@ class TestGlueCatalogPartitionSensor:
         assert op.hook.aws_conn_id == "aws_default"
         mock_check_for_partition.assert_called_once_with("default", table_name, "ds='{{ ds }}'")
 
-    @mock_glue
+    @mock_aws
     @mock.patch.object(GlueCatalogHook, "check_for_partition")
     def test_poke_nondefault_args(self, mock_check_for_partition):
         table_name = "my_table"
@@ -86,7 +86,7 @@ class TestGlueCatalogPartitionSensor:
         assert op.timeout == timeout
         mock_check_for_partition.assert_called_once_with(database_name, table_name, expression)
 
-    @mock_glue
+    @mock_aws
     @mock.patch.object(GlueCatalogHook, "check_for_partition")
     def test_dot_notation(self, mock_check_for_partition):
         db_table = "my_db.my_tbl"
diff --git a/tests/providers/amazon/aws/sensors/test_rds.py b/tests/providers/amazon/aws/sensors/test_rds.py
index bd18621b1d..fa771eff9d 100644
--- a/tests/providers/amazon/aws/sensors/test_rds.py
+++ b/tests/providers/amazon/aws/sensors/test_rds.py
@@ -16,7 +16,7 @@
 # under the License.
 from __future__ import annotations
 
-from moto import mock_rds
+from moto import mock_aws
 
 from airflow.models import DAG
 from airflow.providers.amazon.aws.hooks.rds import RdsHook
@@ -129,7 +129,7 @@ class TestRdsSnapshotExistenceSensor:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_db_instance_snapshot_poke_true(self):
         _create_db_instance_snapshot(self.hook)
         op = RdsSnapshotExistenceSensor(
@@ -141,7 +141,7 @@ class TestRdsSnapshotExistenceSensor:
         )
         assert op.poke(None)
 
-    @mock_rds
+    @mock_aws
     def test_db_instance_snapshot_poke_false(self):
         _create_db_instance(self.hook)
         op = RdsSnapshotExistenceSensor(
@@ -153,7 +153,7 @@ class TestRdsSnapshotExistenceSensor:
         )
         assert not op.poke(None)
 
-    @mock_rds
+    @mock_aws
     def test_db_instance_cluster_poke_true(self):
         _create_db_cluster_snapshot(self.hook)
         op = RdsSnapshotExistenceSensor(
@@ -165,7 +165,7 @@ class TestRdsSnapshotExistenceSensor:
         )
         assert op.poke(None)
 
-    @mock_rds
+    @mock_aws
     def test_db_instance_cluster_poke_false(self):
         op = RdsSnapshotExistenceSensor(
             task_id="test_cluster_snap_false",
@@ -188,7 +188,7 @@ class TestRdsExportTaskExistenceSensor:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_export_task_poke_true(self):
         _create_db_instance_snapshot(self.hook)
         _start_export_task(self.hook)
@@ -200,7 +200,7 @@ class TestRdsExportTaskExistenceSensor:
         )
         assert op.poke(None)
 
-    @mock_rds
+    @mock_aws
     def test_export_task_poke_false(self):
         _create_db_instance_snapshot(self.hook)
         op = RdsExportTaskExistenceSensor(
@@ -223,7 +223,7 @@ class TestRdsDbSensor:
         del cls.dag
         del cls.hook
 
-    @mock_rds
+    @mock_aws
     def test_poke_true_instance(self):
         """
         By default RdsDbSensor should wait for an instance to enter the 
'available' state
@@ -237,7 +237,7 @@ class TestRdsDbSensor:
         )
         assert op.poke(None)
 
-    @mock_rds
+    @mock_aws
     def test_poke_false_instance(self):
         _create_db_instance(self.hook)
         op = RdsDbSensor(
@@ -249,7 +249,7 @@ class TestRdsDbSensor:
         )
         assert not op.poke(None)
 
-    @mock_rds
+    @mock_aws
     def test_poke_true_cluster(self):
         _create_db_cluster(self.hook)
         op = RdsDbSensor(
@@ -261,7 +261,7 @@ class TestRdsDbSensor:
         )
         assert op.poke(None)
 
-    @mock_rds
+    @mock_aws
     def test_poke_false_cluster(self):
         _create_db_cluster(self.hook)
         op = RdsDbSensor(
diff --git a/tests/providers/amazon/aws/sensors/test_redshift_cluster.py 
b/tests/providers/amazon/aws/sensors/test_redshift_cluster.py
index 04d4ca55fd..2ecf83c3df 100644
--- a/tests/providers/amazon/aws/sensors/test_redshift_cluster.py
+++ b/tests/providers/amazon/aws/sensors/test_redshift_cluster.py
@@ -20,7 +20,7 @@ from unittest import mock
 
 import boto3
 import pytest
-from moto import mock_redshift
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException, TaskDeferred
 from airflow.providers.amazon.aws.sensors.redshift_cluster import 
RedshiftClusterSensor
@@ -52,7 +52,7 @@ class TestRedshiftClusterSensor:
         if not client.describe_clusters()["Clusters"]:
             raise ValueError("AWS not properly mocked")
 
-    @mock_redshift
+    @mock_aws
     def test_poke(self):
         self._create_cluster()
         op = RedshiftClusterSensor(
@@ -65,7 +65,7 @@ class TestRedshiftClusterSensor:
         )
         assert op.poke({})
 
-    @mock_redshift
+    @mock_aws
     def test_poke_false(self):
         self._create_cluster()
         op = RedshiftClusterSensor(
@@ -79,7 +79,7 @@ class TestRedshiftClusterSensor:
 
         assert not op.poke({})
 
-    @mock_redshift
+    @mock_aws
     def test_poke_cluster_not_found(self):
         self._create_cluster()
         op = RedshiftClusterSensor(
diff --git a/tests/providers/amazon/aws/sensors/test_sqs.py 
b/tests/providers/amazon/aws/sensors/test_sqs.py
index 7044f05f21..a65c59bb5c 100644
--- a/tests/providers/amazon/aws/sensors/test_sqs.py
+++ b/tests/providers/amazon/aws/sensors/test_sqs.py
@@ -21,7 +21,7 @@ import json
 from unittest import mock
 
 import pytest
-from moto import mock_sqs
+from moto import mock_aws
 
 from airflow.exceptions import AirflowException, AirflowSkipException, 
TaskDeferred
 from airflow.providers.amazon.aws.hooks.sqs import SqsHook
@@ -80,7 +80,7 @@ class TestSqsSensor:
         assert sensor.hook._config is not None
         assert sensor.hook._config.read_timeout == 42
 
-    @mock_sqs
+    @mock_aws
     def test_poke_success(self, mocked_context):
         self.sqs_client.create_queue(QueueName=QUEUE_NAME)
         self.sqs_client.send_message(QueueUrl=QUEUE_URL, MessageBody="hello")
@@ -96,7 +96,7 @@ class TestSqsSensor:
         xcom_value = call_kwargs["value"]
         assert xcom_value[0]["Body"] == "hello"
 
-    @mock_sqs
+    @mock_aws
     def test_poke_no_message(self, mocked_context):
         self.sqs_client.create_queue(QueueName=QUEUE_NAME)
 
@@ -157,7 +157,7 @@ class TestSqsSensor:
         ]
         mocked_client.assert_has_calls(calls_receive_message)
 
-    @mock_sqs
+    @mock_aws
     def test_poke_message_invalid_filtering(self):
         self.sqs_client.create_queue(QueueName=QUEUE_NAME)
         self.sqs_client.send_message(QueueUrl=QUEUE_URL, MessageBody="hello")
@@ -386,7 +386,7 @@ class TestSqsSensor:
         sensor.poke(mocked_context)
         assert mocked_client.delete_message_batch.called is False
 
-    @mock_sqs
+    @mock_aws
     def test_poke_batch_messages(self, mocked_context):
         messages = ["hello", "brave", "world"]
         self.sqs_client.create_queue(QueueName=QUEUE_NAME)
diff --git a/tests/providers/amazon/aws/system/utils/test_helpers.py 
b/tests/providers/amazon/aws/system/utils/test_helpers.py
index 8860a8d0b9..9ed07dcaa2 100644
--- a/tests/providers/amazon/aws/system/utils/test_helpers.py
+++ b/tests/providers/amazon/aws/system/utils/test_helpers.py
@@ -26,7 +26,7 @@ from io import StringIO
 from unittest.mock import ANY, patch
 
 import pytest
-from moto import mock_ssm
+from moto import mock_aws
 
 from tests.system.providers.amazon.aws import utils
 from tests.system.providers.amazon.aws.utils import (
@@ -54,7 +54,7 @@ def provide_test_name():
         yield name
 
 
-@mock_ssm
+@mock_aws
 class TestAmazonSystemTestHelpers:
     FETCH_VARIABLE_TEST_CASES = [
         # Format is:
diff --git a/tests/providers/amazon/aws/transfers/test_azure_blob_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_azure_blob_to_s3.py
index f34877e187..bf25390d2f 100644
--- a/tests/providers/amazon/aws/transfers/test_azure_blob_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_azure_blob_to_s3.py
@@ -20,7 +20,7 @@ from __future__ import annotations
 from io import RawIOBase
 from unittest import mock
 
-from moto import mock_s3
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.transfers.azure_blob_to_s3 import 
AzureBlobStorageToS3Operator
@@ -44,7 +44,7 @@ def _create_test_bucket():
     return hook, bucket
 
 
-@mock_s3
+@mock_aws
 class TestAzureBlobToS3Operator:
     
@mock.patch("airflow.providers.amazon.aws.transfers.azure_blob_to_s3.WasbHook")
     def test_operator_all_file_upload(self, mock_hook):
diff --git a/tests/providers/amazon/aws/transfers/test_gcs_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_gcs_to_s3.py
index 520b521a21..7f7802ca23 100644
--- a/tests/providers/amazon/aws/transfers/test_gcs_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_gcs_to_s3.py
@@ -21,7 +21,7 @@ from tempfile import NamedTemporaryFile
 from unittest import mock
 
 import pytest
-from moto import mock_s3
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 from airflow.providers.amazon.aws.transfers.gcs_to_s3 import GCSToS3Operator
@@ -46,7 +46,7 @@ def _create_test_bucket():
     return hook, bucket
 
 
-@mock_s3
+@mock_aws
 class TestGCSToS3Operator:
     @mock.patch("airflow.providers.amazon.aws.transfers.gcs_to_s3.GCSHook")
     def test_execute__match_glob(self, mock_hook):
diff --git a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py 
b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
index 07676540e4..fcd01cb7e4 100644
--- a/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
+++ b/tests/providers/amazon/aws/transfers/test_hive_to_dynamodb.py
@@ -22,7 +22,7 @@ import json
 from unittest import mock
 
 import pandas as pd
-from moto import mock_dynamodb
+from moto import mock_aws
 
 import airflow.providers.amazon.aws.transfers.hive_to_dynamodb
 from airflow.models.dag import DAG
@@ -45,7 +45,7 @@ class TestHiveToDynamoDBOperator:
     def process_data(data, *args, **kwargs):
         return json.loads(data.to_json(orient="records"))
 
-    @mock_dynamodb
+    @mock_aws
     def test_get_conn_returns_a_boto3_connection(self):
         hook = DynamoDBHook(aws_conn_id="aws_default")
         assert hook.get_conn() is not None
@@ -54,7 +54,7 @@ class TestHiveToDynamoDBOperator:
         
"airflow.providers.apache.hive.hooks.hive.HiveServer2Hook.get_pandas_df",
         return_value=pd.DataFrame(data=[("1", "sid")], columns=["id", "name"]),
     )
-    @mock_dynamodb
+    @mock_aws
     def test_get_records_with_schema(self, mock_get_pandas_df):
         # this table needs to be created in production
         self.hook.get_conn().create_table(
@@ -84,7 +84,7 @@ class TestHiveToDynamoDBOperator:
         
"airflow.providers.apache.hive.hooks.hive.HiveServer2Hook.get_pandas_df",
         return_value=pd.DataFrame(data=[("1", "sid"), ("1", "gupta")], 
columns=["id", "name"]),
     )
-    @mock_dynamodb
+    @mock_aws
     def test_pre_process_records_with_schema(self, mock_get_pandas_df):
         # this table needs to be created in production
         self.hook.get_conn().create_table(
diff --git a/tests/providers/amazon/aws/transfers/test_http_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_http_to_s3.py
index 5710e3d751..89b224932f 100644
--- a/tests/providers/amazon/aws/transfers/test_http_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_http_to_s3.py
@@ -21,7 +21,7 @@ import datetime
 from unittest import mock
 
 import boto3
-from moto import mock_s3
+from moto import mock_aws
 
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.transfers.http_to_s3 import HttpToS3Operator
@@ -54,7 +54,7 @@ class TestHttpToS3Operator:
         assert operator.s3_bucket == self.s3_bucket
         assert operator.http_conn_id == self.http_conn_id
 
-    @mock_s3
+    @mock_aws
     def test_execute(self, requests_mock):
         requests_mock.register_uri("GET", EXAMPLE_URL, content=self.response)
         conn = boto3.client("s3")
diff --git a/tests/providers/amazon/aws/transfers/test_local_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
index ad811a0a0c..fa1d294239 100644
--- a/tests/providers/amazon/aws/transfers/test_local_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_local_to_s3.py
@@ -22,7 +22,7 @@ import os
 
 import boto3
 import pytest
-from moto import mock_s3
+from moto import mock_aws
 
 from airflow.models.dag import DAG
 from airflow.providers.amazon.aws.transfers.local_to_s3 import 
LocalFilesystemToS3Operator
@@ -72,7 +72,7 @@ class TestFileToS3Operator:
         with pytest.raises(TypeError):
             operator.execute(None)
 
-    @mock_s3
+    @mock_aws
     def test_execute(self):
         conn = boto3.client("s3")
         conn.create_bucket(Bucket=self.dest_bucket)
@@ -92,7 +92,7 @@ class TestFileToS3Operator:
         # the object found should be consistent with dest_key specified earlier
         assert objects_in_dest_bucket["Contents"][0]["Key"] == self.dest_key
 
-    @mock_s3
+    @mock_aws
     def test_execute_with_only_key(self):
         conn = boto3.client("s3")
         conn.create_bucket(Bucket=self.dest_bucket)
diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py 
b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
index 9675e336b1..70b25824b1 100644
--- a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
+++ b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 
 import boto3
 import pytest
-from moto import mock_s3
+from moto import mock_aws
 
 from airflow.models import DAG
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -66,7 +66,7 @@ class TestS3ToSFTPOperator:
         self.sftp_path = SFTP_PATH
         self.s3_key = S3_KEY
 
-    @mock_s3
+    @mock_aws
     @conf_vars({("core", "enable_xcom_pickling"): "True"})
     def test_s3_to_sftp_operation(self):
         s3_hook = S3Hook(aws_conn_id=None)
diff --git a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py 
b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
index 50da4b93cf..be438a85a4 100644
--- a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 
 import boto3
 import pytest
-from moto import mock_s3
+from moto import mock_aws
 
 from airflow.models import DAG
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -66,7 +66,7 @@ class TestSFTPToS3Operator:
         self.s3_key = S3_KEY
 
     @pytest.mark.parametrize("use_temp_file", [True, False])
-    @mock_s3
+    @mock_aws
     @conf_vars({("core", "enable_xcom_pickling"): "True"})
     def test_sftp_to_s3_operation(self, use_temp_file):
         # Setting
diff --git a/tests/providers/amazon/aws/waiters/test_custom_waiters.py 
b/tests/providers/amazon/aws/waiters/test_custom_waiters.py
index 19f9296b6a..3272d603c4 100644
--- a/tests/providers/amazon/aws/waiters/test_custom_waiters.py
+++ b/tests/providers/amazon/aws/waiters/test_custom_waiters.py
@@ -25,7 +25,7 @@ import boto3
 import pytest
 from botocore.exceptions import WaiterError
 from botocore.waiter import WaiterModel
-from moto import mock_eks
+from moto import mock_aws
 
 from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
 from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook
@@ -104,7 +104,7 @@ class TestCustomEKSServiceWaiters:
             assert waiter in hook.list_waiters()
             assert waiter in hook._list_custom_waiters()
 
-    @mock_eks
+    @mock_aws
     def test_existing_waiter_inherited(self):
         """
         AwsBaseHook::get_waiter will first check if there is a custom waiter 
with the
diff --git a/tests/providers/apache/hive/transfers/test_s3_to_hive.py 
b/tests/providers/apache/hive/transfers/test_s3_to_hive.py
index 3f674ec3fa..11fa12721d 100644
--- a/tests/providers/apache/hive/transfers/test_s3_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_s3_to_hive.py
@@ -193,7 +193,7 @@ class TestS3ToHiveTransfer:
         assert self._check_file_equality(bz2_txt_nh, fn_bz2, ".bz2"), "bz2 
Compressed file not as expected"
 
     
@mock.patch("airflow.providers.apache.hive.transfers.s3_to_hive.HiveCliHook")
-    @moto.mock_s3
+    @moto.mock_aws
     def test_execute(self, mock_hiveclihook):
         conn = boto3.client("s3")
         if conn.meta.region_name == "us-east-1":
@@ -226,7 +226,7 @@ class TestS3ToHiveTransfer:
             s32hive.execute(None)
 
     
@mock.patch("airflow.providers.apache.hive.transfers.s3_to_hive.HiveCliHook")
-    @moto.mock_s3
+    @moto.mock_aws
     def test_execute_with_select_expression(self, mock_hiveclihook):
         conn = boto3.client("s3")
         if conn.meta.region_name == "us-east-1":

Reply via email to