This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 59b32dc0a0 Fix bad regexp in mypy-providers specification in pre-commits (#35465)
59b32dc0a0 is described below

commit 59b32dc0a0bcdffd124b82d92428f334646cd8cd
Author: Jarek Potiuk <ja...@potiuk.com>
AuthorDate: Mon Nov 6 19:40:52 2023 +0100

    Fix bad regexp in mypy-providers specification in pre-commits (#35465)
    
    The mypy-providers specification has been wrong for tests/providers
    and tests/system/providers, so a number of tests there have not
    been type-checked with MyPy. This PR fixes the specification and
    all the tests that were breaking the MyPy checks.
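    
    The core of the fix is that "\*" in the old pattern matches only a
    literal asterisk character, while ".*" matches any sequence of
    characters. A minimal sketch of the effect, using the standard "re"
    module and one of the test paths from the diffstat below (the snippet
    itself is illustrative and not part of the repository):
    
        import re
        
        # Old pattern: "\*" requires a literal "*" in the path, so real test
        # files under tests/providers never match and were not checked.
        bad = re.compile(r"^tests/providers/\*\.py$")
        # Fixed pattern: ".*" matches any characters before ".py".
        good = re.compile(r"^tests/providers/.*\.py$")
        
        path = "tests/providers/http/operators/test_http.py"
        print(bool(bad.match(path)))   # False
        print(bool(good.match(path)))  # True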
---
 .pre-commit-config.yaml                            |  2 +-
 airflow/decorators/task_group.py                   |  4 +-
 airflow/providers/microsoft/winrm/hooks/winrm.py   |  2 +-
 airflow/providers/openlineage/utils/utils.py       |  2 +-
 tests/providers/airbyte/sensors/test_airbyte.py    |  4 +-
 .../alibaba/cloud/sensors/test_oss_key.py          |  2 +
 .../amazon/aws/executors/ecs/test_ecs_executor.py  | 10 +--
 tests/providers/amazon/aws/hooks/test_base_aws.py  |  2 +-
 tests/providers/amazon/aws/hooks/test_eks.py       |  4 +-
 .../amazon/aws/hooks/test_emr_serverless.py        |  4 +-
 .../amazon/aws/hooks/test_hooks_signature.py       | 10 +--
 .../amazon/aws/hooks/test_lambda_function.py       |  2 +-
 tests/providers/amazon/aws/hooks/test_rds.py       |  8 ++-
 tests/providers/amazon/aws/operators/test_ecs.py   |  4 +-
 .../amazon/aws/operators/test_emr_serverless.py    | 10 +--
 .../amazon/aws/operators/test_eventbridge.py       | 10 +--
 .../amazon/aws/operators/test_glue_databrew.py     |  3 +-
 tests/providers/amazon/aws/operators/test_rds.py   |  2 +-
 .../aws/operators/test_sagemaker_notebook.py       |  3 +-
 tests/providers/amazon/aws/triggers/test_glue.py   |  4 +-
 .../amazon/aws/triggers/test_serialization.py      |  3 +-
 tests/providers/amazon/aws/triggers/test_sqs.py    |  3 +-
 tests/providers/apache/beam/triggers/test_beam.py  |  2 +-
 tests/providers/apache/impala/hooks/test_impala.py |  2 +-
 .../kubernetes/sensors/test_spark_kubernetes.py    | 34 +++++-----
 .../common/sql/operators/test_sql_execute.py       | 72 +++++++++++-----------
 tests/providers/common/sql/sensors/test_sql.py     | 12 ++--
 tests/providers/docker/conftest.py                 |  3 +
 tests/providers/docker/hooks/test_docker.py        |  4 +-
 .../providers/exasol/operators/test_exasol_sql.py  | 36 +++++------
 .../providers/google/cloud/hooks/test_dataflow.py  |  2 +-
 .../providers/google/cloud/hooks/test_dataprep.py  |  4 ++
 .../log/test_stackdriver_task_handler_system.py    |  1 -
 .../test_cloud_storage_transfer_service.py         |  6 +-
 .../google/cloud/operators/test_mlengine.py        |  2 +-
 .../cloud/sensors/test_dataproc_metastore.py       |  3 +-
 .../google/cloud/transfers/test_sql_to_gcs.py      |  2 +-
 .../google/cloud/triggers/test_cloud_batch.py      | 12 ++--
 .../google/cloud/triggers/test_cloud_run.py        |  6 +-
 tests/providers/google/cloud/triggers/test_gcs.py  |  3 +-
 .../google/cloud/triggers/test_mlengine.py         |  2 +-
 .../sensors/test_display_video.py                  |  4 +-
 tests/providers/http/operators/test_http.py        |  2 +
 tests/providers/http/triggers/test_http.py         |  3 +-
 .../azure/hooks/test_azure_data_factory.py         |  3 +
 .../operators/test_azure_container_instances.py    | 17 ++---
 .../extractors/test_default_extractor.py           |  8 +--
 tests/providers/opensearch/conftest.py             |  8 ++-
 .../opensearch/operators/test_opensearch.py        |  3 +-
 tests/providers/sftp/sensors/test_sftp.py          |  3 +
 .../providers/alibaba/example_adb_spark_batch.py   |  5 +-
 .../providers/alibaba/example_adb_spark_sql.py     |  5 +-
 .../system/providers/alibaba/example_oss_bucket.py |  7 +--
 .../system/providers/alibaba/example_oss_object.py |  5 +-
 .../providers/amazon/aws/example_dynamodb.py       |  3 +
 .../providers/amazon/aws/example_eks_templated.py  |  3 -
 .../amazon/aws/example_eks_with_fargate_profile.py |  3 -
 .../amazon/aws/example_eks_with_nodegroups.py      |  3 -
 tests/system/providers/amazon/aws/example_emr.py   |  3 +-
 .../amazon/aws/example_sagemaker_pipeline.py       |  2 +-
 .../apache/cassandra/example_cassandra_dag.py      |  3 +-
 .../providers/apache/spark/example_pyspark.py      |  3 +-
 tests/system/providers/asana/example_asana.py      |  4 +-
 .../cloud/automl/example_automl_translation.py     |  3 +-
 .../automl/example_automl_vision_classification.py |  2 +-
 .../cloud/azure/example_azure_blob_to_gcs.py       |  4 --
 .../google/cloud/bigtable/example_bigtable.py      | 16 ++---
 .../cloud/cloud_batch/example_cloud_batch.py       |  2 +-
 .../google/cloud/cloud_run/example_cloud_run.py    |  2 +-
 .../cloud_sql/example_cloud_sql_query_mysql.py     |  8 +--
 .../cloud_sql/example_cloud_sql_query_postgres.py  |  7 +--
 .../cloud/composer/example_cloud_composer.py       |  2 +-
 .../composer/example_cloud_composer_deferrable.py  |  2 +-
 .../data_loss_prevention/example_dlp_info_types.py | 15 +++--
 .../google/cloud/datafusion/example_datafusion.py  |  7 ++-
 .../google/cloud/dataplex/example_dataplex.py      |  2 +-
 .../google/cloud/dataplex/example_dataplex_dq.py   | 29 +++++----
 .../google/cloud/dataprep/example_dataprep.py      |  2 +-
 .../dataproc/example_dataproc_batch_persistent.py  |  2 +-
 .../dataproc/example_dataproc_cluster_generator.py |  2 +-
 .../google/cloud/gcs/example_calendar_to_gcs.py    |  2 +-
 .../google/cloud/gcs/example_firestore.py          |  2 +-
 .../google/cloud/gcs/example_gcs_to_gdrive.py      |  2 +-
 .../google/cloud/gcs/example_gcs_to_sheets.py      |  2 +-
 .../google/cloud/gcs/example_gdrive_to_gcs.py      |  2 +-
 .../google/cloud/gcs/example_mysql_to_gcs.py       |  2 +-
 .../providers/google/cloud/gcs/example_sheets.py   |  2 +-
 .../google/cloud/gcs/example_sheets_to_gcs.py      |  2 +-
 .../google/cloud/pubsub/example_pubsub.py          |  2 +-
 .../cloud/pubsub/example_pubsub_deferrable.py      |  2 +-
 .../cloud/speech_to_text/example_speech_to_text.py |  6 +-
 .../cloud/sql_to_sheets/example_sql_to_sheets.py   |  4 +-
 .../cloud/transfers/example_gdrive_to_local.py     |  2 +-
 .../cloud/transfers/example_postgres_to_gcs.py     |  2 +-
 .../translate_speech/example_translate_speech.py   |  8 ++-
 ...ample_vertex_ai_auto_ml_forecasting_training.py |  2 -
 .../example_vertex_ai_auto_ml_image_training.py    |  2 -
 .../example_vertex_ai_auto_ml_list_training.py     |  2 -
 .../example_vertex_ai_auto_ml_tabular_training.py  |  2 -
 .../example_vertex_ai_auto_ml_text_training.py     |  2 -
 .../example_vertex_ai_auto_ml_video_training.py    |  2 -
 .../example_vertex_ai_batch_prediction_job.py      |  4 +-
 .../example_vertex_ai_custom_container.py          |  2 -
 .../vertex_ai/example_vertex_ai_custom_job.py      |  2 -
 .../example_vertex_ai_custom_job_python_package.py |  2 -
 .../cloud/vertex_ai/example_vertex_ai_dataset.py   |  2 -
 .../cloud/vertex_ai/example_vertex_ai_endpoint.py  |  2 -
 .../example_vertex_ai_hyperparameter_tuning_job.py |  2 -
 .../example_vertex_ai_list_custom_jobs.py          |  2 -
 .../vertex_ai/example_vertex_ai_model_service.py   |  2 -
 .../vertex_ai/example_vertex_ai_pipeline_job.py    |  2 -
 .../google/suite/example_local_to_drive.py         |  2 +-
 tests/system/providers/http/example_http.py        |  1 +
 .../microsoft/azure/example_adf_run_pipeline.py    | 20 ++++--
 .../microsoft/azure/example_azure_cosmosdb.py      |  6 +-
 .../microsoft/azure/example_local_to_wasb.py       |  4 --
 .../providers/opensearch/example_opensearch.py     | 11 ++--
 tests/system/providers/sftp/example_sftp_sensor.py |  6 +-
 118 files changed, 331 insertions(+), 308 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 9d33319532..b17a18b0db 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1000,7 +1000,7 @@ repos:
         name: Run mypy for providers
         language: python
         entry: ./scripts/ci/pre_commit/pre_commit_mypy.py --namespace-packages
-        files: ^airflow/providers/.*\.py$|^tests/providers/\*\.py$|^tests/system/providers/\*\.py$
+        files: ^airflow/providers/.*\.py$|^tests/providers/.*\.py$|^tests/system/providers/.*\.py$
         exclude: ^.*/.*_vendor/
         require_serial: true
         additional_dependencies: ['rich>=12.4.4', 'inputimeout', 'pyyaml']
diff --git a/airflow/decorators/task_group.py b/airflow/decorators/task_group.py
index 312c174b0e..30aafc4e9e 100644
--- a/airflow/decorators/task_group.py
+++ b/airflow/decorators/task_group.py
@@ -116,14 +116,14 @@ class _TaskGroupFactory(ExpandableFactory, 
Generic[FParams, FReturn]):
         return task_group
 
     def override(self, **kwargs: Any) -> _TaskGroupFactory[FParams, FReturn]:
-        # TODO: fixme when mypy gets compatible with new attrs
+        # TODO: FIXME when mypy gets compatible with new attrs
         return attr.evolve(self, tg_kwargs={**self.tg_kwargs, **kwargs})  # 
type: ignore[arg-type]
 
     def partial(self, **kwargs: Any) -> _TaskGroupFactory[FParams, FReturn]:
         self._validate_arg_names("partial", kwargs)
         prevent_duplicates(self.partial_kwargs, kwargs, fail_reason="duplicate 
partial")
         kwargs.update(self.partial_kwargs)
-        # TODO: fixme when mypy gets compatible with new attrs
+        # TODO: FIXME when mypy gets compatible with new attrs
         return attr.evolve(self, partial_kwargs=kwargs)  # type: 
ignore[arg-type]
 
     def expand(self, **kwargs: OperatorExpandArgument) -> DAGNode:
diff --git a/airflow/providers/microsoft/winrm/hooks/winrm.py 
b/airflow/providers/microsoft/winrm/hooks/winrm.py
index e7bc9e7c29..b8a2b56d46 100644
--- a/airflow/providers/microsoft/winrm/hooks/winrm.py
+++ b/airflow/providers/microsoft/winrm/hooks/winrm.py
@@ -24,7 +24,7 @@ from airflow.exceptions import AirflowException
 from airflow.hooks.base import BaseHook
 from airflow.utils.platform import getuser
 
-# TODO: Fixme please - I have too complex implementation
+# TODO: FIXME please - I have too complex implementation
 
 
 class WinRMHook(BaseHook):
diff --git a/airflow/providers/openlineage/utils/utils.py 
b/airflow/providers/openlineage/utils/utils.py
index a52c7022ca..1f6c723883 100644
--- a/airflow/providers/openlineage/utils/utils.py
+++ b/airflow/providers/openlineage/utils/utils.py
@@ -354,7 +354,7 @@ class OpenLineageRedactor(SecretsMasker):
                 if name and should_hide_value_for_key(name):
                     return self._redact_all(item, depth, max_depth)
                 if attrs.has(type(item)):
-                    # TODO: fixme when mypy gets compatible with new attrs
+                    # TODO: FIXME when mypy gets compatible with new attrs
                     for dict_key, subval in attrs.asdict(
                         item,  # type: ignore[arg-type]
                         recurse=False,
diff --git a/tests/providers/airbyte/sensors/test_airbyte.py 
b/tests/providers/airbyte/sensors/test_airbyte.py
index fc25659fc8..0609028c3c 100644
--- a/tests/providers/airbyte/sensors/test_airbyte.py
+++ b/tests/providers/airbyte/sensors/test_airbyte.py
@@ -52,7 +52,7 @@ class TestAirbyteJobSensor:
         "soft_fail, expected_exception", ((False, AirflowException), (True, 
AirflowSkipException))
     )
     @mock.patch("airflow.providers.airbyte.hooks.airbyte.AirbyteHook.get_job")
-    def test_failed(self, mock_get_job, soft_fail: bool, expected_exception: 
AirflowException):
+    def test_failed(self, mock_get_job, soft_fail: bool, expected_exception: 
type[AirflowException]):
         mock_get_job.return_value = self.get_job("failed")
 
         sensor = AirbyteJobSensor(
@@ -85,7 +85,7 @@ class TestAirbyteJobSensor:
         "soft_fail, expected_exception", ((False, AirflowException), (True, 
AirflowSkipException))
     )
     @mock.patch("airflow.providers.airbyte.hooks.airbyte.AirbyteHook.get_job")
-    def test_cancelled(self, mock_get_job, soft_fail: bool, 
expected_exception: AirflowException):
+    def test_cancelled(self, mock_get_job, soft_fail: bool, 
expected_exception: type[AirflowException]):
         mock_get_job.return_value = self.get_job("cancelled")
 
         sensor = AirbyteJobSensor(
diff --git a/tests/providers/alibaba/cloud/sensors/test_oss_key.py 
b/tests/providers/alibaba/cloud/sensors/test_oss_key.py
index 81dcccb4fe..ba68cdc926 100644
--- a/tests/providers/alibaba/cloud/sensors/test_oss_key.py
+++ b/tests/providers/alibaba/cloud/sensors/test_oss_key.py
@@ -35,6 +35,8 @@ MOCK_KEY = "mock_key"
 MOCK_KEYS = ["mock_key1", "mock_key_2", "mock_key3"]
 MOCK_CONTENT = "mock_content"
 
+# mypy: disable-error-code="call-overload"
+
 
 @pytest.fixture
 def oss_key_sensor():
diff --git a/tests/providers/amazon/aws/executors/ecs/test_ecs_executor.py 
b/tests/providers/amazon/aws/executors/ecs/test_ecs_executor.py
index ef4c012640..ba2536f1b9 100644
--- a/tests/providers/amazon/aws/executors/ecs/test_ecs_executor.py
+++ b/tests/providers/amazon/aws/executors/ecs/test_ecs_executor.py
@@ -48,7 +48,7 @@ from airflow.providers.amazon.aws.executors.ecs.utils import (
     parse_assign_public_ip,
 )
 from airflow.utils.helpers import convert_camel_to_snake
-from airflow.utils.state import State
+from airflow.utils.state import State, TaskInstanceState
 
 pytestmark = pytest.mark.db_test
 
@@ -693,8 +693,8 @@ class TestAwsEcsExecutor:
     def _mock_sync(
         self,
         executor: AwsEcsExecutor,
-        expected_state: State = State.SUCCESS,
-        set_task_state: State = State.RUNNING,
+        expected_state=TaskInstanceState.SUCCESS,
+        set_task_state=TaskInstanceState.RUNNING,
     ) -> None:
         """Mock ECS to the expected state."""
         self._add_mock_task(executor, ARN1, set_task_state)
@@ -718,9 +718,9 @@ class TestAwsEcsExecutor:
         executor.ecs.describe_tasks.return_value = {"tasks": 
[response_task_json], "failures": []}
 
     @staticmethod
-    def _add_mock_task(executor: AwsEcsExecutor, arn: str, state: State = 
State.RUNNING):
+    def _add_mock_task(executor: AwsEcsExecutor, arn: str, 
state=TaskInstanceState.RUNNING):
         task = mock_task(arn, state)
-        executor.active_workers.add_task(task, mock.Mock(spec=tuple), 
mock_queue, mock_cmd, mock_config, 1)
+        executor.active_workers.add_task(task, mock.Mock(spec=tuple), 
mock_queue, mock_cmd, mock_config, 1)  # type:ignore[arg-type]
 
     def _sync_mock_with_call_counts(self, sync_func: Callable):
         """Mock won't work here, because we actually want to call the 'sync' 
func."""
diff --git a/tests/providers/amazon/aws/hooks/test_base_aws.py 
b/tests/providers/amazon/aws/hooks/test_base_aws.py
index 2ea511c45d..ba94048421 100644
--- a/tests/providers/amazon/aws/hooks/test_base_aws.py
+++ b/tests/providers/amazon/aws/hooks/test_base_aws.py
@@ -411,7 +411,7 @@ class TestAwsBaseHook:
             assert key.lower() in result_user_agent_tag_keys
 
     @staticmethod
-    def fetch_tags() -> dict[str:str]:
+    def fetch_tags() -> dict[str, str]:
         """Helper method which creates an AwsBaseHook and returns the user 
agent string split into a dict."""
         user_agent_string = 
AwsBaseHook(client_type="s3").get_client_type().meta.config.user_agent
         # Split the list of {Key}/{Value} into a dict
diff --git a/tests/providers/amazon/aws/hooks/test_eks.py 
b/tests/providers/amazon/aws/hooks/test_eks.py
index d3e04f4a1e..35cb246091 100644
--- a/tests/providers/amazon/aws/hooks/test_eks.py
+++ b/tests/providers/amazon/aws/hooks/test_eks.py
@@ -22,7 +22,7 @@ from copy import deepcopy
 from pathlib import Path
 from typing import TYPE_CHECKING
 from unittest import mock
-from urllib.parse import ParseResult, urlsplit
+from urllib.parse import urlsplit
 
 import pytest
 import time_machine
@@ -1351,7 +1351,7 @@ def assert_result_matches_expected_list(
 
 
 def assert_is_valid_uri(value: str) -> None:
-    result: ParseResult = urlsplit(value)
+    result = urlsplit(value)
 
     assert all([result.scheme, result.netloc, result.path])
     assert REGION in value
diff --git a/tests/providers/amazon/aws/hooks/test_emr_serverless.py 
b/tests/providers/amazon/aws/hooks/test_emr_serverless.py
index 33726bc62b..75e4c5f897 100644
--- a/tests/providers/amazon/aws/hooks/test_emr_serverless.py
+++ b/tests/providers/amazon/aws/hooks/test_emr_serverless.py
@@ -42,7 +42,7 @@ class TestEmrServerlessHook:
         conn_mock().get_paginator().paginate.return_value = [{"jobRuns": 
[{"id": "job1"}, {"id": "job2"}]}]
         hook = EmrServerlessHook(aws_conn_id="aws_default")
         waiter_mock = MagicMock()
-        hook.get_waiter = waiter_mock
+        hook.get_waiter = waiter_mock  # type:ignore[method-assign]
 
         hook.cancel_running_jobs("app")
 
@@ -59,7 +59,7 @@ class TestEmrServerlessHook:
         ]
         hook = EmrServerlessHook(aws_conn_id="aws_default")
         waiter_mock = MagicMock()
-        hook.get_waiter = waiter_mock
+        hook.get_waiter = waiter_mock  # type:ignore[method-assign]
 
         hook.cancel_running_jobs("app")
 
diff --git a/tests/providers/amazon/aws/hooks/test_hooks_signature.py 
b/tests/providers/amazon/aws/hooks/test_hooks_signature.py
index 9b4cfe9f6c..b0ab654be2 100644
--- a/tests/providers/amazon/aws/hooks/test_hooks_signature.py
+++ b/tests/providers/amazon/aws/hooks/test_hooks_signature.py
@@ -187,11 +187,11 @@ def test_expected_thin_hooks(hook_module: str):
     if not hooks:
         pytest.skip(reason=f"Module {hook_module!r} doesn't contain subclasses 
of `AwsGenericHook`.")
 
-    errors = []
-    for hook, hook_name in hooks:
-        is_valid, msg = validate_hook(hook, hook_name, hook_module)
-        if not is_valid:
-            errors.append(msg)
+    errors = [
+        message
+        for valid, message in (validate_hook(hook, hook_name, hook_module) for 
hook, hook_name in hooks)
+        if not valid and message
+    ]
 
     if errors:
         errors_msg = "\n * ".join(errors)
diff --git a/tests/providers/amazon/aws/hooks/test_lambda_function.py 
b/tests/providers/amazon/aws/hooks/test_lambda_function.py
index 006d0977a2..112bd51e83 100644
--- a/tests/providers/amazon/aws/hooks/test_lambda_function.py
+++ b/tests/providers/amazon/aws/hooks/test_lambda_function.py
@@ -31,7 +31,7 @@ BYTES_PAYLOAD = b'{"hello": "airflow"}'
 RUNTIME = "python3.9"
 ROLE = "role"
 HANDLER = "handler"
-CODE = {}
+CODE: dict = {}
 LOG_RESPONSE = base64.b64encode(b"FOO\n\nBAR\n\n").decode()
 BAD_LOG_RESPONSE = LOG_RESPONSE[:-3]
 
diff --git a/tests/providers/amazon/aws/hooks/test_rds.py 
b/tests/providers/amazon/aws/hooks/test_rds.py
index a34e43c944..a167567c9f 100644
--- a/tests/providers/amazon/aws/hooks/test_rds.py
+++ b/tests/providers/amazon/aws/hooks/test_rds.py
@@ -17,6 +17,7 @@
 # under the License.
 from __future__ import annotations
 
+from typing import TYPE_CHECKING, Generator
 from unittest.mock import patch
 
 import pytest
@@ -25,9 +26,12 @@ from moto import mock_rds
 from airflow.exceptions import AirflowException, AirflowNotFoundException
 from airflow.providers.amazon.aws.hooks.rds import RdsHook
 
+if TYPE_CHECKING:
+    from mypy_boto3_rds.type_defs import DBSnapshotTypeDef
+
 
 @pytest.fixture
-def rds_hook() -> RdsHook:
+def rds_hook() -> Generator[RdsHook, None, None]:
     """Returns an RdsHook whose underlying connection is mocked with moto"""
     with mock_rds():
         yield RdsHook(aws_conn_id="aws_default", region_name="us-east-1")
@@ -62,7 +66,7 @@ def db_cluster_id(rds_hook: RdsHook) -> str:
 
 
 @pytest.fixture
-def db_snapshot(rds_hook: RdsHook, db_instance_id: str) -> dict:
+def db_snapshot(rds_hook: RdsHook, db_instance_id: str) -> DBSnapshotTypeDef:
     """
     Creates a mock DB instance snapshot and returns the DBSnapshot dict from 
the boto response object.
     
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/rds.html#RDS.Client.create_db_snapshot
diff --git a/tests/providers/amazon/aws/operators/test_ecs.py 
b/tests/providers/amazon/aws/operators/test_ecs.py
index ba00407844..f402a60436 100644
--- a/tests/providers/amazon/aws/operators/test_ecs.py
+++ b/tests/providers/amazon/aws/operators/test_ecs.py
@@ -723,7 +723,7 @@ class TestEcsCreateClusterOperator(EcsBaseTestCase):
         }
 
         with pytest.raises(TaskDeferred) as defer:
-            op.execute(None)
+            op.execute(context={})
 
         assert defer.value.trigger.waiter_delay == 12
         assert defer.value.trigger.attempts == 34
@@ -787,7 +787,7 @@ class TestEcsDeleteClusterOperator(EcsBaseTestCase):
         }
 
         with pytest.raises(TaskDeferred) as defer:
-            op.execute(None)
+            op.execute(context={})
 
         assert defer.value.trigger.waiter_delay == 12
         assert defer.value.trigger.attempts == 34
diff --git a/tests/providers/amazon/aws/operators/test_emr_serverless.py 
b/tests/providers/amazon/aws/operators/test_emr_serverless.py
index e1a1ca0f37..f72cebbfec 100644
--- a/tests/providers/amazon/aws/operators/test_emr_serverless.py
+++ b/tests/providers/amazon/aws/operators/test_emr_serverless.py
@@ -892,7 +892,7 @@ class TestEmrServerlessStopOperator:
         mock_get_waiter().wait.return_value = True
         operator = EmrServerlessStopApplicationOperator(task_id=task_id, 
application_id="test")
 
-        operator.execute(None)
+        operator.execute({})
 
         mock_get_waiter().wait.assert_called_once()
         mock_conn.stop_application.assert_called_once()
@@ -904,7 +904,7 @@ class TestEmrServerlessStopOperator:
             task_id=task_id, application_id="test", wait_for_completion=False
         )
 
-        operator.execute(None)
+        operator.execute({})
 
         mock_get_waiter().wait.assert_not_called()
         mock_conn.stop_application.assert_called_once()
@@ -921,7 +921,7 @@ class TestEmrServerlessStopOperator:
             task_id=task_id, application_id="test", force_stop=True
         )
 
-        operator.execute(None)
+        operator.execute({})
 
         mock_cancel_running_jobs.assert_called_once()
         mock_conn.stop_application.assert_called_once()
@@ -934,7 +934,7 @@ class TestEmrServerlessStopOperator:
             task_id=task_id, application_id="test", deferrable=True, 
force_stop=True
         )
         with pytest.raises(TaskDeferred):
-            operator.execute(None)
+            operator.execute({})
         assert "now waiting for the 2 cancelled job(s) to terminate" in 
caplog.messages
 
     @mock.patch.object(EmrServerlessHook, "conn")
@@ -948,6 +948,6 @@ class TestEmrServerlessStopOperator:
             task_id=task_id, application_id="test", deferrable=True, 
force_stop=True
         )
         with pytest.raises(TaskDeferred):
-            operator.execute(None)
+            operator.execute({})
 
         assert "no running jobs found with application ID test" in 
caplog.messages
diff --git a/tests/providers/amazon/aws/operators/test_eventbridge.py 
b/tests/providers/amazon/aws/operators/test_eventbridge.py
index 97c0578e56..9527439777 100644
--- a/tests/providers/amazon/aws/operators/test_eventbridge.py
+++ b/tests/providers/amazon/aws/operators/test_eventbridge.py
@@ -59,7 +59,7 @@ class TestEventBridgePutEventsOperator:
             entries=ENTRIES,
         )
 
-        result = operator.execute(None)
+        result = operator.execute(context={})
 
         assert result == ["foobar"]
 
@@ -78,7 +78,7 @@ class TestEventBridgePutEventsOperator:
         )
 
         with pytest.raises(AirflowException):
-            operator.execute(None)
+            operator.execute(context={})
 
 
 class TestEventBridgePutRuleOperator:
@@ -100,7 +100,7 @@ class TestEventBridgePutRuleOperator:
             event_pattern=EVENT_PATTERN,
         )
 
-        result = operator.execute(None)
+        result = operator.execute(context={})
 
         assert result == hook_response
 
@@ -131,7 +131,7 @@ class TestEventBridgeEnableRuleOperator:
             name=RULE_NAME,
         )
 
-        enable_rule.execute(None)
+        enable_rule.execute(context={})
         mock_conn.enable_rule.assert_called_with(Name=RULE_NAME)
 
 
@@ -151,5 +151,5 @@ class TestEventBridgeDisableRuleOperator:
             name=RULE_NAME,
         )
 
-        disable_rule.execute(None)
+        disable_rule.execute(context={})
         mock_conn.disable_rule.assert_called_with(Name=RULE_NAME)
diff --git a/tests/providers/amazon/aws/operators/test_glue_databrew.py 
b/tests/providers/amazon/aws/operators/test_glue_databrew.py
index 571e4816b5..3bc5a9c9e6 100644
--- a/tests/providers/amazon/aws/operators/test_glue_databrew.py
+++ b/tests/providers/amazon/aws/operators/test_glue_databrew.py
@@ -17,6 +17,7 @@
 # under the License.
 from __future__ import annotations
 
+from typing import Generator
 from unittest import mock
 
 import pytest
@@ -29,7 +30,7 @@ JOB_NAME = "test_job"
 
 
 @pytest.fixture
-def hook() -> GlueDataBrewHook:
+def hook() -> Generator[GlueDataBrewHook, None, None]:
     with mock_databrew():
         yield GlueDataBrewHook(aws_conn_id="aws_default")
 
diff --git a/tests/providers/amazon/aws/operators/test_rds.py 
b/tests/providers/amazon/aws/operators/test_rds.py
index ed1224851c..7d7770fd9f 100644
--- a/tests/providers/amazon/aws/operators/test_rds.py
+++ b/tests/providers/amazon/aws/operators/test_rds.py
@@ -137,7 +137,7 @@ def _create_event_subscription(hook: RdsHook):
 def _patch_hook_get_connection(hook: AwsGenericHook) -> None:
     # We're mocking all actual AWS calls and don't need a connection. This
     # avoids an Airflow warning about connection cannot be found.
-    hook.get_connection = lambda _: None
+    hook.get_connection = lambda _: None  # type: 
ignore[assignment,return-value]
 
 
 class TestBaseRdsOperator:
diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py 
b/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py
index d622b1177f..cb44b459b4 100644
--- a/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py
+++ b/tests/providers/amazon/aws/operators/test_sagemaker_notebook.py
@@ -17,6 +17,7 @@
 # under the License.
 from __future__ import annotations
 
+from typing import Generator
 from unittest import mock
 
 import pytest
@@ -36,7 +37,7 @@ ROLE_ARN = "arn:aws:iam:role/role"
 
 
 @pytest.fixture
-def hook() -> SageMakerHook:
+def hook() -> Generator[SageMakerHook, None, None]:
     with mock_sagemaker():
         yield SageMakerHook(aws_conn_id="aws_default")
 
diff --git a/tests/providers/amazon/aws/triggers/test_glue.py 
b/tests/providers/amazon/aws/triggers/test_glue.py
index bb4f7189e3..80f2adb98a 100644
--- a/tests/providers/amazon/aws/triggers/test_glue.py
+++ b/tests/providers/amazon/aws/triggers/test_glue.py
@@ -45,7 +45,7 @@ class TestGlueJobTrigger:
         ]
 
         generator = trigger.run()
-        event = await generator.asend(None)
+        event = await generator.asend(None)  # type:ignore[attr-defined]
 
         assert get_state_mock.call_count == 3
         assert event.payload["status"] == "success"
@@ -67,7 +67,7 @@ class TestGlueJobTrigger:
         ]
 
         with pytest.raises(AirflowException):
-            await trigger.run().asend(None)
+            await trigger.run().asend(None)  # type:ignore[attr-defined]
 
         assert get_state_mock.call_count == 3
 
diff --git a/tests/providers/amazon/aws/triggers/test_serialization.py 
b/tests/providers/amazon/aws/triggers/test_serialization.py
index ed9215d415..39000f3ab6 100644
--- a/tests/providers/amazon/aws/triggers/test_serialization.py
+++ b/tests/providers/amazon/aws/triggers/test_serialization.py
@@ -96,7 +96,6 @@ TEST_MAX_MESSAGES = 1
 TEST_NUM_BATCHES = 1
 TEST_WAIT_TIME_SECONDS = 1
 TEST_VISIBILITY_TIMEOUT = 1
-TEST_MESSAGE_FILTERING = "literal"
 TEST_MESSAGE_FILTERING_MATCH_VALUES = "test"
 TEST_MESSAGE_FILTERING_CONFIG = "test-message-filtering-config"
 TEST_DELETE_MESSAGE_ON_RECEPTION = False
@@ -341,7 +340,7 @@ class TestTriggersSerialization:
                 num_batches=TEST_NUM_BATCHES,
                 wait_time_seconds=TEST_WAIT_TIME_SECONDS,
                 visibility_timeout=TEST_VISIBILITY_TIMEOUT,
-                message_filtering=TEST_MESSAGE_FILTERING,
+                message_filtering="literal",
                 
message_filtering_match_values=TEST_MESSAGE_FILTERING_MATCH_VALUES,
                 message_filtering_config=TEST_MESSAGE_FILTERING_CONFIG,
                 delete_message_on_reception=TEST_DELETE_MESSAGE_ON_RECEPTION,
diff --git a/tests/providers/amazon/aws/triggers/test_sqs.py 
b/tests/providers/amazon/aws/triggers/test_sqs.py
index 48bbf6bde6..b41eac08dd 100644
--- a/tests/providers/amazon/aws/triggers/test_sqs.py
+++ b/tests/providers/amazon/aws/triggers/test_sqs.py
@@ -28,7 +28,6 @@ TEST_MAX_MESSAGES = 1
 TEST_NUM_BATCHES = 1
 TEST_WAIT_TIME_SECONDS = 1
 TEST_VISIBILITY_TIMEOUT = 1
-TEST_MESSAGE_FILTERING = "literal"
 TEST_MESSAGE_FILTERING_MATCH_VALUES = "test"
 TEST_MESSAGE_FILTERING_CONFIG = "test-message-filtering-config"
 TEST_DELETE_MESSAGE_ON_RECEPTION = False
@@ -41,7 +40,7 @@ trigger = SqsSensorTrigger(
     num_batches=TEST_NUM_BATCHES,
     wait_time_seconds=TEST_WAIT_TIME_SECONDS,
     visibility_timeout=TEST_VISIBILITY_TIMEOUT,
-    message_filtering=TEST_MESSAGE_FILTERING,
+    message_filtering="literal",
     message_filtering_match_values=TEST_MESSAGE_FILTERING_MATCH_VALUES,
     message_filtering_config=TEST_MESSAGE_FILTERING_CONFIG,
     delete_message_on_reception=TEST_DELETE_MESSAGE_ON_RECEPTION,
diff --git a/tests/providers/apache/beam/triggers/test_beam.py 
b/tests/providers/apache/beam/triggers/test_beam.py
index ce22dda215..82e56ff3ec 100644
--- a/tests/providers/apache/beam/triggers/test_beam.py
+++ b/tests/providers/apache/beam/triggers/test_beam.py
@@ -33,7 +33,7 @@ INSTANCE = {"type": "BASIC", "displayName": INSTANCE_NAME}
 PROJECT_ID = "test_project_id"
 TEST_VARIABLES = {"output": "gs://bucket_test/output", "labels": 
{"airflow-version": "v2-7-0-dev0"}}
 TEST_PY_FILE = "apache_beam.examples.wordcount"
-TEST_PY_OPTIONS = []
+TEST_PY_OPTIONS: list[str] = []
 TEST_PY_INTERPRETER = "python3"
 TEST_PY_REQUIREMENTS = ["apache-beam[gcp]==2.46.0"]
 TEST_PY_PACKAGES = False
diff --git a/tests/providers/apache/impala/hooks/test_impala.py 
b/tests/providers/apache/impala/hooks/test_impala.py
index 97f1822eef..7e08079454 100644
--- a/tests/providers/apache/impala/hooks/test_impala.py
+++ b/tests/providers/apache/impala/hooks/test_impala.py
@@ -30,7 +30,7 @@ def impala_hook_fixture() -> ImpalaHook:
     mock_get_conn = MagicMock()
     mock_get_conn.return_value.cursor = MagicMock()
     mock_get_conn.return_value.cursor.return_value.rowcount = 2
-    hook.get_conn = mock_get_conn
+    hook.get_conn = mock_get_conn  # type:ignore[method-assign]
 
     return hook
 
diff --git a/tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py 
b/tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py
index a832fed97b..3f8e626df9 100644
--- a/tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py
+++ b/tests/providers/cncf/kubernetes/sensors/test_spark_kubernetes.py
@@ -31,6 +31,8 @@ from airflow.utils import db, timezone
 
 pytestmark = pytest.mark.db_test
 
+# Ignore missing args provided by default_args
+# mypy: disable-error-code="call-overload"
 
 TEST_COMPLETED_APPLICATION = {
     "apiVersion": "sparkoperator.k8s.io/v1beta2",
@@ -585,7 +587,7 @@ class TestSparkKubernetesSensor:
     )
     def test_completed_application(self, mock_get_namespaced_crd, 
mock_kubernetes_hook):
         sensor = SparkKubernetesSensor(application_name="spark_pi", 
dag=self.dag, task_id="test_task_id")
-        assert sensor.poke(None)
+        assert sensor.poke({})
         mock_kubernetes_hook.assert_called_once_with()
         mock_get_namespaced_crd.assert_called_once_with(
             group="sparkoperator.k8s.io",
@@ -607,13 +609,13 @@ class TestSparkKubernetesSensor:
         mock_get_namespaced_crd,
         mock_kubernetes_hook,
         soft_fail: bool,
-        expected_exception: AirflowException,
+        expected_exception: type[AirflowException],
     ):
         sensor = SparkKubernetesSensor(
             application_name="spark_pi", dag=self.dag, task_id="test_task_id", 
soft_fail=soft_fail
         )
         with pytest.raises(expected_exception):
-            sensor.poke(None)
+            sensor.poke({})
         mock_kubernetes_hook.assert_called_once_with()
         mock_get_namespaced_crd.assert_called_once_with(
             group="sparkoperator.k8s.io",
@@ -629,7 +631,7 @@ class TestSparkKubernetesSensor:
     )
     def test_not_processed_application(self, mock_get_namespaced_crd, 
mock_kubernetes_hook):
         sensor = SparkKubernetesSensor(application_name="spark_pi", 
dag=self.dag, task_id="test_task_id")
-        assert not sensor.poke(None)
+        assert not sensor.poke({})
         mock_kubernetes_hook.assert_called_once_with()
         mock_get_namespaced_crd.assert_called_once_with(
             group="sparkoperator.k8s.io",
@@ -645,7 +647,7 @@ class TestSparkKubernetesSensor:
     )
     def test_new_application(self, mock_get_namespaced_crd, 
mock_kubernetes_hook):
         sensor = SparkKubernetesSensor(application_name="spark_pi", 
dag=self.dag, task_id="test_task_id")
-        assert not sensor.poke(None)
+        assert not sensor.poke({})
         mock_kubernetes_hook.assert_called_once_with()
         mock_get_namespaced_crd.assert_called_once_with(
             group="sparkoperator.k8s.io",
@@ -661,7 +663,7 @@ class TestSparkKubernetesSensor:
     )
     def test_running_application(self, mock_get_namespaced_crd, 
mock_kubernetes_hook):
         sensor = SparkKubernetesSensor(application_name="spark_pi", 
dag=self.dag, task_id="test_task_id")
-        assert not sensor.poke(None)
+        assert not sensor.poke({})
         mock_kubernetes_hook.assert_called_once_with()
         mock_get_namespaced_crd.assert_called_once_with(
             group="sparkoperator.k8s.io",
@@ -677,7 +679,7 @@ class TestSparkKubernetesSensor:
     )
     def test_submitted_application(self, mock_get_namespaced_crd, 
mock_kubernetes_hook):
         sensor = SparkKubernetesSensor(application_name="spark_pi", 
dag=self.dag, task_id="test_task_id")
-        assert not sensor.poke(None)
+        assert not sensor.poke({})
         mock_kubernetes_hook.assert_called_once_with()
         mock_get_namespaced_crd.assert_called_once_with(
             group="sparkoperator.k8s.io",
@@ -693,7 +695,7 @@ class TestSparkKubernetesSensor:
     )
     def test_pending_rerun_application(self, mock_get_namespaced_crd, 
mock_kubernetes_hook):
         sensor = SparkKubernetesSensor(application_name="spark_pi", 
dag=self.dag, task_id="test_task_id")
-        assert not sensor.poke(None)
+        assert not sensor.poke({})
         mock_kubernetes_hook.assert_called_once_with()
         mock_get_namespaced_crd.assert_called_once_with(
             group="sparkoperator.k8s.io",
@@ -721,7 +723,7 @@ class TestSparkKubernetesSensor:
             application_name="spark_pi", dag=self.dag, task_id="test_task_id", 
soft_fail=soft_fail
         )
         with pytest.raises(expected_exception):
-            sensor.poke(None)
+            sensor.poke({})
         mock_kubernetes_hook.assert_called_once_with()
         mock_get_namespaced_crd.assert_called_once_with(
             group="sparkoperator.k8s.io",
@@ -743,7 +745,7 @@ class TestSparkKubernetesSensor:
             namespace="sensor_namespace",
             task_id="test_task_id",
         )
-        sensor.poke(None)
+        sensor.poke({})
         mock_kubernetes_hook.assert_called_once_with()
         mock_get_namespaced_crd.assert_called_once_with(
             group="sparkoperator.k8s.io",
@@ -768,7 +770,7 @@ class TestSparkKubernetesSensor:
             api_group=api_group,
             api_version=api_version,
         )
-        sensor.poke(None)
+        sensor.poke({})
         mock_kubernetes_hook.assert_called_once_with()
         mock_get_namespaced_crd.assert_called_once_with(
             group=api_group,
@@ -789,7 +791,7 @@ class TestSparkKubernetesSensor:
             kubernetes_conn_id="kubernetes_with_namespace",
             task_id="test_task_id",
         )
-        sensor.poke(None)
+        sensor.poke({})
         mock_kubernetes_hook.assert_called_once_with()
         mock_get_namespaced_crd.assert_called_once_with(
             group="sparkoperator.k8s.io",
@@ -828,7 +830,7 @@ class TestSparkKubernetesSensor:
             soft_fail=soft_fail,
         )
         with pytest.raises(expected_exception):
-            sensor.poke(None)
+            sensor.poke({})
         mock_log_call.assert_called_once_with(
             "spark-pi-driver", namespace="default", 
container="spark-kubernetes-driver"
         )
@@ -852,7 +854,7 @@ class TestSparkKubernetesSensor:
             dag=self.dag,
             task_id="test_task_id",
         )
-        sensor.poke(None)
+        sensor.poke({})
         mock_log_call.assert_called_once_with(
             "spark-pi-2020-02-24-1-driver", namespace="default", 
container="spark-kubernetes-driver"
         )
@@ -878,7 +880,7 @@ class TestSparkKubernetesSensor:
             dag=self.dag,
             task_id="test_task_id",
         )
-        sensor.poke(None)
+        sensor.poke({})
         warn_log_call.assert_called_once()
 
     @patch(
@@ -899,7 +901,7 @@ class TestSparkKubernetesSensor:
             dag=self.dag,
             task_id="test_task_id",
         )
-        sensor.poke(None)
+        sensor.poke({})
         mock_log_call.assert_called_once_with(
             "spark-pi-2020-02-24-1-driver", namespace="default", 
container="spark-kubernetes-driver"
         )
diff --git a/tests/providers/common/sql/operators/test_sql_execute.py 
b/tests/providers/common/sql/operators/test_sql_execute.py
index 5dc2e6e30c..565cc44c3f 100644
--- a/tests/providers/common/sql/operators/test_sql_execute.py
+++ b/tests/providers/common/sql/operators/test_sql_execute.py
@@ -51,45 +51,45 @@ class Row2(NamedTuple):
             "select * from dummy",
             True,
             True,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             id="Scalar: Single SQL statement, return_last, split statement",
         ),
         pytest.param(
             "select * from dummy;select * from dummy2",
             True,
             True,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             id="Scalar: Multiple SQL statements, return_last, split statement",
         ),
         pytest.param(
             "select * from dummy",
             False,
             False,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             id="Scalar: Single SQL statements, no return_last (doesn't 
matter), no split statement",
         ),
         pytest.param(
             "select * from dummy",
             True,
             False,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             id="Scalar: Single SQL statements, return_last (doesn't matter), 
no split statement",
         ),
         pytest.param(
             ["select * from dummy"],
             False,
             False,
-            [[Row(id=1, value="value1"), Row(id=2, value="value2")]],
+            [[Row(id="1", value="value1"), Row(id="2", value="value2")]],
             [[("id",), ("value",)]],
-            [[Row(id=1, value="value1"), Row(id=2, value="value2")]],
+            [[Row(id="1", value="value1"), Row(id="2", value="value2")]],
             id="Non-Scalar: Single SQL statements in list, no return_last, no 
split statement",
         ),
         pytest.param(
@@ -97,13 +97,13 @@ class Row2(NamedTuple):
             False,
             False,
             [
-                [Row(id=1, value="value1"), Row(id=2, value="value2")],
-                [Row2(id2=1, value2="value1"), Row2(id2=2, value2="value2")],
+                [Row(id="1", value="value1"), Row(id="2", value="value2")],
+                [Row2(id2="1", value2="value1"), Row2(id2="2", 
value2="value2")],
             ],
             [[("id",), ("value",)], [("id2",), ("value2",)]],
             [
-                [Row(id=1, value="value1"), Row(id=2, value="value2")],
-                [Row2(id2=1, value2="value1"), Row2(id2=2, value2="value2")],
+                [Row(id="1", value="value1"), Row(id="2", value="value2")],
+                [Row2(id2="1", value2="value1"), Row2(id2="2", 
value2="value2")],
             ],
             id="Non-Scalar: Multiple SQL statements in list, no return_last 
(no matter), no split statement",
         ),
@@ -112,13 +112,13 @@ class Row2(NamedTuple):
             True,
             False,
             [
-                [Row(id=1, value="value1"), Row(id=2, value="value2")],
-                [Row2(id2=1, value2="value1"), Row2(id2=2, value2="value2")],
+                [Row(id="1", value="value1"), Row(id="2", value="value2")],
+                [Row2(id2="1", value2="value1"), Row2(id2="2", 
value2="value2")],
             ],
             [[("id",), ("value",)], [("id2",), ("value2",)]],
             [
-                [Row(id=1, value="value1"), Row(id=2, value="value2")],
-                [Row2(id2=1, value2="value1"), Row2(id2=2, value2="value2")],
+                [Row(id="1", value="value1"), Row(id="2", value="value2")],
+                [Row2(id2="1", value2="value1"), Row2(id2="2", 
value2="value2")],
             ],
             id="Non-Scalar: Multiple SQL statements in list, return_last (no 
matter), no split statement",
         ),
@@ -166,45 +166,45 @@ def test_exec_success(sql, return_last, split_statement, 
hook_results, hook_desc
             "select * from dummy",
             True,
             True,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            ([("id",), ("value",)], [Row(id=1, value="value1"), Row(id=2, 
value="value2")]),
+            ([("id",), ("value",)], [Row(id="1", value="value1"), Row(id="2", 
value="value2")]),
             id="Scalar: Single SQL statement, return_last, split statement",
         ),
         pytest.param(
             "select * from dummy;select * from dummy2",
             True,
             True,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            ([("id",), ("value",)], [Row(id=1, value="value1"), Row(id=2, 
value="value2")]),
+            ([("id",), ("value",)], [Row(id="1", value="value1"), Row(id="2", 
value="value2")]),
             id="Scalar: Multiple SQL statements, return_last, split statement",
         ),
         pytest.param(
             "select * from dummy",
             False,
             False,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            ([("id",), ("value",)], [Row(id=1, value="value1"), Row(id=2, 
value="value2")]),
+            ([("id",), ("value",)], [Row(id="1", value="value1"), Row(id="2", 
value="value2")]),
             id="Scalar: Single SQL statements, no return_last (doesn't 
matter), no split statement",
         ),
         pytest.param(
             "select * from dummy",
             True,
             False,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            ([("id",), ("value",)], [Row(id=1, value="value1"), Row(id=2, 
value="value2")]),
+            ([("id",), ("value",)], [Row(id="1", value="value1"), Row(id="2", 
value="value2")]),
             id="Scalar: Single SQL statements, return_last (doesn't matter), 
no split statement",
         ),
         pytest.param(
             ["select * from dummy"],
             False,
             False,
-            [[Row(id=1, value="value1"), Row(id=2, value="value2")]],
+            [[Row(id="1", value="value1"), Row(id="2", value="value2")]],
             [[("id",), ("value",)]],
-            [([("id",), ("value",)], [Row(id=1, value="value1"), Row(id=2, 
value="value2")])],
+            [([("id",), ("value",)], [Row(id="1", value="value1"), Row(id="2", 
value="value2")])],
             id="Non-Scalar: Single SQL statements in list, no return_last, no 
split statement",
         ),
         pytest.param(
@@ -212,13 +212,13 @@ def test_exec_success(sql, return_last, split_statement, 
hook_results, hook_desc
             False,
             False,
             [
-                [Row(id=1, value="value1"), Row(id=2, value="value2")],
-                [Row2(id2=1, value2="value1"), Row2(id2=2, value2="value2")],
+                [Row(id="1", value="value1"), Row(id="2", value="value2")],
+                [Row2(id2="1", value2="value1"), Row2(id2="2", 
value2="value2")],
             ],
             [[("id",), ("value",)], [("id2",), ("value2",)]],
             [
-                ([("id",), ("value",)], [Row(id=1, value="value1"), Row(id=2, 
value="value2")]),
-                ([("id2",), ("value2",)], [Row2(id2=1, value2="value1"), 
Row2(id2=2, value2="value2")]),
+                ([("id",), ("value",)], [Row(id="1", value="value1"), 
Row(id="2", value="value2")]),
+                ([("id2",), ("value2",)], [Row2(id2="1", value2="value1"), 
Row2(id2="2", value2="value2")]),
             ],
             id="Non-Scalar: Multiple SQL statements in list, no return_last 
(no matter), no split statement",
         ),
@@ -227,13 +227,13 @@ def test_exec_success(sql, return_last, split_statement, 
hook_results, hook_desc
             True,
             False,
             [
-                [Row(id=1, value="value1"), Row(id=2, value="value2")],
-                [Row2(id2=1, value2="value1"), Row2(id2=2, value2="value2")],
+                [Row(id="1", value="value1"), Row(id="2", value="value2")],
+                [Row2(id2="1", value2="value1"), Row2(id2="2", 
value2="value2")],
             ],
             [[("id",), ("value",)], [("id2",), ("value2",)]],
             [
-                ([("id",), ("value",)], [Row(id=1, value="value1"), Row(id=2, 
value="value2")]),
-                ([("id2",), ("value2",)], [Row2(id2=1, value2="value1"), 
Row2(id2=2, value2="value2")]),
+                ([("id",), ("value",)], [Row(id="1", value="value1"), 
Row(id="2", value="value2")]),
+                ([("id2",), ("value2",)], [Row2(id2="1", value2="value1"), 
Row2(id2="2", value2="value2")]),
             ],
             id="Non-Scalar: Multiple SQL statements in list, return_last (no 
matter), no split statement",
         ),
diff --git a/tests/providers/common/sql/sensors/test_sql.py 
b/tests/providers/common/sql/sensors/test_sql.py
index bf4e52c780..2ad27f373a 100644
--- a/tests/providers/common/sql/sensors/test_sql.py
+++ b/tests/providers/common/sql/sensors/test_sql.py
@@ -123,7 +123,7 @@ class TestSqlSensor:
     )
     @mock.patch("airflow.providers.common.sql.sensors.sql.BaseHook")
     def test_sql_sensor_postgres_poke_fail_on_empty(
-        self, mock_hook, soft_fail: bool, expected_exception: AirflowException
+        self, mock_hook, soft_fail: bool, expected_exception: 
type[AirflowException]
     ):
         op = SqlSensor(
             task_id="sql_sensor_check",
@@ -163,7 +163,7 @@ class TestSqlSensor:
     )
     @mock.patch("airflow.providers.common.sql.sensors.sql.BaseHook")
     def test_sql_sensor_postgres_poke_failure(
-        self, mock_hook, soft_fail: bool, expected_exception: AirflowException
+        self, mock_hook, soft_fail: bool, expected_exception: 
type[AirflowException]
     ):
         op = SqlSensor(
             task_id="sql_sensor_check",
@@ -188,7 +188,7 @@ class TestSqlSensor:
     )
     @mock.patch("airflow.providers.common.sql.sensors.sql.BaseHook")
     def test_sql_sensor_postgres_poke_failure_success(
-        self, mock_hook, soft_fail: bool, expected_exception: AirflowException
+        self, mock_hook, soft_fail: bool, expected_exception: 
type[AirflowException]
     ):
         op = SqlSensor(
             task_id="sql_sensor_check",
@@ -217,7 +217,7 @@ class TestSqlSensor:
     )
     @mock.patch("airflow.providers.common.sql.sensors.sql.BaseHook")
     def test_sql_sensor_postgres_poke_failure_success_same(
-        self, mock_hook, soft_fail: bool, expected_exception: AirflowException
+        self, mock_hook, soft_fail: bool, expected_exception: 
type[AirflowException]
     ):
         op = SqlSensor(
             task_id="sql_sensor_check",
@@ -243,7 +243,7 @@ class TestSqlSensor:
     )
     @mock.patch("airflow.providers.common.sql.sensors.sql.BaseHook")
     def test_sql_sensor_postgres_poke_invalid_failure(
-        self, mock_hook, soft_fail: bool, expected_exception: AirflowException
+        self, mock_hook, soft_fail: bool, expected_exception: 
type[AirflowException]
     ):
         op = SqlSensor(
             task_id="sql_sensor_check",
@@ -266,7 +266,7 @@ class TestSqlSensor:
     )
     @mock.patch("airflow.providers.common.sql.sensors.sql.BaseHook")
     def test_sql_sensor_postgres_poke_invalid_success(
-        self, mock_hook, soft_fail: bool, expected_exception: AirflowException
+        self, mock_hook, soft_fail: bool, expected_exception: 
type[AirflowException]
     ):
         op = SqlSensor(
             task_id="sql_sensor_check",
diff --git a/tests/providers/docker/conftest.py 
b/tests/providers/docker/conftest.py
index c698a23de5..af0ee2062b 100644
--- a/tests/providers/docker/conftest.py
+++ b/tests/providers/docker/conftest.py
@@ -22,6 +22,9 @@ from unittest import mock
 
 import pytest
 
+# TODO: FIXME  - Not sure why but this context manager gets tricky typing 
issues
+# mypy: disable-error-code="arg-type,misc"
+
 
 @contextmanager
 def _mocker_context(o, additional_modules: list | None = None) -> 
AbstractContextManager[mock.MagicMock]:
diff --git a/tests/providers/docker/hooks/test_docker.py 
b/tests/providers/docker/hooks/test_docker.py
index 454aee3149..d96c8af275 100644
--- a/tests/providers/docker/hooks/test_docker.py
+++ b/tests/providers/docker/hooks/test_docker.py
@@ -212,13 +212,13 @@ def test_failed_login_to_registry(hook_conn, 
docker_api_client_patcher, caplog):
             id="missing-registry-host",
         ),
         pytest.param(
-            {**TEST_CONN, **{"extra": {"reauth": "enabled"}}},
+            {**TEST_CONN, **{"extra": {"reauth": "enabled"}}},  # type: 
ignore[dict-item]
             ValueError,
             r"Unable parse `reauth` value '.*' to bool\.",
             id="wrong-reauth",
         ),
         pytest.param(
-            {**TEST_CONN, **{"extra": {"reauth": "disabled"}}},
+            {**TEST_CONN, **{"extra": {"reauth": "disabled"}}},  # type: 
ignore[dict-item]
             ValueError,
             r"Unable parse `reauth` value '.*' to bool\.",
             id="wrong-noreauth",
diff --git a/tests/providers/exasol/operators/test_exasol_sql.py 
b/tests/providers/exasol/operators/test_exasol_sql.py
index 9ff999eed3..141f791ba4 100644
--- a/tests/providers/exasol/operators/test_exasol_sql.py
+++ b/tests/providers/exasol/operators/test_exasol_sql.py
@@ -47,45 +47,45 @@ class Row2(NamedTuple):
             "select * from dummy",
             True,
             True,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             id="Scalar: Single SQL statement, return_last, split statement",
         ),
         pytest.param(
             "select * from dummy;select * from dummy2",
             True,
             True,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             id="Scalar: Multiple SQL statements, return_last, split statement",
         ),
         pytest.param(
             "select * from dummy",
             False,
             False,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             id="Scalar: Single SQL statements, no return_last (doesn't 
matter), no split statement",
         ),
         pytest.param(
             "select * from dummy",
             True,
             False,
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             [[("id",), ("value",)]],
-            [Row(id=1, value="value1"), Row(id=2, value="value2")],
+            [Row(id="1", value="value1"), Row(id="2", value="value2")],
             id="Scalar: Single SQL statements, return_last (doesn't matter), 
no split statement",
         ),
         pytest.param(
             ["select * from dummy"],
             False,
             False,
-            [[Row(id=1, value="value1"), Row(id=2, value="value2")]],
+            [[Row(id="1", value="value1"), Row(id="2", value="value2")]],
             [[("id",), ("value",)]],
-            [[Row(id=1, value="value1"), Row(id=2, value="value2")]],
+            [[Row(id="1", value="value1"), Row(id="2", value="value2")]],
             id="Non-Scalar: Single SQL statements in list, no return_last, no 
split statement",
         ),
         pytest.param(
@@ -93,13 +93,13 @@ class Row2(NamedTuple):
             False,
             False,
             [
-                [Row(id=1, value="value1"), Row(id=2, value="value2")],
-                [Row2(id2=1, value2="value1"), Row2(id2=2, value2="value2")],
+                [Row(id="1", value="value1"), Row(id="2", value="value2")],
+                [Row2(id2="1", value2="value1"), Row2(id2="2", 
value2="value2")],
             ],
             [[("id",), ("value",)], [("id2",), ("value2",)]],
             [
-                [Row(id=1, value="value1"), Row(id=2, value="value2")],
-                [Row2(id2=1, value2="value1"), Row2(id2=2, value2="value2")],
+                [Row(id="1", value="value1"), Row(id="2", value="value2")],
+                [Row2(id2="1", value2="value1"), Row2(id2="2", 
value2="value2")],
             ],
             id="Non-Scalar: Multiple SQL statements in list, no return_last 
(no matter), no split statement",
         ),
@@ -108,13 +108,13 @@ class Row2(NamedTuple):
             True,
             False,
             [
-                [Row(id=1, value="value1"), Row(id=2, value="value2")],
-                [Row2(id2=1, value2="value1"), Row2(id2=2, value2="value2")],
+                [Row(id="1", value="value1"), Row(id="2", value="value2")],
+                [Row2(id2="1", value2="value1"), Row2(id2="2", 
value2="value2")],
             ],
             [[("id",), ("value",)], [("id2",), ("value2",)]],
             [
-                [Row(id=1, value="value1"), Row(id=2, value="value2")],
-                [Row2(id2=1, value2="value1"), Row2(id2=2, value2="value2")],
+                [Row(id="1", value="value1"), Row(id="2", value="value2")],
+                [Row2(id2="1", value2="value1"), Row2(id2="2", 
value2="value2")],
             ],
             id="Non-Scalar: Multiple SQL statements in list, return_last (no 
matter), no split statement",
         ),
diff --git a/tests/providers/google/cloud/hooks/test_dataflow.py 
b/tests/providers/google/cloud/hooks/test_dataflow.py
index 3544826bdc..f34cb77666 100644
--- a/tests/providers/google/cloud/hooks/test_dataflow.py
+++ b/tests/providers/google/cloud/hooks/test_dataflow.py
@@ -55,7 +55,7 @@ PARAMETERS = {
     "inputFile": "gs://dataflow-samples/shakespeare/kinglear.txt",
     "output": "gs://test/output/my_output",
 }
-TEST_ENVIRONMENT = {}
+TEST_ENVIRONMENT: dict[str, str] = {}
 PY_FILE = "apache_beam.examples.wordcount"
 JAR_FILE = "unitest.jar"
 JOB_CLASS = "com.example.UnitTest"
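Several of the hunks below apply the same fix as TEST_ENVIRONMENT above: an empty literal gives mypy nothing to infer, so the constant gets an explicit annotation. A minimal sketch of the pattern, using hypothetical names:

    from __future__ import annotations

    from typing import Any

    # Without annotations mypy reports "Need type annotation" (var-annotated)
    # for empty literals assigned at module level.
    TEST_OPTIONS: dict[str, Any] = {}
    TEST_ARGS: list[str] = []

    def launch(options: dict[str, Any], args: list[str]) -> None:
        print(options, args)

    launch(TEST_OPTIONS, TEST_ARGS)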
diff --git a/tests/providers/google/cloud/hooks/test_dataprep.py 
b/tests/providers/google/cloud/hooks/test_dataprep.py
index a0cef77ae9..2ef0a0a21d 100644
--- a/tests/providers/google/cloud/hooks/test_dataprep.py
+++ b/tests/providers/google/cloud/hooks/test_dataprep.py
@@ -42,6 +42,10 @@ URL_WRANGLED_DATASETS = URL_BASE + "/v4/wrangledDatasets"
 URL_OUTPUT_OBJECTS = URL_BASE + "/v4/outputObjects"
 URL_WRITE_SETTINGS = URL_BASE + "/v4/writeSettings"
 
+# needed to ignore MyPy badly detecting HTTPError as requiring response 
parameter
+# HTTPError will fall-back to None when parameter is not present.
+# mypy: disable-error-code="call-arg"
+
 
 class TestGoogleDataprepHook:
     def setup_method(self):
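The "# mypy: disable-error-code=..." line added above is mypy's inline, per-file configuration comment: it turns off only the named error code, and only for this module, unlike a trailing "# type: ignore[...]" which covers a single line. A minimal sketch of the same mechanism, shown here with a different error code and hypothetical names:

    # mypy: disable-error-code="arg-type"
    from __future__ import annotations

    def shout(message: str) -> None:  # hypothetical helper
        print(str(message).upper())

    # Fine at runtime, but normally an `arg-type` error under mypy; the
    # module-level comment above silences only that code, so any other
    # diagnostics in this file are still reported.
    shout(42)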
diff --git 
a/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py 
b/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py
index e0c57c935d..b5daac0d81 100644
--- a/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py
+++ b/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py
@@ -52,7 +52,6 @@ class TestStackdriverLoggingHandlerSystem(GoogleSystemTest):
         importlib.reload(airflow_local_settings)
         settings.configure_logging()
         clear_db_runs()
-        super().tearDown()
 
     @provide_session
     def test_should_support_key_auth(self, session):
diff --git 
a/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py 
b/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py
index e3318991bc..40c2b87b0a 100644
--- 
a/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py
+++ 
b/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service.py
@@ -219,10 +219,10 @@ class TestTransferJobValidator:
     @pytest.mark.parametrize(
         "transfer_spec",
         [
-            {**SOURCE_AWS, **SOURCE_GCS, **SOURCE_HTTP},
+            {**SOURCE_AWS, **SOURCE_GCS, **SOURCE_HTTP},  # type: 
ignore[arg-type]
             {**SOURCE_AWS, **SOURCE_GCS},
-            {**SOURCE_AWS, **SOURCE_HTTP},
-            {**SOURCE_GCS, **SOURCE_HTTP},
+            {**SOURCE_AWS, **SOURCE_HTTP},  # type: ignore[arg-type]
+            {**SOURCE_GCS, **SOURCE_HTTP},  # type: ignore[arg-type]
         ],
     )
     def test_verify_data_source(self, transfer_spec):
diff --git a/tests/providers/google/cloud/operators/test_mlengine.py 
b/tests/providers/google/cloud/operators/test_mlengine.py
index e599cd347a..bc99de2bf1 100644
--- a/tests/providers/google/cloud/operators/test_mlengine.py
+++ b/tests/providers/google/cloud/operators/test_mlengine.py
@@ -939,7 +939,7 @@ TEST_PYTHON_VERSION = "3.8"
 TEST_JOB_DIR = "gs://example_mlengine_bucket/job-dir"
 TEST_PACKAGE_URIS = 
["gs://system-tests-resources/example_gcp_mlengine/trainer-0.1.tar.gz"]
 TEST_TRAINING_PYTHON_MODULE = "trainer.task"
-TEST_TRAINING_ARGS = []
+TEST_TRAINING_ARGS: list[str] = []
 TEST_LABELS = {"job_type": "training", "***-version": "v2-5-0-dev0"}
 
 
diff --git a/tests/providers/google/cloud/sensors/test_dataproc_metastore.py 
b/tests/providers/google/cloud/sensors/test_dataproc_metastore.py
index 435ceac661..cdc6dbde6c 100644
--- a/tests/providers/google/cloud/sensors/test_dataproc_metastore.py
+++ b/tests/providers/google/cloud/sensors/test_dataproc_metastore.py
@@ -17,6 +17,7 @@
 from __future__ import annotations
 
 from copy import deepcopy
+from typing import Any
 from unittest import mock
 
 import pytest
@@ -39,7 +40,7 @@ MANIFEST_SUCCESS = {
     "filenames": [],
 }
 MANIFEST_FAIL = {"status": {"code": 1, "message": "Bad things happened", 
"details": []}, "filenames": []}
-RESULT_FILE_CONTENT = {"rows": [], "metadata": {}}
+RESULT_FILE_CONTENT: dict[str, Any] = {"rows": [], "metadata": {}}
 ROW_1 = []
 ROW_2 = []
 TEST_SERVICE_ID = "test-service"
diff --git a/tests/providers/google/cloud/transfers/test_sql_to_gcs.py 
b/tests/providers/google/cloud/transfers/test_sql_to_gcs.py
index ea7a3defd3..a65c7fd529 100644
--- a/tests/providers/google/cloud/transfers/test_sql_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_sql_to_gcs.py
@@ -43,7 +43,7 @@ CURSOR_DESCRIPTION = [
     ("column_c", "10", 0, 0, 0, 0, False),
 ]
 TMP_FILE_NAME = "temp-file"
-EMPTY_INPUT_DATA = []
+EMPTY_INPUT_DATA: list[str] = []
 INPUT_DATA = [
     ["101", "school", "2015-01-01"],
     ["102", "business", "2017-05-24"],
diff --git a/tests/providers/google/cloud/triggers/test_cloud_batch.py 
b/tests/providers/google/cloud/triggers/test_cloud_batch.py
index 8da083f17e..82f04b0d31 100644
--- a/tests/providers/google/cloud/triggers/test_cloud_batch.py
+++ b/tests/providers/google/cloud/triggers/test_cloud_batch.py
@@ -69,10 +69,10 @@ class TestCloudBatchJobFinishedTrigger:
         """
         Tests the CloudBatchJobFinishedTrigger fires once the job execution 
reaches a successful state.
         """
-        state = JobStatus.State.SUCCEEDED
+        state = JobStatus.State(JobStatus.State.SUCCEEDED)
         mock_hook.return_value.get_batch_job.return_value = 
self._mock_job_with_state(state)
         generator = trigger.run()
-        actual = await generator.asend(None)
+        actual = await generator.asend(None)  # type:ignore[attr-defined]
         assert (
             TriggerEvent(
                 {
@@ -92,10 +92,10 @@ class TestCloudBatchJobFinishedTrigger:
         """
         Tests the CloudBatchJobFinishedTrigger fires once the job execution 
reaches a successful state.
         """
-        state = JobStatus.State.DELETION_IN_PROGRESS
+        state = JobStatus.State(JobStatus.State.DELETION_IN_PROGRESS)
         mock_hook.return_value.get_batch_job.return_value = 
self._mock_job_with_state(state)
         generator = trigger.run()
-        actual = await generator.asend(None)
+        actual = await generator.asend(None)  # type:ignore[attr-defined]
         assert (
             TriggerEvent(
                 {
@@ -116,7 +116,7 @@ class TestCloudBatchJobFinishedTrigger:
         """
         mock_hook.return_value.get_batch_job.side_effect = Exception("Test 
Exception")
         generator = trigger.run()
-        actual = await generator.asend(None)
+        actual = await generator.asend(None)  # type:ignore[attr-defined]
         assert (
             TriggerEvent(
                 {
@@ -142,7 +142,7 @@ class TestCloudBatchJobFinishedTrigger:
         mock_hook.return_value.get_batch_job = _mock_job
 
         generator = trigger.run()
-        actual = await generator.asend(None)
+        actual = await generator.asend(None)  # type:ignore[attr-defined]
         assert (
             TriggerEvent(
                 {
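All of the trigger tests touched above pull a single event out of trigger.run() with asend(None). run() is annotated as an async iterator, which does not declare asend(), hence the attr-defined ignores; at runtime the object is an async generator, so the call works. A self-contained sketch of the pattern with a hypothetical trigger:

    import asyncio
    from typing import AsyncIterator

    async def run() -> AsyncIterator[dict]:
        # hypothetical stand-in for a trigger's run() method
        yield {"status": "success"}

    async def main() -> None:
        generator = run()
        # asend(None) advances the generator to its first yield, like
        # awaiting __anext__(); AsyncIterator has no asend(), hence the
        # type: ignore[attr-defined] comments in the tests above.
        event = await generator.asend(None)  # type: ignore[attr-defined]
        assert event == {"status": "success"}

    asyncio.run(main())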
diff --git a/tests/providers/google/cloud/triggers/test_cloud_run.py 
b/tests/providers/google/cloud/triggers/test_cloud_run.py
index 42c57d9b93..30d56241ed 100644
--- a/tests/providers/google/cloud/triggers/test_cloud_run.py
+++ b/tests/providers/google/cloud/triggers/test_cloud_run.py
@@ -82,7 +82,7 @@ class TestCloudBatchJobFinishedTrigger:
             done, name, error_code, error_message
         )
         generator = trigger.run()
-        actual = await generator.asend(None)
+        actual = await generator.asend(None)  # type:ignore[attr-defined]
         assert (
             TriggerEvent(
                 {
@@ -113,7 +113,7 @@ class TestCloudBatchJobFinishedTrigger:
         generator = trigger.run()
 
         with pytest.raises(expected_exception=AirflowException):
-            await generator.asend(None)
+            await generator.asend(None)  # type:ignore[attr-defined]
 
     @pytest.mark.asyncio
     
@mock.patch("airflow.providers.google.cloud.triggers.cloud_run.CloudRunAsyncHook")
@@ -133,7 +133,7 @@ class TestCloudBatchJobFinishedTrigger:
         mock_hook.return_value.get_operation = _mock_operation
 
         generator = trigger.run()
-        actual = await generator.asend(None)
+        actual = await generator.asend(None)  # type:ignore[attr-defined]
 
         assert (
             TriggerEvent(
diff --git a/tests/providers/google/cloud/triggers/test_gcs.py 
b/tests/providers/google/cloud/triggers/test_gcs.py
index 14744212fe..4fcde67711 100644
--- a/tests/providers/google/cloud/triggers/test_gcs.py
+++ b/tests/providers/google/cloud/triggers/test_gcs.py
@@ -19,6 +19,7 @@ from __future__ import annotations
 
 import asyncio
 from datetime import datetime, timedelta
+from typing import Any
 from unittest import mock
 from unittest.mock import AsyncMock
 
@@ -39,7 +40,7 @@ TEST_OBJECT = "TEST_OBJECT"
 TEST_PREFIX = "TEST_PREFIX"
 TEST_GCP_CONN_ID = "TEST_GCP_CONN_ID"
 TEST_POLLING_INTERVAL = 3.0
-TEST_HOOK_PARAMS = {}
+TEST_HOOK_PARAMS: dict[str, Any] = {}
 TEST_TS_OBJECT = datetime.utcnow()
 
 
diff --git a/tests/providers/google/cloud/triggers/test_mlengine.py 
b/tests/providers/google/cloud/triggers/test_mlengine.py
index 4a539c8c5d..9372ee0c38 100644
--- a/tests/providers/google/cloud/triggers/test_mlengine.py
+++ b/tests/providers/google/cloud/triggers/test_mlengine.py
@@ -34,7 +34,7 @@ TEST_PYTHON_VERSION = "3.8"
 TEST_JOB_DIR = "gs://example_mlengine_bucket/job-dir"
 TEST_PACKAGE_URIS = 
["gs://system-tests-resources/example_gcp_mlengine/trainer-0.1.tar.gz"]
 TEST_TRAINING_PYTHON_MODULE = "trainer.task"
-TEST_TRAINING_ARGS = []
+TEST_TRAINING_ARGS: list[str] = []
 TEST_LABELS = {"job_type": "training", "***-version": "v2-5-0-dev0"}
 TEST_POLL_INTERVAL = 4.0
 
diff --git 
a/tests/providers/google/marketing_platform/sensors/test_display_video.py 
b/tests/providers/google/marketing_platform/sensors/test_display_video.py
index 79ca5a34fe..883b8c34d7 100644
--- a/tests/providers/google/marketing_platform/sensors/test_display_video.py
+++ b/tests/providers/google/marketing_platform/sensors/test_display_video.py
@@ -77,7 +77,7 @@ class TestGoogleDisplayVideo360Sensor:
     @mock.patch(f"{MODULE_NAME}.GoogleDisplayVideo360Hook")
     @mock.patch(f"{MODULE_NAME}.BaseSensorOperator")
     def test_poke_with_exception(
-        self, mock_base_op, hook_mock, soft_fail: bool, expected_exception: 
AirflowException
+        self, mock_base_op, hook_mock, soft_fail: bool, expected_exception: 
type[AirflowException]
     ):
         operation_name = "operation_name"
         op = GoogleDisplayVideo360GetSDFDownloadOperationSensor(
@@ -89,4 +89,4 @@ class TestGoogleDisplayVideo360Sensor:
         hook_mock.return_value.get_sdf_download_operation.return_value = 
{"error": "error"}
 
         with pytest.raises(expected_exception, match="The operation finished 
in error with error"):
-            op.poke(context=None)
+            op.poke(context={})
diff --git a/tests/providers/http/operators/test_http.py 
b/tests/providers/http/operators/test_http.py
index 2b91ad6296..a57af7d764 100644
--- a/tests/providers/http/operators/test_http.py
+++ b/tests/providers/http/operators/test_http.py
@@ -131,6 +131,7 @@ class TestHttpOperator:
                     headers={},
                     extra_options={},
                 )
+            return None
 
         requests_mock.get("http://www.example.com";, json={"value": 5})
         operator = HttpOperator(
@@ -173,6 +174,7 @@ class TestHttpOperator:
             if not has_returned:
                 has_returned = True
                 return dict(endpoint="/")
+            return None
 
         operator = HttpOperator(
             task_id="test_HTTP_op",
diff --git a/tests/providers/http/triggers/test_http.py 
b/tests/providers/http/triggers/test_http.py
index d4433b76a6..6e0b1539cb 100644
--- a/tests/providers/http/triggers/test_http.py
+++ b/tests/providers/http/triggers/test_http.py
@@ -21,6 +21,7 @@ import base64
 import pickle
 from asyncio import Future
 from http.cookies import SimpleCookie
+from typing import Any
 from unittest import mock
 
 import pytest
@@ -39,7 +40,7 @@ TEST_METHOD = "POST"
 TEST_ENDPOINT = "endpoint"
 TEST_HEADERS = {"Authorization": "Bearer test"}
 TEST_DATA = ""
-TEST_EXTRA_OPTIONS = {}
+TEST_EXTRA_OPTIONS: dict[str, Any] = {}
 
 
 @pytest.fixture
diff --git a/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py 
b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py
index fe80cc757c..874a9a3adb 100644
--- a/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py
+++ b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py
@@ -56,6 +56,9 @@ ID = "testId"
 
 MODULE = "airflow.providers.microsoft.azure.hooks.data_factory"
 
+# TODO: FIXME: the tests here have tricky issues with typing and need a bit 
more thought to fix them
+# mypy: disable-error-code="union-attr,call-overload"
+
 
 @pytest.fixture(autouse=True)
 def setup_connections(create_mock_connections):
diff --git 
a/tests/providers/microsoft/azure/operators/test_azure_container_instances.py 
b/tests/providers/microsoft/azure/operators/test_azure_container_instances.py
index 9000ebeaaa..a0b41e305a 100644
--- 
a/tests/providers/microsoft/azure/operators/test_azure_container_instances.py
+++ 
b/tests/providers/microsoft/azure/operators/test_azure_container_instances.py
@@ -73,17 +73,20 @@ def make_mock_cg_with_missing_events(container_state):
     return container_g
 
 
+# TODO: FIXME: the assignments here seem wrong, but they do work in these mocks - should likely be done better
+
+
 def make_mock_container(state: str, exit_code: int, detail_status: str, 
events: Event | None = None):
-    container = Container(name="hello_world", image="test", resources="test")
+    container = Container(name="hello_world", image="test", resources="test")  
# type: ignore[arg-type]
     container_prop = ContainerPropertiesInstanceView()
     container_state = ContainerState()
-    container_state.state = state
-    container_state.exit_code = exit_code
-    container_state.detail_status = detail_status
-    container_prop.current_state = container_state
+    container_state.state = state  # type: ignore[assignment]
+    container_state.exit_code = exit_code  # type: ignore[assignment]
+    container_state.detail_status = detail_status  # type: ignore[assignment]
+    container_prop.current_state = container_state  # type: ignore[assignment]
     if events:
-        container_prop.events = events
-    container.instance_view = container_prop
+        container_prop.events = events  # type: ignore[assignment]
+    container.instance_view = container_prop  # type: ignore[assignment]
 
     cg = ContainerGroup(containers=[container], os_type="Linux")
 
diff --git a/tests/providers/openlineage/extractors/test_default_extractor.py 
b/tests/providers/openlineage/extractors/test_default_extractor.py
index 3e0968d820..7c2174fe5b 100644
--- a/tests/providers/openlineage/extractors/test_default_extractor.py
+++ b/tests/providers/openlineage/extractors/test_default_extractor.py
@@ -38,10 +38,10 @@ pytestmark = pytest.mark.db_test
 
 INPUTS = [Dataset(namespace="database://host:port", name="inputtable")]
 OUTPUTS = [Dataset(namespace="database://host:port", name="inputtable")]
-RUN_FACETS = {
+RUN_FACETS: dict[str, BaseFacet] = {
     "parent": ParentRunFacet.create("3bb703d1-09c1-4a42-8da5-35a0b3216072", 
"namespace", "parentjob")
 }
-JOB_FACETS = {"sql": SqlJobFacet(query="SELECT * FROM inputtable")}
+JOB_FACETS: dict[str, BaseFacet] = {"sql": SqlJobFacet(query="SELECT * FROM 
inputtable")}
 
 
 @define
@@ -49,7 +49,7 @@ class CompleteRunFacet(BaseFacet):
     finished: bool = field(default=False)
 
 
-FINISHED_FACETS = {"complete": CompleteRunFacet(True)}
+FINISHED_FACETS: dict[str, BaseFacet] = {"complete": CompleteRunFacet(True)}
 
 
 class ExampleOperator(BaseOperator):
@@ -142,7 +142,7 @@ class OperatorWrongOperatorLineageClass(BaseOperator):
 
 
 class BrokenOperator(BaseOperator):
-    get_openlineage_facets = []
+    get_openlineage_facets: list[BaseFacet] = []
 
     def execute(self, context) -> Any:
         pass
diff --git a/tests/providers/opensearch/conftest.py 
b/tests/providers/opensearch/conftest.py
index b369fa30b8..47a447188e 100644
--- a/tests/providers/opensearch/conftest.py
+++ b/tests/providers/opensearch/conftest.py
@@ -19,18 +19,22 @@ from __future__ import annotations
 from typing import Any
 
 import pytest
+from opensearchpy import OpenSearch
 
 from airflow.models import Connection
 from airflow.providers.opensearch.hooks.opensearch import OpenSearchHook
 from airflow.utils import db
 
+# TODO: FIXME - these mocks have overrides that are not used, but they also do not make mypy happy
+# mypy: disable-error-code="override"
+
 MOCK_RETURN = {"status": "test"}
 
 
 class MockSearch(OpenSearchHook):
     # Mock class to override the Hook for monkeypatching
-    def client(self) -> None:
-        return None
+    def client(self) -> OpenSearch:
+        return OpenSearch()
 
     def search(self, query: dict, index_name: str, **kwargs: Any) -> Any:
         return MOCK_RETURN
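The conftest change above makes the mocked client() return an OpenSearch instance so its signature stays compatible with the hook method it overrides; incompatible overrides are exactly what mypy's `override` error code (disabled at module level here for the remaining mocks) flags. A small sketch with hypothetical classes:

    from __future__ import annotations

    class BaseHook:  # hypothetical base class
        def client(self) -> dict:
            return {}

    class MockHook(BaseHook):
        # Declaring `-> None` here would narrow the return type and trigger
        # mypy's `override` error; returning a compatible value keeps the
        # subclass substitutable for the base.
        def client(self) -> dict:
            return {"mock": True}

    print(MockHook().client())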
diff --git a/tests/providers/opensearch/operators/test_opensearch.py 
b/tests/providers/opensearch/operators/test_opensearch.py
index 23528c6f93..706112fef6 100644
--- a/tests/providers/opensearch/operators/test_opensearch.py
+++ b/tests/providers/opensearch/operators/test_opensearch.py
@@ -36,7 +36,8 @@ EXPECTED_SEARCH_RETURN = {"status": "test"}
 
 
 class FakeDocument(Document):
-    title = Text(fields={"raw": Keyword()})
+    # TODO: FIXME - this Fake document has something tricky about typing
+    title = Text(fields={"raw": Keyword()})  # type: ignore[call-arg]
     author = Text()
     published = Text()
 
diff --git a/tests/providers/sftp/sensors/test_sftp.py 
b/tests/providers/sftp/sensors/test_sftp.py
index 55d5e1ee70..fb98b898c8 100644
--- a/tests/providers/sftp/sensors/test_sftp.py
+++ b/tests/providers/sftp/sensors/test_sftp.py
@@ -29,6 +29,9 @@ from airflow.exceptions import AirflowSkipException
 from airflow.providers.sftp.sensors.sftp import SFTPSensor
 from airflow.sensors.base import PokeReturnValue
 
+# Ignore missing args provided by default_args
+# mypy: disable-error-code="arg-type"
+
 
 class TestSFTPSensor:
     @patch("airflow.providers.sftp.sensors.sftp.SFTPHook")
diff --git a/tests/system/providers/alibaba/example_adb_spark_batch.py 
b/tests/system/providers/alibaba/example_adb_spark_batch.py
index 6185338a9d..3deb1c9473 100644
--- a/tests/system/providers/alibaba/example_adb_spark_batch.py
+++ b/tests/system/providers/alibaba/example_adb_spark_batch.py
@@ -16,14 +16,15 @@
 # under the License.
 from __future__ import annotations
 
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
 import os
 from datetime import datetime
 
 from airflow.models.dag import DAG
 from airflow.providers.alibaba.cloud.operators.analyticdb_spark import 
AnalyticDBSparkBatchOperator
 
+# Ignore missing args provided by default_args
+# mypy: disable-error-code="call-arg"
+
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 DAG_ID = "adb_spark_batch_dag"
 # [START howto_operator_adb_spark_batch]
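The Alibaba example DAGs above and below carried a standalone "# type: ignore[call-arg]" comment on its own line after the imports; a trailing ignore only applies to the physical line it sits on, so a bare comment line suppresses nothing, which is why these files still failed the check. The commit swaps it for the module-level form. A minimal sketch of that form, with a hypothetical helper:

    # mypy: disable-error-code="call-arg"
    from __future__ import annotations

    def build_task(task_id: str, owner: str = "airflow") -> str:
        # hypothetical stand-in for an operator whose remaining arguments
        # would normally come from default_args at DAG-parse time
        return f"{task_id}:{owner}"

    # Every call in the module is covered by the directive above; no
    # per-line "# type: ignore[call-arg]" comments are needed.
    task_one = build_task("task1")
    task_two = build_task("task2")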
diff --git a/tests/system/providers/alibaba/example_adb_spark_sql.py 
b/tests/system/providers/alibaba/example_adb_spark_sql.py
index 923db9c7ee..beff440608 100644
--- a/tests/system/providers/alibaba/example_adb_spark_sql.py
+++ b/tests/system/providers/alibaba/example_adb_spark_sql.py
@@ -16,14 +16,15 @@
 # under the License.
 from __future__ import annotations
 
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
 import os
 from datetime import datetime
 
 from airflow.models.dag import DAG
 from airflow.providers.alibaba.cloud.operators.analyticdb_spark import 
AnalyticDBSparkSQLOperator
 
+# Ignore missing args provided by default_args
+# mypy: disable-error-code="call-arg"
+
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 DAG_ID = "adb_spark_sql_dag"
 # [START howto_operator_adb_spark_sql]
diff --git a/tests/system/providers/alibaba/example_oss_bucket.py 
b/tests/system/providers/alibaba/example_oss_bucket.py
index 3baffc9df4..6a48f05e95 100644
--- a/tests/system/providers/alibaba/example_oss_bucket.py
+++ b/tests/system/providers/alibaba/example_oss_bucket.py
@@ -16,8 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
 import os
 from datetime import datetime
 
@@ -26,6 +24,7 @@ from airflow.providers.alibaba.cloud.operators.oss import 
OSSCreateBucketOperato
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 DAG_ID = "oss_bucket_dag"
+REGION = os.environ.get("REGION", "default_region")
 # [START howto_operator_oss_bucket]
 with DAG(
     dag_id=DAG_ID,
@@ -35,9 +34,9 @@ with DAG(
     tags=["example"],
     catchup=False,
 ) as dag:
-    create_bucket = OSSCreateBucketOperator(task_id="task1")
+    create_bucket = OSSCreateBucketOperator(task_id="task1", region=REGION)
 
-    delete_bucket = OSSDeleteBucketOperator(task_id="task2")
+    delete_bucket = OSSDeleteBucketOperator(task_id="task2", region=REGION)
 
     create_bucket >> delete_bucket
 
diff --git a/tests/system/providers/alibaba/example_oss_object.py 
b/tests/system/providers/alibaba/example_oss_object.py
index 315983d622..002b23d943 100644
--- a/tests/system/providers/alibaba/example_oss_object.py
+++ b/tests/system/providers/alibaba/example_oss_object.py
@@ -16,8 +16,6 @@
 # under the License.
 from __future__ import annotations
 
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
 import os
 from datetime import datetime
 
@@ -29,6 +27,9 @@ from airflow.providers.alibaba.cloud.operators.oss import (
     OSSUploadObjectOperator,
 )
 
+# Ignore missing args provided by default_args
+# mypy: disable-error-code="call-arg"
+
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 DAG_ID = "oss_object_dag"
 with DAG(
diff --git a/tests/system/providers/amazon/aws/example_dynamodb.py 
b/tests/system/providers/amazon/aws/example_dynamodb.py
index 5f63db5a6c..8316cab0e6 100644
--- a/tests/system/providers/amazon/aws/example_dynamodb.py
+++ b/tests/system/providers/amazon/aws/example_dynamodb.py
@@ -27,6 +27,9 @@ from airflow.providers.amazon.aws.sensors.dynamodb import 
DynamoDBValueSensor
 from airflow.utils.trigger_rule import TriggerRule
 from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, 
SystemTestContextBuilder
 
+# TODO: FIXME: the argument types here seem somewhat tricky to fix
+# mypy: disable-error-code="arg-type"
+
 DAG_ID = "example_dynamodbvaluesensor"
 sys_test_context_task = SystemTestContextBuilder().build()
 
diff --git a/tests/system/providers/amazon/aws/example_eks_templated.py 
b/tests/system/providers/amazon/aws/example_eks_templated.py
index e51783b53e..7ad8bc77c2 100644
--- a/tests/system/providers/amazon/aws/example_eks_templated.py
+++ b/tests/system/providers/amazon/aws/example_eks_templated.py
@@ -29,9 +29,6 @@ from airflow.providers.amazon.aws.operators.eks import (
     EksPodOperator,
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, 
EksNodegroupStateSensor
-
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 from tests.system.providers.amazon.aws.utils import SystemTestContextBuilder
 
 sys_test_context_task = SystemTestContextBuilder().build()
diff --git 
a/tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py 
b/tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py
index 9e19c7594f..9cce50b9ba 100644
--- a/tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py
+++ b/tests/system/providers/amazon/aws/example_eks_with_fargate_profile.py
@@ -29,9 +29,6 @@ from airflow.providers.amazon.aws.operators.eks import (
     EksPodOperator,
 )
 from airflow.providers.amazon.aws.sensors.eks import EksClusterStateSensor, 
EksFargateProfileStateSensor
-
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
 from airflow.utils.trigger_rule import TriggerRule
 from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, 
SystemTestContextBuilder
 from tests.system.providers.amazon.aws.utils.k8s import 
get_describe_pod_operator
diff --git a/tests/system/providers/amazon/aws/example_eks_with_nodegroups.py 
b/tests/system/providers/amazon/aws/example_eks_with_nodegroups.py
index a0d7167290..78a72b68d1 100644
--- a/tests/system/providers/amazon/aws/example_eks_with_nodegroups.py
+++ b/tests/system/providers/amazon/aws/example_eks_with_nodegroups.py
@@ -36,9 +36,6 @@ from airflow.utils.trigger_rule import TriggerRule
 from tests.system.providers.amazon.aws.utils import ENV_ID_KEY, 
SystemTestContextBuilder
 from tests.system.providers.amazon.aws.utils.k8s import 
get_describe_pod_operator
 
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
-
 DAG_ID = "example_eks_with_nodegroups"
 
 # Externally fetched variables:
diff --git a/tests/system/providers/amazon/aws/example_emr.py 
b/tests/system/providers/amazon/aws/example_emr.py
index b24f21dda7..32f10f6f1b 100644
--- a/tests/system/providers/amazon/aws/example_emr.py
+++ b/tests/system/providers/amazon/aws/example_emr.py
@@ -20,6 +20,7 @@ from __future__ import annotations
 
 import json
 from datetime import datetime
+from typing import Any
 
 import boto3
 
@@ -68,7 +69,7 @@ SPARK_STEPS = [
     }
 ]
 
-JOB_FLOW_OVERRIDES = {
+JOB_FLOW_OVERRIDES: dict[str, Any] = {
     "Name": "PiCalc",
     "ReleaseLabel": "emr-6.7.0",
     "Applications": [{"Name": "Spark"}],
diff --git a/tests/system/providers/amazon/aws/example_sagemaker_pipeline.py 
b/tests/system/providers/amazon/aws/example_sagemaker_pipeline.py
index 07eac5bae5..48bf92fa7f 100644
--- a/tests/system/providers/amazon/aws/example_sagemaker_pipeline.py
+++ b/tests/system/providers/amazon/aws/example_sagemaker_pipeline.py
@@ -104,7 +104,7 @@ with DAG(
     chain(
         # TEST SETUP
         test_context,
-        create_pipeline,
+        create_pipeline,  # type: ignore[arg-type]
         # TEST BODY
         start_pipeline1,
         start_pipeline2,
diff --git a/tests/system/providers/apache/cassandra/example_cassandra_dag.py 
b/tests/system/providers/apache/cassandra/example_cassandra_dag.py
index 757d0f7e88..1a305c5014 100644
--- a/tests/system/providers/apache/cassandra/example_cassandra_dag.py
+++ b/tests/system/providers/apache/cassandra/example_cassandra_dag.py
@@ -29,11 +29,12 @@ from airflow.providers.apache.cassandra.sensors.record 
import CassandraRecordSen
 from airflow.providers.apache.cassandra.sensors.table import 
CassandraTableSensor
 
 # Ignore missing args provided by default_args
-# type: ignore[call-arg]
+# mypy: disable-error-code="call-arg"
 
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 DAG_ID = "example_cassandra_operator"
+
 # [START howto_operator_cassandra_sensors]
 with DAG(
     dag_id=DAG_ID,
diff --git a/tests/system/providers/apache/spark/example_pyspark.py 
b/tests/system/providers/apache/spark/example_pyspark.py
index c671cb40e3..cc18911a38 100644
--- a/tests/system/providers/apache/spark/example_pyspark.py
+++ b/tests/system/providers/apache/spark/example_pyspark.py
@@ -67,7 +67,8 @@ def example_pyspark():
 
 
 # work around pre-commit
-dag = example_pyspark()
+dag = example_pyspark()  # type: ignore
+
 
 from tests.system.utils import get_test_run  # noqa: E402
 
diff --git a/tests/system/providers/asana/example_asana.py 
b/tests/system/providers/asana/example_asana.py
index 9f0e6801c1..4a528ffcf2 100644
--- a/tests/system/providers/asana/example_asana.py
+++ b/tests/system/providers/asana/example_asana.py
@@ -31,7 +31,7 @@ from airflow.providers.asana.operators.asana_tasks import (
 )
 
 # Ignore missing args provided by default_args
-# type: ignore[call-arg]
+# mypy: disable-error-code="call-arg"
 
 
 ASANA_TASK_TO_UPDATE = os.environ.get("ASANA_TASK_TO_UPDATE", "update_task")
@@ -41,7 +41,7 @@ ASANA_TASK_TO_DELETE = os.environ.get("ASANA_TASK_TO_DELETE", 
"delete_task")
 # project ID in the AsanaFindTaskOperator example below
 ASANA_PROJECT_ID_OVERRIDE = os.environ.get("ASANA_PROJECT_ID_OVERRIDE", 
"test_project")
 # This connection should specify a personal access token and a default project 
ID
-CONN_ID = os.environ.get("ASANA_CONNECTION_ID")
+CONN_ID = os.environ.get("ASANA_CONNECTION_ID", "asana_default")
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 DAG_ID = "example_asana"
 
diff --git 
a/tests/system/providers/google/cloud/automl/example_automl_translation.py 
b/tests/system/providers/google/cloud/automl/example_automl_translation.py
index 2814da0cfe..d3e55a64bd 100644
--- a/tests/system/providers/google/cloud/automl/example_automl_translation.py
+++ b/tests/system/providers/google/cloud/automl/example_automl_translation.py
@@ -24,7 +24,8 @@ import os
 from datetime import datetime
 from typing import cast
 
-from google.cloud import storage
+# The storage module cannot be imported yet 
https://github.com/googleapis/python-storage/issues/393
+from google.cloud import storage  # type: ignore[attr-defined]
 
 from airflow.decorators import task
 from airflow.models.dag import DAG
diff --git 
a/tests/system/providers/google/cloud/automl/example_automl_vision_classification.py
 
b/tests/system/providers/google/cloud/automl/example_automl_vision_classification.py
index 9ff541893d..39a93f254b 100644
--- 
a/tests/system/providers/google/cloud/automl/example_automl_vision_classification.py
+++ 
b/tests/system/providers/google/cloud/automl/example_automl_vision_classification.py
@@ -24,7 +24,7 @@ import os
 from datetime import datetime
 from typing import cast
 
-from google.cloud import storage
+from google.cloud import storage  # type: ignore[attr-defined]
 
 from airflow.decorators import task
 from airflow.models.dag import DAG
diff --git 
a/tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py 
b/tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py
index 0e07256fdb..265b9e0a4a 100644
--- a/tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py
+++ b/tests/system/providers/google/cloud/azure/example_azure_blob_to_gcs.py
@@ -26,10 +26,6 @@ from airflow.providers.microsoft.azure.sensors.wasb import (
     WasbBlobSensor,
 )
 
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
-
-
 BLOB_NAME = os.environ.get("AZURE_BLOB_NAME", "file.txt")
 AZURE_CONTAINER_NAME = os.environ.get("AZURE_CONTAINER_NAME", "airflow")
 GCP_BUCKET_FILE_PATH = os.environ.get("GCP_BUCKET_FILE_PATH", "file.txt")
diff --git a/tests/system/providers/google/cloud/bigtable/example_bigtable.py 
b/tests/system/providers/google/cloud/bigtable/example_bigtable.py
index 16b63ee2a7..af06226637 100644
--- a/tests/system/providers/google/cloud/bigtable/example_bigtable.py
+++ b/tests/system/providers/google/cloud/bigtable/example_bigtable.py
@@ -46,6 +46,8 @@ from __future__ import annotations
 import os
 from datetime import datetime
 
+from google.cloud.bigtable import enums
+
 from airflow.decorators import task_group
 from airflow.models.dag import DAG
 from airflow.providers.google.cloud.operators.bigtable import (
@@ -67,15 +69,15 @@ DAG_ID = "bigtable"
 CBT_INSTANCE_ID = f"bigtable-instance-id-{ENV_ID}"
 CBT_INSTANCE_DISPLAY_NAME = "Instance-name"
 CBT_INSTANCE_DISPLAY_NAME_UPDATED = f"{CBT_INSTANCE_DISPLAY_NAME} - updated"
-CBT_INSTANCE_TYPE = 2
+CBT_INSTANCE_TYPE = enums.Instance.Type.DEVELOPMENT
 CBT_INSTANCE_TYPE_PROD = 1
-CBT_INSTANCE_LABELS = {}
+CBT_INSTANCE_LABELS: dict[str, str] = {}
 CBT_INSTANCE_LABELS_UPDATED = {"env": "prod"}
 CBT_CLUSTER_ID = f"bigtable-cluster-id-{ENV_ID}"
 CBT_CLUSTER_ZONE = "europe-west1-b"
 CBT_CLUSTER_NODES = 3
 CBT_CLUSTER_NODES_UPDATED = 5
-CBT_CLUSTER_STORAGE_TYPE = 2
+CBT_CLUSTER_STORAGE_TYPE = enums.StorageType.HDD
 CBT_TABLE_ID = f"bigtable-table-id{ENV_ID}"
 CBT_POKE_INTERVAL = 60
 
@@ -94,10 +96,10 @@ with DAG(
         main_cluster_id=CBT_CLUSTER_ID,
         main_cluster_zone=CBT_CLUSTER_ZONE,
         instance_display_name=CBT_INSTANCE_DISPLAY_NAME,
-        instance_type=CBT_INSTANCE_TYPE,
+        instance_type=CBT_INSTANCE_TYPE,  # type: ignore[arg-type]
         instance_labels=CBT_INSTANCE_LABELS,
         cluster_nodes=None,
-        cluster_storage_type=CBT_CLUSTER_STORAGE_TYPE,
+        cluster_storage_type=CBT_CLUSTER_STORAGE_TYPE,  # type: 
ignore[arg-type]
         task_id="create_instance_task",
     )
     create_instance_task2 = BigtableCreateInstanceOperator(
@@ -105,10 +107,10 @@ with DAG(
         main_cluster_id=CBT_CLUSTER_ID,
         main_cluster_zone=CBT_CLUSTER_ZONE,
         instance_display_name=CBT_INSTANCE_DISPLAY_NAME,
-        instance_type=CBT_INSTANCE_TYPE,
+        instance_type=CBT_INSTANCE_TYPE,  # type: ignore[arg-type]
         instance_labels=CBT_INSTANCE_LABELS,
         cluster_nodes=CBT_CLUSTER_NODES,
-        cluster_storage_type=CBT_CLUSTER_STORAGE_TYPE,
+        cluster_storage_type=CBT_CLUSTER_STORAGE_TYPE,  # type: 
ignore[arg-type]
         task_id="create_instance_task2",
     )
     # [END howto_operator_gcp_bigtable_instance_create]
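The Bigtable example replaces magic integers with the client library's enum members, which both documents intent and matches the types the operator ultimately passes to the API. The same idea with a hypothetical enum:

    from __future__ import annotations

    import enum

    class StorageType(enum.IntEnum):  # hypothetical mirror of an API enum
        SSD = 1
        HDD = 2

    def create_cluster(storage_type: StorageType) -> str:
        return f"cluster(storage={storage_type.name})"

    # Passing the bare int 2 would be an `arg-type` error under mypy; the
    # enum member carries the same value and type-checks cleanly.
    print(create_cluster(StorageType.HDD))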
diff --git 
a/tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py 
b/tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py
index 3241070b79..362b0cc1ba 100644
--- a/tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py
+++ b/tests/system/providers/google/cloud/cloud_batch/example_cloud_batch.py
@@ -35,7 +35,7 @@ from airflow.providers.google.cloud.operators.cloud_batch 
import (
 )
 from airflow.utils.trigger_rule import TriggerRule
 
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 DAG_ID = "example_cloud_batch"
 
 region = "us-central1"
diff --git a/tests/system/providers/google/cloud/cloud_run/example_cloud_run.py 
b/tests/system/providers/google/cloud/cloud_run/example_cloud_run.py
index 08c82d6eb0..4230e69cf7 100644
--- a/tests/system/providers/google/cloud/cloud_run/example_cloud_run.py
+++ b/tests/system/providers/google/cloud/cloud_run/example_cloud_run.py
@@ -37,7 +37,7 @@ from airflow.providers.google.cloud.operators.cloud_run 
import (
 )
 from airflow.utils.trigger_rule import TriggerRule
 
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 DAG_ID = "example_cloud_run"
 
 region = "us-central1"
diff --git 
a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_mysql.py
 
b/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_mysql.py
index f705a93eac..f66242dd2a 100644
--- 
a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_mysql.py
+++ 
b/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_mysql.py
@@ -25,7 +25,6 @@ import os
 from collections import namedtuple
 from copy import deepcopy
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from googleapiclient import discovery
 
@@ -42,8 +41,8 @@ from airflow.providers.google.cloud.operators.cloud_sql 
import (
 )
 from airflow.utils.trigger_rule import TriggerRule
 
-if TYPE_CHECKING:
-    from airflow.settings import Session
+# mypy: disable-error-code="call-overload"
+
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
@@ -203,10 +202,11 @@ with DAG(
             for ip_item in response.get("ipAddresses", []):
                 if ip_item["type"] == "PRIMARY":
                     return ip_item["ipAddress"]
+            return None
 
     @task
     def create_connection(connection_id: str, connection_kwargs: dict, 
use_public_ip: bool, **kwargs) -> None:
-        session: Session = settings.Session()
+        session = settings.Session()
         if session.query(Connection).filter(Connection.conn_id == 
connection_id).first():
             log.warning("Connection '%s' already exists", connection_id)
             return None
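The "session: Session = settings.Session()" annotations dropped here and in the GCS examples below fail type checking because the Session being imported is a session factory object rather than a class, and a variable cannot be used as a type; letting mypy infer the type from the assignment is enough. A rough sketch of the same situation in plain SQLAlchemy:

    from __future__ import annotations

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    engine = create_engine("sqlite://")
    Session = sessionmaker(bind=engine)  # a factory *object*, not a class

    # `session: Session = Session()` would make mypy complain that a
    # variable is not valid as a type; plain inference works fine.
    session = Session()
    session.close()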
diff --git 
a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_postgres.py
 
b/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_postgres.py
index b826302bc2..fae820ed59 100644
--- 
a/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_postgres.py
+++ 
b/tests/system/providers/google/cloud/cloud_sql/example_cloud_sql_query_postgres.py
@@ -25,7 +25,6 @@ import os
 from collections import namedtuple
 from copy import deepcopy
 from datetime import datetime
-from typing import TYPE_CHECKING
 
 from googleapiclient import discovery
 
@@ -42,9 +41,6 @@ from airflow.providers.google.cloud.operators.cloud_sql 
import (
 )
 from airflow.utils.trigger_rule import TriggerRule
 
-if TYPE_CHECKING:
-    from airflow.settings import Session
-
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
 DAG_ID = "cloudsql-query-pg"
@@ -208,10 +204,11 @@ with DAG(
             for ip_item in response.get("ipAddresses", []):
                 if ip_item["type"] == "PRIMARY":
                     return ip_item["ipAddress"]
+            return None
 
     @task
     def create_connection(connection_id: str, connection_kwargs: dict, 
use_public_ip: bool, **kwargs) -> None:
-        session: Session = settings.Session()
+        session = settings.Session()
         if session.query(Connection).filter(Connection.conn_id == 
connection_id).first():
             log.warning("Connection '%s' already exists", connection_id)
             return None
diff --git 
a/tests/system/providers/google/cloud/composer/example_cloud_composer.py 
b/tests/system/providers/google/cloud/composer/example_cloud_composer.py
index e2dd19179b..ab9c6a0e1b 100644
--- a/tests/system/providers/google/cloud/composer/example_cloud_composer.py
+++ b/tests/system/providers/google/cloud/composer/example_cloud_composer.py
@@ -33,7 +33,7 @@ from airflow.providers.google.cloud.operators.cloud_composer 
import (
 from airflow.utils.trigger_rule import TriggerRule
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 
 DAG_ID = "example_composer"
 
diff --git 
a/tests/system/providers/google/cloud/composer/example_cloud_composer_deferrable.py
 
b/tests/system/providers/google/cloud/composer/example_cloud_composer_deferrable.py
index 7a2ebc43c5..1a69e12d7b 100644
--- 
a/tests/system/providers/google/cloud/composer/example_cloud_composer_deferrable.py
+++ 
b/tests/system/providers/google/cloud/composer/example_cloud_composer_deferrable.py
@@ -31,7 +31,7 @@ from airflow.providers.google.cloud.sensors.cloud_composer 
import CloudComposerE
 from airflow.utils.trigger_rule import TriggerRule
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 
 DAG_ID = "example_composer_deferrable"
 
diff --git 
a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py
 
b/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py
index ae8f7f6646..8bced99ff8 100644
--- 
a/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py
+++ 
b/tests/system/providers/google/cloud/data_loss_prevention/example_dlp_info_types.py
@@ -26,6 +26,7 @@ import os
 from datetime import datetime
 from pathlib import Path
 
+from google.cloud.dlp_v2 import StoredInfoTypeConfig
 from google.cloud.dlp_v2.types import ContentItem, InspectConfig, 
InspectTemplate
 
 from airflow.models.dag import DAG
@@ -64,16 +65,18 @@ OBJECT_GCS_URI = f"gs://{BUCKET_NAME}/{FILE_SET}"
 OBJECT_GCS_OUTPUT_URI = OBJECT_GCS_URI + FILE_NAME
 
 CUSTOM_INFO_TYPE_ID = "custom_info_type"
-CUSTOM_INFO_TYPES = {
-    "large_custom_dictionary": {
-        "output_path": {"path": OBJECT_GCS_OUTPUT_URI},
-        "cloud_storage_file_set": {"url": OBJECT_GCS_URI + "*"},
+CUSTOM_INFO_TYPES = StoredInfoTypeConfig(
+    {
+        "large_custom_dictionary": {
+            "output_path": {"path": OBJECT_GCS_OUTPUT_URI},
+            "cloud_storage_file_set": {"url": f"{OBJECT_GCS_URI}*"},
+        }
     }
-}
+)
 UPDATE_CUSTOM_INFO_TYPE = {
     "large_custom_dictionary": {
         "output_path": {"path": OBJECT_GCS_OUTPUT_URI},
-        "cloud_storage_file_set": {"url": OBJECT_GCS_URI + "*"},
+        "cloud_storage_file_set": {"url": f"{OBJECT_GCS_URI}*"},
     }
 }
 
diff --git 
a/tests/system/providers/google/cloud/datafusion/example_datafusion.py 
b/tests/system/providers/google/cloud/datafusion/example_datafusion.py
index 71eb4aba6d..38f443a706 100644
--- a/tests/system/providers/google/cloud/datafusion/example_datafusion.py
+++ b/tests/system/providers/google/cloud/datafusion/example_datafusion.py
@@ -159,7 +159,10 @@ PIPELINE = {
 }
 # [END howto_data_fusion_env_variables]
 
-CloudDataFusionCreatePipelineOperator.template_fields += ("pipeline",)
+CloudDataFusionCreatePipelineOperator.template_fields = (
+    *CloudDataFusionCreatePipelineOperator.template_fields,
+    "pipeline",
+)
 
 
 with DAG(
@@ -316,7 +319,7 @@ with DAG(
         # TEST BODY
         >> create_instance
         >> get_instance
-        >> get_artifacts_versions()
+        >> get_artifacts_versions()  # type: ignore[call-arg]
         >> restart_instance
         >> update_instance
         >> create_pipeline
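The template_fields change at the top of this file replaces in-place "+=" on the operator's class attribute with an explicit rebuild. Operator template_fields are typically annotated as Sequence[str], which supports neither "+" nor "+=" as far as mypy is concerned, while assigning a freshly built tuple is fine. A small sketch with a hypothetical class:

    from __future__ import annotations

    from typing import Sequence

    class ExampleOperator:  # hypothetical operator-like class
        template_fields: Sequence[str] = ("bucket", "object_name")

    # `ExampleOperator.template_fields += ("pipeline",)` is rejected by mypy
    # because Sequence defines no __add__; rebuilding the tuple by unpacking
    # has the same runtime effect and type-checks.
    ExampleOperator.template_fields = (
        *ExampleOperator.template_fields,
        "pipeline",
    )

    print(ExampleOperator.template_fields)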
diff --git a/tests/system/providers/google/cloud/dataplex/example_dataplex.py 
b/tests/system/providers/google/cloud/dataplex/example_dataplex.py
index c1166ab658..9edf4e75f4 100644
--- a/tests/system/providers/google/cloud/dataplex/example_dataplex.py
+++ b/tests/system/providers/google/cloud/dataplex/example_dataplex.py
@@ -41,7 +41,7 @@ from airflow.providers.google.cloud.sensors.dataplex import 
DataplexTaskStateSen
 from airflow.utils.trigger_rule import TriggerRule
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "project_id")
 
 DAG_ID = "example_dataplex"
 
diff --git 
a/tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py 
b/tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
index df503d531e..b92a41128b 100644
--- a/tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
+++ b/tests/system/providers/google/cloud/dataplex/example_dataplex_dq.py
@@ -23,6 +23,7 @@ import os
 from datetime import datetime
 
 from google.cloud import dataplex_v1
+from google.cloud.dataplex_v1 import DataQualitySpec
 from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow.models.baseoperator import chain
@@ -50,7 +51,7 @@ from airflow.providers.google.cloud.sensors.dataplex import 
DataplexDataQualityJ
 from airflow.utils.trigger_rule import TriggerRule
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 
 DAG_ID = "example_dataplex_data_quality"
 
@@ -109,18 +110,20 @@ EXAMPLE_DATA_SCAN.data.entity = (
 EXAMPLE_DATA_SCAN.data.resource = (
     
f"//bigquery.googleapis.com/projects/{PROJECT_ID}/datasets/{DATASET}/tables/{TABLE_1}"
 )
-EXAMPLE_DATA_SCAN.data_quality_spec = {
-    "rules": [
-        {
-            "range_expectation": {
-                "min_value": "0",
-                "max_value": "10000",
-            },
-            "column": "value",
-            "dimension": "VALIDITY",
-        }
-    ],
-}
+EXAMPLE_DATA_SCAN.data_quality_spec = DataQualitySpec(
+    {
+        "rules": [
+            {
+                "range_expectation": {
+                    "min_value": "0",
+                    "max_value": "10000",
+                },
+                "column": "value",
+                "dimension": "VALIDITY",
+            }
+        ],
+    }
+)
 # [END howto_dataplex_data_quality_configuration]
 UPDATE_MASK = FieldMask(paths=["data_quality_spec"])
 ENTITY = 
f"projects/{PROJECT_ID}/locations/{REGION}/lakes/{LAKE_ID}/zones/{ZONE_ID}/entities/{TABLE_1}"
diff --git a/tests/system/providers/google/cloud/dataprep/example_dataprep.py 
b/tests/system/providers/google/cloud/dataprep/example_dataprep.py
index 126e3f4c9b..1ac9d98082 100644
--- a/tests/system/providers/google/cloud/dataprep/example_dataprep.py
+++ b/tests/system/providers/google/cloud/dataprep/example_dataprep.py
@@ -100,7 +100,7 @@ with models.DAG(
             conn_type="dataprep",
             extra={"token": DATAPREP_TOKEN},
         )
-        session: Session = Session()
+        session = Session()
         if session.query(Connection).filter(Connection.conn_id == 
CONNECTION_ID).first():
             log.warning("Connection %s already exists", CONNECTION_ID)
             return None
diff --git 
a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
 
b/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
index ed96efa4b0..5c639e5ede 100644
--- 
a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
+++ 
b/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
@@ -34,7 +34,7 @@ from airflow.utils.trigger_rule import TriggerRule
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 DAG_ID = "dataproc_batch_ps"
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
 REGION = "europe-west1"
 CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
diff --git 
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
 
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
index 9761326c4c..8f4ddb6a10 100644
--- 
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
+++ 
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
@@ -37,7 +37,7 @@ from airflow.utils.trigger_rule import TriggerRule
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 DAG_ID = "dataproc_cluster_generation"
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 
 BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
 CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
diff --git a/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py 
b/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py
index 37e2f0265d..dfd904a1b9 100644
--- a/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py
+++ b/tests/system/providers/google/cloud/gcs/example_calendar_to_gcs.py
@@ -66,7 +66,7 @@ with DAG(
         conn_extra_json = json.dumps(conn_extra)
         conn.set_extra(conn_extra_json)
 
-        session: Session = Session()
+        session = Session()
         session.add(conn)
         session.commit()
 
diff --git a/tests/system/providers/google/cloud/gcs/example_firestore.py 
b/tests/system/providers/google/cloud/gcs/example_firestore.py
index acc67d80ed..b3eaac6c23 100644
--- a/tests/system/providers/google/cloud/gcs/example_firestore.py
+++ b/tests/system/providers/google/cloud/gcs/example_firestore.py
@@ -43,7 +43,7 @@ from airflow.providers.google.firebase.operators.firestore 
import CloudFirestore
 from airflow.utils.trigger_rule import TriggerRule
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 DAG_ID = "example_firestore_to_gcp"
 
 BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py 
b/tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py
index 7b4baba166..8aef24ea91 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py
+++ b/tests/system/providers/google/cloud/gcs/example_gcs_to_gdrive.py
@@ -81,7 +81,7 @@ with DAG(
         )
         conn.set_extra(conn_extra_json)
 
-        session: Session = Session()
+        session = Session()
         if session.query(Connection).filter(Connection.conn_id == 
CONNECTION_ID).first():
             log.warning("Connection %s already exists", CONNECTION_ID)
             return None
diff --git a/tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py 
b/tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py
index 116b3792b6..e728eb668f 100644
--- a/tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py
+++ b/tests/system/providers/google/cloud/gcs/example_gcs_to_sheets.py
@@ -69,7 +69,7 @@ with DAG(
         conn_extra_json = json.dumps(conn_extra)
         conn.set_extra(conn_extra_json)
 
-        session: Session = Session()
+        session = Session()
         session.add(conn)
         session.commit()
 
diff --git a/tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py 
b/tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py
index 883cb33054..65cbcf18a2 100644
--- a/tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py
+++ b/tests/system/providers/google/cloud/gcs/example_gdrive_to_gcs.py
@@ -75,7 +75,7 @@ with DAG(
         )
         conn.set_extra(conn_extra_json)
 
-        session: Session = Session()
+        session = Session()
         if session.query(Connection).filter(Connection.conn_id == 
CONNECTION_ID).first():
             log.warning("Connection %s already exists", CONNECTION_ID)
             return None
diff --git a/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py 
b/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
index 6d88dad384..94a77199cd 100644
--- a/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
+++ b/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
@@ -199,7 +199,7 @@ with DAG(
             password=DB_USER_PASSWORD,
             schema=DB_NAME,
         )
-        session: Session = Session()
+        session = Session()
         if session.query(Connection).filter(Connection.conn_id == 
CONNECTION_ID).first():
             log.warning("Connection %s already exists", CONNECTION_ID)
             return None
diff --git a/tests/system/providers/google/cloud/gcs/example_sheets.py 
b/tests/system/providers/google/cloud/gcs/example_sheets.py
index 7079c47680..37b3441263 100644
--- a/tests/system/providers/google/cloud/gcs/example_sheets.py
+++ b/tests/system/providers/google/cloud/gcs/example_sheets.py
@@ -70,7 +70,7 @@ with DAG(
         conn_extra_json = json.dumps(conn_extra)
         conn.set_extra(conn_extra_json)
 
-        session: Session = Session()
+        session = Session()
         session.add(conn)
         session.commit()
 
diff --git a/tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py 
b/tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py
index 3564d2c1e0..bb6264d2b5 100644
--- a/tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py
+++ b/tests/system/providers/google/cloud/gcs/example_sheets_to_gcs.py
@@ -68,7 +68,7 @@ with DAG(
         conn_extra_json = json.dumps(conn_extra)
         conn.set_extra(conn_extra_json)
 
-        session: Session = Session()
+        session = Session()
         session.add(conn)
         session.commit()
 
diff --git a/tests/system/providers/google/cloud/pubsub/example_pubsub.py 
b/tests/system/providers/google/cloud/pubsub/example_pubsub.py
index 167cbf04ac..3cb94f6d38 100644
--- a/tests/system/providers/google/cloud/pubsub/example_pubsub.py
+++ b/tests/system/providers/google/cloud/pubsub/example_pubsub.py
@@ -37,7 +37,7 @@ from airflow.providers.google.cloud.sensors.pubsub import 
PubSubPullSensor
 from airflow.utils.trigger_rule import TriggerRule
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 
 DAG_ID = "pubsub"
 
diff --git 
a/tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py 
b/tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py
index f40b03b698..fb9dace2db 100644
--- a/tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py
+++ b/tests/system/providers/google/cloud/pubsub/example_pubsub_deferrable.py
@@ -35,7 +35,7 @@ from airflow.providers.google.cloud.sensors.pubsub import 
PubSubPullSensor
 from airflow.utils.trigger_rule import TriggerRule
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 
 DAG_ID = "pubsub_async"
 
diff --git 
a/tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py 
b/tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py
index 5afc86e64c..76ec51881a 100644
--- 
a/tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py
+++ 
b/tests/system/providers/google/cloud/speech_to_text/example_speech_to_text.py
@@ -20,6 +20,8 @@ from __future__ import annotations
 import os
 from datetime import datetime
 
+from google.cloud.speech_v1 import RecognitionAudio, RecognitionConfig
+
 from airflow.models.dag import DAG
 from airflow.providers.google.cloud.operators.gcs import 
GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.operators.speech_to_text import 
CloudSpeechToTextRecognizeSpeechOperator
@@ -43,8 +45,8 @@ AUDIO_CONFIG = {"audio_encoding": "LINEAR16"}
 # [END howto_operator_text_to_speech_api_arguments]
 
 # [START howto_operator_speech_to_text_api_arguments]
-CONFIG = {"encoding": "LINEAR16", "language_code": "en_US"}
-AUDIO = {"uri": f"gs://{BUCKET_NAME}/{FILE_NAME}"}
+CONFIG = RecognitionConfig({"encoding": "LINEAR16", "language_code": "en_US"})
+AUDIO = RecognitionAudio({"uri": f"gs://{BUCKET_NAME}/{FILE_NAME}"})
 # [END howto_operator_speech_to_text_api_arguments]
 
 with DAG(
diff --git 
a/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py 
b/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py
index 6d25c506a9..f166dadfaf 100644
--- a/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py
+++ b/tests/system/providers/google/cloud/sql_to_sheets/example_sql_to_sheets.py
@@ -228,7 +228,7 @@ with DAG(
             schema=DB_NAME,
             port=DB_PORT,
         )
-        session: Session = Session()
+        session = Session()
         if session.query(Connection).filter(Connection.conn_id == 
DB_CONNECTION_ID).first():
             log.warning("Connection %s already exists", DB_CONNECTION_ID)
             return None
@@ -252,7 +252,7 @@ with DAG(
         conn_extra_json = json.dumps(conn_extra)
         conn.set_extra(conn_extra_json)
 
-        session: Session = Session()
+        session = Session()
         session.add(conn)
         session.commit()
 
diff --git 
a/tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py 
b/tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py
index a989af9cfd..12b491da52 100644
--- a/tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py
+++ b/tests/system/providers/google/cloud/transfers/example_gdrive_to_local.py
@@ -78,7 +78,7 @@ with DAG(
         )
         conn.set_extra(conn_extra_json)
 
-        session: Session = Session()
+        session = Session()
         if session.query(Connection).filter(Connection.conn_id == 
CONNECTION_ID).first():
             log.warning("Connection %s already exists", CONNECTION_ID)
             return None
diff --git 
a/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py 
b/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py
index a89be7e131..33e70c51ff 100644
--- a/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py
+++ b/tests/system/providers/google/cloud/transfers/example_postgres_to_gcs.py
@@ -197,7 +197,7 @@ with DAG(
             schema=DB_NAME,
             port=DB_PORT,
         )
-        session: Session = Session()
+        session = Session()
         if session.query(Connection).filter(Connection.conn_id == 
CONNECTION_ID).first():
             log.warning("Connection %s already exists", CONNECTION_ID)
             return None
diff --git 
a/tests/system/providers/google/cloud/translate_speech/example_translate_speech.py
 
b/tests/system/providers/google/cloud/translate_speech/example_translate_speech.py
index ba58833079..ff85ddc841 100644
--- 
a/tests/system/providers/google/cloud/translate_speech/example_translate_speech.py
+++ 
b/tests/system/providers/google/cloud/translate_speech/example_translate_speech.py
@@ -20,6 +20,8 @@ from __future__ import annotations
 import os
 from datetime import datetime
 
+from google.cloud.speech_v1 import RecognitionAudio, RecognitionConfig
+
 from airflow.models.dag import DAG
 from airflow.providers.google.cloud.operators.gcs import 
GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.operators.text_to_speech import 
CloudTextToSpeechSynthesizeOperator
@@ -27,7 +29,7 @@ from 
airflow.providers.google.cloud.operators.translate_speech import CloudTrans
 from airflow.utils.trigger_rule import TriggerRule
 
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
 
 DAG_ID = "example_gcp_translate_speech"
 
@@ -44,8 +46,8 @@ AUDIO_CONFIG = {"audio_encoding": "LINEAR16"}
 # [END howto_operator_text_to_speech_api_arguments]
 
 # [START howto_operator_translate_speech_arguments]
-CONFIG = {"encoding": "LINEAR16", "language_code": "en_US"}
-AUDIO = {"uri": f"gs://{BUCKET_NAME}/{FILE_NAME}"}
+CONFIG = RecognitionConfig({"encoding": "LINEAR16", "language_code": "en_US"})
+AUDIO = RecognitionAudio({"uri": f"gs://{BUCKET_NAME}/{FILE_NAME}"})
 TARGET_LANGUAGE = "pl"
 FORMAT = "text"
 MODEL = "base"
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py
index a68da5a069..de75840bf5 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_forecasting_training.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Auto ML operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py
index 9ec4049479..e204cf8f06 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_image_training.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Auto ML operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py
index 306c44edaa..f8cfc87324 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_list_training.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Auto ML operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py
index 633a357a29..2a967cba29 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_tabular_training.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Auto ML operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_text_training.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_text_training.py
index 64cd2a54ea..e5336b6943 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_text_training.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_text_training.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Auto ML operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
index ec83aa9fae..9ffe674045 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_auto_ml_video_training.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Auto ML operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
index 1b2c3e8733..6bf932dafb 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_batch_prediction_job.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Batch Prediction 
operations.
@@ -91,7 +89,7 @@ COLUMN_SPECS = {
 
 BIGQUERY_SOURCE = 
f"bq://{PROJECT_ID}.test_iowa_liquor_sales_forecasting_us.2021_sales_predict"
 GCS_DESTINATION_PREFIX = f"gs://{DATA_SAMPLE_GCS_BUCKET_NAME}/output"
-MODEL_PARAMETERS = ParseDict({}, Value())
+MODEL_PARAMETERS: dict[str, str] = {}
 
 
 with DAG(
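Swapping the ParseDict-built protobuf Value for an explicitly annotated empty dict gives mypy a concrete dict[str, str] for the templated model parameters; the previous construction produced a google.protobuf Value, which presumably did not match the type the operator expects for that field. A short sketch of both forms (the protobuf imports are shown only for comparison, and the variable names are illustrative):

    from google.protobuf.json_format import ParseDict
    from google.protobuf.struct_pb2 import Value

    # New style: a plainly typed empty mapping, straightforward for mypy to check.
    MODEL_PARAMETERS: dict[str, str] = {}

    # Previous style: an empty protobuf Value parsed from a dict.
    legacy_model_parameters = ParseDict({}, Value())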
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py
index 50f62fbf3c..9e1459c072 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_container.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Custom Jobs 
operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
index 6fa52e873a..7d19a26604 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Custom Jobs 
operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py
index e0e677c16a..e113377113 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_custom_job_python_package.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Custom Jobs 
operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py
index 71717bcf03..21b369f7c3 100644
--- a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py
+++ b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_dataset.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Dataset operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
index b3a471776a..1e2c348ee4 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_endpoint.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Endpoint Service 
operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
index 9430eaa0b8..52baa494b2 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_hyperparameter_tuning_job.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Hyperparameter Tuning 
Job operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py
index ab093392e3..7fab55ca0d 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_list_custom_jobs.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Custom Jobs 
operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
index e52d42edaa..5ddd6b6c29 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_model_service.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Model Service 
operations.
diff --git 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
index 16bc8c0216..2175d0b33e 100644
--- 
a/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
+++ 
b/tests/system/providers/google/cloud/vertex_ai/example_vertex_ai_pipeline_job.py
@@ -16,8 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-# mypy ignore arg types (for templated fields)
-# type: ignore[arg-type]
 
 """
 Example Airflow DAG for Google Vertex AI service testing Pipeline Job 
operations.
diff --git a/tests/system/providers/google/suite/example_local_to_drive.py 
b/tests/system/providers/google/suite/example_local_to_drive.py
index 2738fd5607..7a94fdfa64 100644
--- a/tests/system/providers/google/suite/example_local_to_drive.py
+++ b/tests/system/providers/google/suite/example_local_to_drive.py
@@ -72,7 +72,7 @@ with DAG(
         conn_extra_json = json.dumps({"scope": 
"https://www.googleapis.com/auth/drive"})
         conn.set_extra(conn_extra_json)
 
-        session: Session = Session()
+        session = Session()
         if session.query(Connection).filter(Connection.conn_id == 
CONNECTION_ID).first():
             log.warning("Connection %s already exists", CONNECTION_ID)
             return None
diff --git a/tests/system/providers/http/example_http.py 
b/tests/system/providers/http/example_http.py
index 9e88614b98..3cd52edf84 100644
--- a/tests/system/providers/http/example_http.py
+++ b/tests/system/providers/http/example_http.py
@@ -122,6 +122,7 @@ def get_next_page_cursor(response) -> dict | None:
     next_cursor = response.json().get("cursor")
     if next_cursor:
         return dict(data={"cursor": next_cursor})
+    return None
 
 
 task_get_paginated = HttpOperator(
diff --git a/tests/system/providers/microsoft/azure/example_adf_run_pipeline.py 
b/tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
index ee1b7d92a7..a90f8c261b 100644
--- a/tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
+++ b/tests/system/providers/microsoft/azure/example_adf_run_pipeline.py
@@ -23,6 +23,9 @@ from typing import cast
 from airflow.models import DAG
 from airflow.models.xcom_arg import XComArg
 
+# Ignore missing args provided by default_args
+# mypy: disable-error-code="call-arg"
+
 try:
     from airflow.operators.empty import EmptyOperator
 except ModuleNotFoundError:
@@ -73,7 +76,7 @@ with DAG(
     )
 
     # Performs polling on the Airflow Triggerer thus freeing up resources on 
Airflow Worker
-    pipeline_run_sensor = AzureDataFactoryPipelineRunStatusSensor(
+    pipeline_run_sensor_deferred = AzureDataFactoryPipelineRunStatusSensor(
         task_id="pipeline_run_sensor_defered",
         run_id=cast(str, XComArg(run_pipeline2, key="run_id")),
         deferrable=True,
@@ -88,15 +91,24 @@ with DAG(
 
     # [START howto_operator_adf_run_pipeline_with_deferrable_flag]
     run_pipeline3 = AzureDataFactoryRunPipelineOperator(
-        task_id="run_pipeline3", pipeline_name="pipeline1", 
parameters={"myParam": "value"}, deferrable=True
+        task_id="run_pipeline3",
+        pipeline_name="pipeline1",
+        parameters={"myParam": "value"},
+        deferrable=True,
     )
     # [END howto_operator_adf_run_pipeline_with_deferrable_flag]
 
     begin >> Label("No async wait") >> run_pipeline1
     begin >> Label("Do async wait with sensor") >> run_pipeline2
     begin >> Label("Do async wait with deferrable operator") >> run_pipeline3
-    [run_pipeline1, pipeline_run_sensor, pipeline_run_async_sensor, 
run_pipeline3] >> end
-    [run_pipeline1, pipeline_run_sensor, pipeline_run_async_sensor] >> end
+    [
+        run_pipeline1,
+        pipeline_run_sensor,
+        pipeline_run_sensor_deferred,
+        pipeline_run_async_sensor,
+        run_pipeline3,
+    ] >> end
+    [run_pipeline1, pipeline_run_sensor, pipeline_run_sensor_deferred, 
pipeline_run_async_sensor] >> end
 
     # Task dependency created via `XComArgs`:
     #   run_pipeline2 >> pipeline_run_sensor
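The `# mypy: disable-error-code="call-arg"` comment added near the imports is a file-level mypy configuration directive: unlike the removed `# type: ignore[call-arg]` comments, which only suppress errors on the line they annotate, it disables the listed error code for the whole module, so it can sit after the license header and still cover operators whose required arguments come from default_args. A minimal sketch of the placement, with a hypothetical DAG and operator standing in for the real ones:

    # Ignore missing args provided by default_args
    # mypy: disable-error-code="call-arg"

    from datetime import datetime

    from airflow.models import DAG
    from airflow.operators.bash import BashOperator

    with DAG(
        dag_id="example_file_level_suppression",
        start_date=datetime(2023, 1, 1),
        schedule=None,
        default_args={"retries": 1},
    ) as dag:
        # Without the directive, calls whose required parameters are filled in
        # via default_args could be reported as call-arg errors here.
        noop = BashOperator(task_id="noop", bash_command="true")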
diff --git a/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py 
b/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py
index 6d089c22b5..b331863554 100644
--- a/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py
+++ b/tests/system/providers/microsoft/azure/example_azure_cosmosdb.py
@@ -33,10 +33,6 @@ from airflow import DAG
 from airflow.providers.microsoft.azure.operators.cosmos import 
AzureCosmosInsertDocumentOperator
 from airflow.providers.microsoft.azure.sensors.cosmos import 
AzureCosmosDocumentSensor
 
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
-
-
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 DAG_ID = "example_azure_cosmosdb_sensor"
 
@@ -53,6 +49,7 @@ with DAG(
         task_id="check_cosmos_file",
         collection_name="airflow_example_coll",
         document_id="airflow_checkid",
+        database_name="database_name",
     )
     # [END cosmos_document_sensor]
 
@@ -60,6 +57,7 @@ with DAG(
         task_id="insert_cosmos_file",
         collection_name="new-collection",
         document={"id": "someuniqueid", "param1": "value1", "param2": 
"value2"},
+        database_name="database_name",
     )
 
     t1 >> t2
diff --git a/tests/system/providers/microsoft/azure/example_local_to_wasb.py 
b/tests/system/providers/microsoft/azure/example_local_to_wasb.py
index 6d69dbaa03..b03c11e6b3 100644
--- a/tests/system/providers/microsoft/azure/example_local_to_wasb.py
+++ b/tests/system/providers/microsoft/azure/example_local_to_wasb.py
@@ -23,10 +23,6 @@ from airflow.models import DAG
 from airflow.providers.microsoft.azure.operators.wasb_delete_blob import 
WasbDeleteBlobOperator
 from airflow.providers.microsoft.azure.transfers.local_to_wasb import 
LocalFilesystemToWasbOperator
 
-# Ignore missing args provided by default_args
-# type: ignore[call-arg]
-
-
 PATH_TO_UPLOAD_FILE = os.environ.get("AZURE_PATH_TO_UPLOAD_FILE", 
"example-text.txt")
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 DAG_ID = "example_local_to_wasb"
diff --git a/tests/system/providers/opensearch/example_opensearch.py 
b/tests/system/providers/opensearch/example_opensearch.py
index cc1f4a0367..6a8660557c 100644
--- a/tests/system/providers/opensearch/example_opensearch.py
+++ b/tests/system/providers/opensearch/example_opensearch.py
@@ -16,6 +16,9 @@
 # specific language governing permissions and limitations
 # under the License.
 
+# TODO: FIXME - there are a number of typing issues in those opensearch 
examples and they should be fixed
+# mypy: disable-error-code="call-arg,attr-defined"
+
 from __future__ import annotations
 
 from datetime import datetime, timedelta
@@ -96,7 +99,7 @@ with DAG(
 
     add_document_by_class = OpenSearchAddDocumentOperator(
         task_id="add_document_by_class",
-        doc_class=LogDocument(meta={"id": 2}, log_group_id=2, 
logger="airflow", message="hello airflow"),
+        doc_class=LogDocument(log_group_id=2, logger="airflow", message="hello 
airflow"),
     )
     # [END howto_operator_opensearch_add_document]
 
@@ -106,9 +109,9 @@ with DAG(
         index_name="system_test",
         query={"query": {"bool": {"must": {"match": {"message": "hello 
world"}}}}},
     )
-    search_object = (
-        Search().index(INDEX_NAME).filter("term", 
logger="airflow").query("match", message="hello airflow")
-    )
+    search = Search()
+    search.index = INDEX_NAME
+    search_object = search.filter("term", logger="airflow").query("match", 
message="hello airflow")
 
     search_high_level = OpenSearchQueryOperator(task_id="high_level_query", 
search_object=search_object)
     # [END howto_operator_opensearch_query]
diff --git a/tests/system/providers/sftp/example_sftp_sensor.py 
b/tests/system/providers/sftp/example_sftp_sensor.py
index 924ae945c4..934556637b 100644
--- a/tests/system/providers/sftp/example_sftp_sensor.py
+++ b/tests/system/providers/sftp/example_sftp_sensor.py
@@ -46,7 +46,11 @@ with DAG(
     tags=["example", "sftp"],
 ) as dag:
     # [START howto_operator_sftp_sensor_decorator]
-    @task.sftp_sensor(task_id="sftp_sensor", path=FULL_FILE_PATH, 
poke_interval=10)
+    @task.sftp_sensor(  # type: ignore[attr-defined]
+        task_id="sftp_sensor",  # type: ignore[attr-defined]
+        path=FULL_FILE_PATH,
+        poke_interval=10,
+    )
     def sftp_sensor_decorator():
         print("Files were successfully found!")
         # add your logic
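The inline `# type: ignore[attr-defined]` markers on the decorator call are needed because provider task decorators such as task.sftp_sensor are registered dynamically at runtime, so mypy does not see them as attributes of the task namespace; the ignore silences only that call site. A minimal sketch of the pattern, with a placeholder path:

    from airflow.decorators import task

    @task.sftp_sensor(  # type: ignore[attr-defined]
        task_id="wait_for_remote_file",
        path="/remote/path/to/file.txt",
        poke_interval=10,
    )
    def wait_for_remote_file():
        # Runs once the sensor reports the file as present.
        print("Files were successfully found!")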
