This is an automated email from the ASF dual-hosted git repository.

husseinawala pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 90e2b12d6b Upgrade mypy to 1.8.0 (#36428)
90e2b12d6b is described below

commit 90e2b12d6b99d2f7db43e45f5e8b97d3b8a43b36
Author: Hussein Awala <huss...@awala.fr>
AuthorDate: Thu Feb 8 02:01:35 2024 +0100

    Upgrade mypy to 1.8.0 (#36428)
    
    * Upgrade mypy to 1.8.0
    
    * Type annotation improvements for Mypy 1.8
    
    * Remove unneeded alias
    
    * Work around tenacity annotation
    
    * workaround for Never not callable
    
    * revert the workaround
    
    * ignore mypy on _retry_obj
    
    ---------
    
    Co-authored-by: Tzu-ping Chung <uranu...@gmail.com>
---
 .pre-commit-config.yaml                                           | 2 +-
 airflow/models/dagrun.py                                          | 3 +--
 airflow/providers/amazon/aws/hooks/base_aws.py                    | 2 +-
 airflow/providers/amazon/aws/utils/connection_wrapper.py          | 5 ++++-
 .../google/cloud/triggers/cloud_storage_transfer_service.py       | 8 ++++----
 airflow/providers/http/hooks/http.py                              | 3 ++-
 pyproject.toml                                                    | 2 +-
 7 files changed, 14 insertions(+), 11 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f1697038d4..3ada6668dd 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -172,7 +172,7 @@ repos:
         entry: 
./scripts/ci/pre_commit/pre_commit_update_common_sql_api_stubs.py
         language: python
         files: 
^scripts/ci/pre_commit/pre_commit_update_common_sql_api\.py|^airflow/providers/common/sql/.*\.pyi?$
-        additional_dependencies: ['rich>=12.4.4', 'mypy==1.2.0', 
'black==23.10.0', 'jinja2']
+        additional_dependencies: ['rich>=12.4.4', 'mypy==1.8.0', 
'black==23.10.0', 'jinja2']
         pass_filenames: false
         require_serial: true
       - id: update-black-version
diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py
index 501470fd56..6b4fdbd0cf 100644
--- a/airflow/models/dagrun.py
+++ b/airflow/models/dagrun.py
@@ -83,7 +83,6 @@ if TYPE_CHECKING:
     from airflow.utils.types import ArgNotSet
 
     CreatedTasks = TypeVar("CreatedTasks", Iterator["dict[str, Any]"], 
Iterator[TI])
-    TaskCreator = Callable[[Operator, Iterable[int]], CreatedTasks]
 
 RUN_ID_REGEX = 
r"^(?:manual|scheduled|dataset_triggered)__(?:\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\+00:00)$"
 
@@ -1217,7 +1216,7 @@ class DagRun(Base, LoggingMixin):
     def _create_tasks(
         self,
         tasks: Iterable[Operator],
-        task_creator: TaskCreator,
+        task_creator: Callable[[Operator, Iterable[int]], CreatedTasks],
         *,
         session: Session,
     ) -> CreatedTasks:
diff --git a/airflow/providers/amazon/aws/hooks/base_aws.py 
b/airflow/providers/amazon/aws/hooks/base_aws.py
index 6a42c4768f..5f1e075cc3 100644
--- a/airflow/providers/amazon/aws/hooks/base_aws.py
+++ b/airflow/providers/amazon/aws/hooks/base_aws.py
@@ -314,7 +314,7 @@ class BaseSessionFactory(LoggingMixin):
             idp_request_retry_kwargs = saml_config["idp_request_retry_kwargs"]
             self.log.info("idp_request_retry_kwargs= %s", 
idp_request_retry_kwargs)
             from requests.adapters import HTTPAdapter
-            from requests.packages.urllib3.util.retry import Retry
+            from urllib3.util.retry import Retry
 
             retry_strategy = Retry(**idp_request_retry_kwargs)
             adapter = HTTPAdapter(max_retries=retry_strategy)
diff --git a/airflow/providers/amazon/aws/utils/connection_wrapper.py 
b/airflow/providers/amazon/aws/utils/connection_wrapper.py
index 30d199f08a..c26318130b 100644
--- a/airflow/providers/amazon/aws/utils/connection_wrapper.py
+++ b/airflow/providers/amazon/aws/utils/connection_wrapper.py
@@ -166,7 +166,7 @@ class AwsConnectionWrapper(LoggingMixin):
 
         return service_config.get("endpoint_url", global_endpoint_url)
 
-    def __post_init__(self, conn: Connection):
+    def __post_init__(self, conn: Connection | AwsConnectionWrapper | 
_ConnectionMetadata | None) -> None:
         if isinstance(conn, type(self)):
             # For every field with init=False we copy reference value from 
original wrapper
             # For every field with init=True we use init values if it not 
equal default
@@ -193,6 +193,9 @@ class AwsConnectionWrapper(LoggingMixin):
         elif not conn:
             return
 
+        if TYPE_CHECKING:
+            assert isinstance(conn, (Connection, _ConnectionMetadata))
+
         # Assign attributes from AWS Connection
         self.conn_id = conn.conn_id
         self.conn_type = conn.conn_type or "aws"
diff --git 
a/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py 
b/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py
index aff5c71041..32cb855f17 100644
--- a/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py
+++ b/airflow/providers/google/cloud/triggers/cloud_storage_transfer_service.py
@@ -18,7 +18,7 @@
 from __future__ import annotations
 
 import asyncio
-from typing import Any, AsyncIterator
+from typing import Any, AsyncIterator, Iterable
 
 from google.api_core.exceptions import GoogleAPIError
 from google.cloud.storage_transfer_v1.types import TransferOperation
@@ -67,11 +67,11 @@ class 
CloudStorageTransferServiceCreateJobsTrigger(BaseTrigger):
                 jobs_pager = await 
async_hook.get_jobs(job_names=self.job_names)
                 jobs, awaitable_operations = [], []
                 async for job in jobs_pager:
-                    operation = async_hook.get_latest_operation(job)
+                    awaitable_operation = async_hook.get_latest_operation(job)
                     jobs.append(job)
-                    awaitable_operations.append(operation)
+                    awaitable_operations.append(awaitable_operation)
 
-                operations: list[TransferOperation] = await 
asyncio.gather(*awaitable_operations)
+                operations: Iterable[TransferOperation | None] = await 
asyncio.gather(*awaitable_operations)
 
                 for job, operation in zip(jobs, operations):
                     if operation is None:
diff --git a/airflow/providers/http/hooks/http.py 
b/airflow/providers/http/hooks/http.py
index 8ba5d08ab3..91c0d07c1b 100644
--- a/airflow/providers/http/hooks/http.py
+++ b/airflow/providers/http/hooks/http.py
@@ -258,7 +258,8 @@ class HttpHook(BaseHook):
         """
         self._retry_obj = tenacity.Retrying(**_retry_args)
 
-        return self._retry_obj(self.run, *args, **kwargs)
+        # TODO: remove ignore type when 
https://github.com/jd/tenacity/issues/428 is resolved
+        return self._retry_obj(self.run, *args, **kwargs)  # type: ignore
 
     def url_from_endpoint(self, endpoint: str | None) -> str:
         """Combine base url with endpoint."""
diff --git a/pyproject.toml b/pyproject.toml
index e3a73c67cf..0f7652dd24 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -295,7 +295,7 @@ devel-mypy = [
     # TODO: upgrade to newer versions of MyPy continuously as they are released
     # Make sure to upgrade the mypy version in update-common-sql-api-stubs in 
.pre-commit-config.yaml
     # when you upgrade it here !!!!
-    "mypy==1.2.0",
+    "mypy==1.8.0",
     "types-Deprecated",
     "types-Markdown",
     "types-PyMySQL",

Reply via email to the mailing list.