This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch v2-11-test
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/v2-11-test by this push:
new a3ce8c4f580 Backport #52117 (#62356)
a3ce8c4f580 is described below
commit a3ce8c4f580714f7d2389e293659441fe56af5d2
Author: Dev-iL <[email protected]>
AuthorDate: Tue Feb 24 16:51:11 2026 +0200
Backport #52117 (#62356)
---
airflow/providers_manager.py | 26 ++++++++++----------------
1 file changed, 10 insertions(+), 16 deletions(-)
diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py
index ab8906941a0..5a7572315d1 100644
--- a/airflow/providers_manager.py
+++ b/airflow/providers_manager.py
@@ -36,8 +36,6 @@ from typing import TYPE_CHECKING, Any, Callable, MutableMapping, NamedTuple, NoR
from packaging.utils import canonicalize_name
from airflow.exceptions import AirflowOptionalProviderFeatureException
-from airflow.hooks.filesystem import FSHook
-from airflow.hooks.package_index import PackageIndexHook
from airflow.typing_compat import ParamSpec
from airflow.utils import yaml
from airflow.utils.entry_points import entry_points_with_dist
@@ -423,8 +421,6 @@ class ProvidersManager(LoggingMixin, metaclass=Singleton):
self._initialized_cache: dict[str, bool] = {}
# Keeps dict of providers keyed by module name
self._provider_dict: dict[str, ProviderInfo] = {}
- # Keeps dict of hooks keyed by connection type
- self._hooks_dict: dict[str, HookInfo] = {}
self._fs_set: set[str] = set()
self._dataset_uri_handlers: dict[str, Callable[[SplitResult], SplitResult]] = {}
self._dataset_factories: dict[str, Callable[..., Dataset]] = {}
@@ -453,7 +449,6 @@ class ProvidersManager(LoggingMixin, metaclass=Singleton):
)
# Set of plugins contained in providers
self._plugins_set: set[PluginInfo] = set()
- self._init_airflow_core_hooks()
def _init_airflow_core_hooks(self):
"""Initialize the hooks dict with default hooks from Airflow core."""
@@ -470,19 +465,18 @@ class ProvidersManager(LoggingMixin, metaclass=Singleton):
connection_type=None,
connection_testable=False,
)
- for cls in [FSHook, PackageIndexHook]:
- package_name = cls.__module__
- hook_class_name = f"{cls.__module__}.{cls.__name__}"
- hook_info = self._import_hook(
+ for conn_type, class_name in (
+ ("fs", "airflow.hooks.filesystem.FSHook"),
+ ("package_index", "airflow.hooks.package_index.PackageIndexHook"),
+ ):
+ package_name = class_name.rsplit(".", 1)[0]
+ self._hooks_lazy_dict[conn_type] = functools.partial(
+ self._import_hook,
connection_type=None,
- provider_info=None,
- hook_class_name=hook_class_name,
package_name=package_name,
+ hook_class_name=class_name,
+ provider_info=None, # type: ignore[argument]
)
-        self._hook_provider_dict[hook_info.connection_type] = HookClassProvider(
- hook_class_name=hook_class_name, package_name=package_name
- )
- self._hooks_lazy_dict[hook_info.connection_type] = hook_info
@provider_info_cache("list")
def initialize_providers_list(self):
@@ -515,6 +509,7 @@ class ProvidersManager(LoggingMixin, metaclass=Singleton):
@provider_info_cache("hooks")
def initialize_providers_hooks(self):
"""Lazy initialization of providers hooks."""
+ self._init_airflow_core_hooks()
self.initialize_providers_list()
self._discover_hooks()
self._hook_provider_dict = dict(sorted(self._hook_provider_dict.items()))
@@ -1352,7 +1347,6 @@ class ProvidersManager(LoggingMixin, metaclass=Singleton):
def _cleanup(self):
self._initialized_cache.clear()
self._provider_dict.clear()
- self._hooks_dict.clear()
self._fs_set.clear()
self._taskflow_decorators.clear()
self._hook_provider_dict.clear()