This is an automated email from the ASF dual-hosted git repository.

husseinawala pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 85d48bb85c bump ruff to 0.5.5 (#41032)
85d48bb85c is described below

commit 85d48bb85c55331d920c04f4e011512f2e75166c
Author: Hussein Awala <huss...@awala.fr>
AuthorDate: Sat Aug 3 16:57:05 2024 +0200

    bump ruff to 0.5.5 (#41032)
---
 .pre-commit-config.yaml                                      | 4 ++--
 airflow/models/abstractoperator.py                           | 6 +++---
 airflow/models/dag.py                                        | 2 +-
 airflow/models/expandinput.py                                | 2 +-
 airflow/models/xcom_arg.py                                   | 2 +-
 airflow/ti_deps/deps/task_not_running_dep.py                 | 2 +-
 airflow/utils/log/file_task_handler.py                       | 2 +-
 hatch_build.py                                               | 2 +-
 tests/api_experimental/client/test_json_client.py            | 2 +-
 tests/core/test_configuration.py                             | 8 ++++----
 tests/core/test_stats.py                                     | 4 ++--
 tests/datasets/test_dataset.py                               | 2 +-
 tests/decorators/test_task_group.py                          | 2 +-
 tests/providers/amazon/aws/deferrable/hooks/test_base_aws.py | 2 +-
 tests/providers/asana/hooks/test_asana.py                    | 2 +-
 tests/providers/cncf/kubernetes/utils/test_pod_manager.py    | 8 ++++----
 16 files changed, 26 insertions(+), 26 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ca7aed9183..b4b6ec9c3d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -343,7 +343,7 @@ repos:
         types_or: [python, pyi]
         args: [--fix]
         require_serial: true
-        additional_dependencies: ["ruff==0.4.5"]
+        additional_dependencies: ["ruff==0.5.5"]
         exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py
       - id: ruff-format
         name: Run 'ruff format' for extremely fast Python formatting
@@ -353,7 +353,7 @@ repos:
         types_or: [python, pyi]
         args: []
         require_serial: true
-        additional_dependencies: ["ruff==0.4.5"]
+        additional_dependencies: ["ruff==0.5.5"]
         exclude: ^.*/.*_vendor/|^tests/dags/test_imports.py|^airflow/contrib/
       - id: replace-bad-characters
         name: Replace bad characters
diff --git a/airflow/models/abstractoperator.py 
b/airflow/models/abstractoperator.py
index 9cf1830bb4..5e5d13d5dc 100644
--- a/airflow/models/abstractoperator.py
+++ b/airflow/models/abstractoperator.py
@@ -466,11 +466,11 @@ class AbstractOperator(Templater, DAGNode):
             _UpstreamPriorityWeightStrategy,
         )
 
-        if type(self.weight_rule) == _AbsolutePriorityWeightStrategy:
+        if isinstance(self.weight_rule, _AbsolutePriorityWeightStrategy):
             return self.priority_weight
-        elif type(self.weight_rule) == _DownstreamPriorityWeightStrategy:
+        elif isinstance(self.weight_rule, _DownstreamPriorityWeightStrategy):
             upstream = False
-        elif type(self.weight_rule) == _UpstreamPriorityWeightStrategy:
+        elif isinstance(self.weight_rule, _UpstreamPriorityWeightStrategy):
             upstream = True
         else:
             upstream = False
diff --git a/airflow/models/dag.py b/airflow/models/dag.py
index 1cc46eb269..6c5948a46b 100644
--- a/airflow/models/dag.py
+++ b/airflow/models/dag.py
@@ -859,7 +859,7 @@ class DAG(LoggingMixin):
         return f"<DAG: {self.dag_id}>"
 
     def __eq__(self, other):
-        if type(self) == type(other):
+        if type(self) is type(other):
             # Use getattr() instead of __dict__ as __dict__ doesn't return
             # correct values for properties.
             return all(getattr(self, c, None) == getattr(other, c, None) for c 
in self._comps)
diff --git a/airflow/models/expandinput.py b/airflow/models/expandinput.py
index 2b8c76f3f8..7feb7f76a6 100644
--- a/airflow/models/expandinput.py
+++ b/airflow/models/expandinput.py
@@ -305,7 +305,7 @@ def get_map_type_key(expand_input: ExpandInput | 
_ExpandInputRef) -> str:
 
     if isinstance(expand_input, _ExpandInputRef):
         return expand_input.key
-    return next(k for k, v in _EXPAND_INPUT_TYPES.items() if v == 
type(expand_input))
+    return next(k for k, v in _EXPAND_INPUT_TYPES.items() if 
isinstance(expand_input, v))
 
 
 def create_expand_input(kind: str, value: Any) -> ExpandInput:
diff --git a/airflow/models/xcom_arg.py b/airflow/models/xcom_arg.py
index 46ee4d12f4..590327eaa9 100644
--- a/airflow/models/xcom_arg.py
+++ b/airflow/models/xcom_arg.py
@@ -725,7 +725,7 @@ _XCOM_ARG_TYPES: Mapping[str, type[XComArg]] = {
 
 def serialize_xcom_arg(value: XComArg) -> dict[str, Any]:
     """DAG serialization interface."""
-    key = next(k for k, v in _XCOM_ARG_TYPES.items() if v == type(value))
+    key = next(k for k, v in _XCOM_ARG_TYPES.items() if isinstance(value, v))
     if key:
         return {"type": key, **value._serialize()}
     return value._serialize()
diff --git a/airflow/ti_deps/deps/task_not_running_dep.py 
b/airflow/ti_deps/deps/task_not_running_dep.py
index 3319fb69bf..bf56df43c6 100644
--- a/airflow/ti_deps/deps/task_not_running_dep.py
+++ b/airflow/ti_deps/deps/task_not_running_dep.py
@@ -32,7 +32,7 @@ class TaskNotRunningDep(BaseTIDep):
 
     def __eq__(self, other):
         """Check if two task instance dependencies are of the same type."""
-        return type(self) == type(other)
+        return type(self) is type(other)
 
     def __hash__(self):
         """Compute the hash value based on the type of the task instance 
dependency."""
diff --git a/airflow/utils/log/file_task_handler.py 
b/airflow/utils/log/file_task_handler.py
index fa43d7f8d6..2ae15b454a 100644
--- a/airflow/utils/log/file_task_handler.py
+++ b/airflow/utils/log/file_task_handler.py
@@ -203,7 +203,7 @@ class FileTaskHandler(logging.Handler):
                 RemovedInAirflow3Warning,
                 # We want to reference the stack that actually instantiates the
                 # handler, not the one that calls super()__init__.
-                stacklevel=(2 if type(self) == FileTaskHandler else 3),
+                stacklevel=(2 if isinstance(self, FileTaskHandler) else 3),
             )
         self.maintain_propagate: bool = False
         self.max_bytes = max_bytes
diff --git a/hatch_build.py b/hatch_build.py
index ab18c04495..110ebdb772 100644
--- a/hatch_build.py
+++ b/hatch_build.py
@@ -251,7 +251,7 @@ DEVEL_EXTRAS: dict[str, list[str]] = {
     "devel-static-checks": [
         "black>=23.12.0",
         "pre-commit>=3.5.0",
-        "ruff==0.4.5",
+        "ruff==0.5.5",
         "yamllint>=1.33.0",
     ],
     "devel-tests": [
diff --git a/tests/api_experimental/client/test_json_client.py 
b/tests/api_experimental/client/test_json_client.py
index baf6027ae7..ec35c61a0e 100644
--- a/tests/api_experimental/client/test_json_client.py
+++ b/tests/api_experimental/client/test_json_client.py
@@ -44,7 +44,7 @@ class TestJsonClient:
         mock_get.return_value = Response(status_code=500, json={"get_ok": 
"no"})
         with pytest.raises(OSError) as exc_info:
             self.client._request("/test/except", {"dag_id": "foo"})
-        assert exc_info.type == OSError
+        assert exc_info.type is OSError
         assert "Server error" in str(exc_info.value)
 
     @patch.object(httpx.Client, "post")
diff --git a/tests/core/test_configuration.py b/tests/core/test_configuration.py
index e6ea725db1..62548a3f26 100644
--- a/tests/core/test_configuration.py
+++ b/tests/core/test_configuration.py
@@ -1484,7 +1484,7 @@ sql_alchemy_conn=sqlite://test
             test_conf.items("scheduler")
         assert len(captured) == 1
         c = captured[0]
-        assert c.category == FutureWarning
+        assert c.category is FutureWarning
         assert (
             "you should use[scheduler/parsing_cleanup_interval] "
             "instead. Please update your `conf.get*`" in str(c.message)
@@ -1494,7 +1494,7 @@ sql_alchemy_conn=sqlite://test
                 test_conf.items("scheduler")
         assert len(captured) == 1
         c = captured[0]
-        assert c.category == DeprecationWarning
+        assert c.category is DeprecationWarning
         assert (
             "deactivate_stale_dags_interval option in [scheduler] "
             "has been renamed to parsing_cleanup_interval" in str(c.message)
@@ -1518,12 +1518,12 @@ sql_alchemy_conn=sqlite://test
 
         w = captured.pop()
         assert "the old setting has been used, but please update" in 
str(w.message)
-        assert w.category == DeprecationWarning
+        assert w.category is DeprecationWarning
         # only if we use old value, do we also get a warning about code update
         if key == old_val:
             w = captured.pop()
             assert "your `conf.get*` call to use the new name" in 
str(w.message)
-            assert w.category == FutureWarning
+            assert w.category is FutureWarning
 
     def test_as_dict_raw(self):
         test_conf = AirflowConfigParser()
diff --git a/tests/core/test_stats.py b/tests/core/test_stats.py
index 80570b9ddd..902a0ed003 100644
--- a/tests/core/test_stats.py
+++ b/tests/core/test_stats.py
@@ -403,7 +403,7 @@ class TestPatternOrBasicValidatorConfigOption:
                     match="The basic metric validator will be deprecated in 
the future in favor of pattern-matching.  You can try this now by setting 
config option metrics_use_pattern_match to True.",
                 ):
                     assert isinstance(airflow.stats.Stats.statsd, 
statsd.StatsClient)
-            assert type(airflow.stats.Stats.instance.metrics_validator) == 
expected
+            assert isinstance(airflow.stats.Stats.instance.metrics_validator, 
expected)
 
     @conf_vars({**stats_on, **block_list, ("metrics", "metrics_allow_list"): 
"bax,qux"})
     def test_setting_allow_and_block_logs_warning(self, caplog):
@@ -414,7 +414,7 @@ class TestPatternOrBasicValidatorConfigOption:
             match="The basic metric validator will be deprecated in the future 
in favor of pattern-matching.  You can try this now by setting config option 
metrics_use_pattern_match to True.",
         ):
             assert isinstance(airflow.stats.Stats.statsd, statsd.StatsClient)
-        assert type(airflow.stats.Stats.instance.metrics_validator) == 
AllowListValidator
+        assert isinstance(airflow.stats.Stats.instance.metrics_validator, 
AllowListValidator)
         with caplog.at_level(logging.WARNING):
             assert "Ignoring metrics_block_list" in caplog.text
 
diff --git a/tests/datasets/test_dataset.py b/tests/datasets/test_dataset.py
index 7d5228438d..19e880ff97 100644
--- a/tests/datasets/test_dataset.py
+++ b/tests/datasets/test_dataset.py
@@ -359,7 +359,7 @@ def test_dag_with_complex_dataset_condition(session, 
dag_maker):
 
 
 def datasets_equal(d1: BaseDataset, d2: BaseDataset) -> bool:
-    if type(d1) != type(d2):
+    if type(d1) is not type(d2):
         return False
 
     if isinstance(d1, Dataset) and isinstance(d2, Dataset):
diff --git a/tests/decorators/test_task_group.py 
b/tests/decorators/test_task_group.py
index 39e6fcf464..709a9135f5 100644
--- a/tests/decorators/test_task_group.py
+++ b/tests/decorators/test_task_group.py
@@ -111,7 +111,7 @@ def test_partial_evolves_factory():
 
     assert d.task_group_dict == {}  # Calling partial() without expanding does 
not create a task group.
 
-    assert type(tgp) == type(tg)
+    assert type(tgp) is type(tg)
     assert tgp.partial_kwargs == {"a": 1}  # Partial kwargs are saved.
 
     # Warn if the partial object goes out of scope without being mapped.
diff --git a/tests/providers/amazon/aws/deferrable/hooks/test_base_aws.py 
b/tests/providers/amazon/aws/deferrable/hooks/test_base_aws.py
index 1e99065a71..1e9526ef89 100644
--- a/tests/providers/amazon/aws/deferrable/hooks/test_base_aws.py
+++ b/tests/providers/amazon/aws/deferrable/hooks/test_base_aws.py
@@ -34,7 +34,7 @@ with contextlib.suppress(ImportError):
 class TestAwsBaseAsyncHook:
     @staticmethod
     def compare_aio_cred(first, second):
-        if type(first) != type(second):
+        if type(first) is not type(second):
             return False
         if first.access_key != second.access_key:
             return False
diff --git a/tests/providers/asana/hooks/test_asana.py 
b/tests/providers/asana/hooks/test_asana.py
index 7d6311785e..af539cf5cd 100644
--- a/tests/providers/asana/hooks/test_asana.py
+++ b/tests/providers/asana/hooks/test_asana.py
@@ -41,7 +41,7 @@ class TestAsanaHook:
         ):
             hook = AsanaHook()
         client = hook.get_conn()
-        assert type(client) == Client
+        assert isinstance(client, Client)
 
     def test_missing_password_raises(self):
         """
diff --git a/tests/providers/cncf/kubernetes/utils/test_pod_manager.py 
b/tests/providers/cncf/kubernetes/utils/test_pod_manager.py
index b432ef04c0..3e4f2d086f 100644
--- a/tests/providers/cncf/kubernetes/utils/test_pod_manager.py
+++ b/tests/providers/cncf/kubernetes/utils/test_pod_manager.py
@@ -60,7 +60,7 @@ class TestPodManager:
         mock.sentinel.metadata = mock.MagicMock()
         self.mock_kube_client.read_namespaced_pod_log.return_value = 
mock.sentinel.logs
         logs = self.pod_manager.read_pod_logs(pod=mock.sentinel, 
container_name="base")
-        assert type(logs) == PodLogsConsumer
+        assert isinstance(logs, PodLogsConsumer)
         assert logs.response == mock.sentinel.logs
 
     def test_read_pod_logs_retries_successfully(self):
@@ -70,7 +70,7 @@ class TestPodManager:
             mock.sentinel.logs,
         ]
         logs = self.pod_manager.read_pod_logs(pod=mock.sentinel, 
container_name="base")
-        assert type(logs) == PodLogsConsumer
+        assert isinstance(logs, PodLogsConsumer)
         assert mock.sentinel.logs == logs.response
         self.mock_kube_client.read_namespaced_pod_log.assert_has_calls(
             [
@@ -125,7 +125,7 @@ class TestPodManager:
         mock.sentinel.metadata = mock.MagicMock()
         self.mock_kube_client.read_namespaced_pod_log.side_effect = 
[mock.sentinel.logs]
         logs = self.pod_manager.read_pod_logs(pod=mock.sentinel, 
container_name="base", tail_lines=100)
-        assert type(logs) == PodLogsConsumer
+        assert isinstance(logs, PodLogsConsumer)
         assert mock.sentinel.logs == logs.response
         self.mock_kube_client.read_namespaced_pod_log.assert_has_calls(
             [
@@ -145,7 +145,7 @@ class TestPodManager:
         mock.sentinel.metadata = mock.MagicMock()
         self.mock_kube_client.read_namespaced_pod_log.side_effect = 
[mock.sentinel.logs]
         logs = self.pod_manager.read_pod_logs(mock.sentinel, "base", 
since_seconds=2)
-        assert type(logs) == PodLogsConsumer
+        assert isinstance(logs, PodLogsConsumer)
         assert mock.sentinel.logs == logs.response
         self.mock_kube_client.read_namespaced_pod_log.assert_has_calls(
             [

Reply via email to the mailing list.