This is an automated email from the ASF dual-hosted git repository.

jasonliu pushed a commit to branch v3-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/v3-1-test by this push:
     new 5376f26599c [v3-1-test] Enable ruff PLW0120 rule (#57456) (#57525)
5376f26599c is described below

commit 5376f26599c0e03c63f58cc78ab3144a626c07b4
Author: Jens Scheffler <[email protected]>
AuthorDate: Fri Oct 31 02:42:08 2025 +0100

    [v3-1-test] Enable ruff PLW0120 rule (#57456) (#57525)
    
    * [v3-1-test] Enable ruff PLW0120 rule (#57456)
    (cherry picked from commit 944a9f5219453a0968d89493f461f31ba490b79c)
    
    Co-authored-by: Jens Scheffler <[email protected]>
    
    * fix static checks
---
 airflow-core/src/airflow/cli/commands/db_command.py     |  3 +--
 airflow-core/src/airflow/configuration.py               |  3 +--
 airflow-core/src/airflow/settings.py                    |  4 ++--
 dev/breeze/src/airflow_breeze/utils/packages.py         |  3 +--
 .../airflow/providers/amazon/aws/hooks/batch_client.py  | 10 ++++------
 .../src/airflow/providers/amazon/aws/hooks/datasync.py  |  3 +--
 .../sql/tests/unit/common/sql/operators/test_sql.py     |  2 +-
 .../unit/elasticsearch/log/elasticmock/__init__.py      |  3 +--
 .../airflow/providers/google/cloud/hooks/bigquery.py    | 17 ++++++++---------
 .../src/airflow/providers/google/cloud/hooks/gcs.py     |  3 +--
 .../providers/jenkins/operators/jenkins_job_trigger.py  |  8 +++-----
 pyproject.toml                                          |  3 +++
 scripts/ci/prek/common_prek_utils.py                    |  3 +--
 scripts/ci/prek/update_providers_dependencies.py        |  3 +--
 scripts/in_container/in_container_utils.py              |  3 +--
 15 files changed, 30 insertions(+), 41 deletions(-)

diff --git a/airflow-core/src/airflow/cli/commands/db_command.py b/airflow-core/src/airflow/cli/commands/db_command.py
index a0a02ade87b..ea3241320fd 100644
--- a/airflow-core/src/airflow/cli/commands/db_command.py
+++ b/airflow-core/src/airflow/cli/commands/db_command.py
@@ -81,8 +81,7 @@ def _get_version_revision(version: str, revision_heads_map: dict[str, str] | None
 
         if current < wanted:
             return head
-    else:
-        return None
+    return None
 
 
 def run_db_migrate_command(args, command, revision_heads_map: dict[str, str]):
diff --git a/airflow-core/src/airflow/configuration.py b/airflow-core/src/airflow/configuration.py
index 042d6c84834..18b376ccba5 100644
--- a/airflow-core/src/airflow/configuration.py
+++ b/airflow-core/src/airflow/configuration.py
@@ -1717,8 +1717,7 @@ class AirflowConfigParser(ConfigParser):
                     deprecated_section_array = config.items(section=deprecated_section, raw=True)
                     if any(key == deprecated_key for key, _ in deprecated_section_array):
                         return True
-        else:
-            return False
+        return False
 
     @staticmethod
     def _deprecated_variable_is_set(deprecated_section: str, deprecated_key: 
str) -> bool:
diff --git a/airflow-core/src/airflow/settings.py b/airflow-core/src/airflow/settings.py
index 5a873844876..9a54060ea5c 100644
--- a/airflow-core/src/airflow/settings.py
+++ b/airflow-core/src/airflow/settings.py
@@ -19,7 +19,7 @@ from __future__ import annotations
 
 import atexit
 import functools
-import json
+import json as json_lib
 import logging
 import os
 import sys
@@ -122,7 +122,7 @@ async_engine: AsyncEngine
 AsyncSession: Callable[..., SAAsyncSession]
 
 # The JSON library to use for DAG Serialization and De-Serialization
-json = json
+json = json_lib
 
 # Display alerts on the dashboard
 # Useful for warning about setup issues or announcing changes to end users
diff --git a/dev/breeze/src/airflow_breeze/utils/packages.py b/dev/breeze/src/airflow_breeze/utils/packages.py
index 340831d6b6e..1cd6ec9fc77 100644
--- a/dev/breeze/src/airflow_breeze/utils/packages.py
+++ b/dev/breeze/src/airflow_breeze/utils/packages.py
@@ -185,8 +185,7 @@ def get_provider_id_from_path(file_path: Path) -> str | None:
             for providers_root_candidate in parent.parents:
                 if providers_root_candidate.name == "providers":
                     return parent.relative_to(providers_root_candidate).as_posix().replace("/", ".")
-            else:
-                return None
+            return None
     return None
 
 
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py
index fcf7f4c547c..28426270d46 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/batch_client.py
@@ -386,8 +386,7 @@ class BatchClientHook(AwsBaseHook):
             )
             if job_status in match_status:
                 return True
-        else:
-            raise AirflowException(f"AWS Batch job ({job_id}) status checks exceed max_retries")
+        raise AirflowException(f"AWS Batch job ({job_id}) status checks exceed max_retries")
 
     def get_job_description(self, job_id: str) -> dict:
         """
@@ -426,10 +425,9 @@ class BatchClientHook(AwsBaseHook):
                     "check Amazon Provider AWS Connection documentation for more details.",
                     str(err),
                 )
-        else:
-            raise AirflowException(
-                f"AWS Batch job ({job_id}) description error: exceeded status_retries ({self.status_retries})"
-            )
+        raise AirflowException(
+            f"AWS Batch job ({job_id}) description error: exceeded status_retries ({self.status_retries})"
+        )
 
     @staticmethod
     def parse_job_description(job_id: str, response: dict) -> dict:
diff --git a/providers/amazon/src/airflow/providers/amazon/aws/hooks/datasync.py b/providers/amazon/src/airflow/providers/amazon/aws/hooks/datasync.py
index aca571fa62c..8074978bbb1 100644
--- a/providers/amazon/src/airflow/providers/amazon/aws/hooks/datasync.py
+++ b/providers/amazon/src/airflow/providers/amazon/aws/hooks/datasync.py
@@ -319,5 +319,4 @@ class DataSyncHook(AwsBaseHook):
             else:
                 raise AirflowException(f"Unknown status: {status}")  # Should never happen
             time.sleep(self.wait_interval_seconds)
-        else:
-            raise AirflowTaskTimeout("Max iterations exceeded!")
+        raise AirflowTaskTimeout("Max iterations exceeded!")
diff --git a/providers/common/sql/tests/unit/common/sql/operators/test_sql.py b/providers/common/sql/tests/unit/common/sql/operators/test_sql.py
index a5c1d3819f0..1524464ffac 100644
--- a/providers/common/sql/tests/unit/common/sql/operators/test_sql.py
+++ b/providers/common/sql/tests/unit/common/sql/operators/test_sql.py
@@ -362,7 +362,7 @@ class TestColumnCheckOperator:
             ("X", "min", -1),
             ("X", "max", 20),
         ]
-        operator = operator = self._construct_operator(monkeypatch, self.valid_column_mapping, records)
+        operator = self._construct_operator(monkeypatch, self.valid_column_mapping, records)
         with pytest.raises(AirflowException):
             operator.execute(context=MagicMock())
 
diff --git a/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/__init__.py b/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/__init__.py
index 1393e42e775..a612e16630f 100644
--- a/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/__init__.py
+++ b/providers/elasticsearch/tests/unit/elasticsearch/log/elasticmock/__init__.py
@@ -83,8 +83,7 @@ def _normalize_hosts(hosts):
             h["url_prefix"] = parsed_url.path
 
         out.append(h)
-    else:
-        out.append(host)
+    out.append(host)
     return out
 
 
diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py b/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py
index 5fec3a31b8d..35d7579fd05 100644
--- a/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py
+++ b/providers/google/src/airflow/providers/google/cloud/hooks/bigquery.py
@@ -1802,15 +1802,14 @@ class BigQueryCursor(BigQueryBaseCursor):
                             "must be a dict with {'projectId':'', "
                             "'datasetId':'', 'tableId':''}"
                         )
-                else:
-                    configuration["query"].update(
-                        {
-                            "allowLargeResults": allow_large_results,
-                            "flattenResults": flatten_results,
-                            "writeDisposition": write_disposition,
-                            "createDisposition": create_disposition,
-                        }
-                    )
+                configuration["query"].update(
+                    {
+                        "allowLargeResults": allow_large_results,
+                        "flattenResults": flatten_results,
+                        "writeDisposition": write_disposition,
+                        "createDisposition": create_disposition,
+                    }
+                )
 
         if (
             "useLegacySql" in configuration["query"]
diff --git a/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py b/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py
index a063b39b1a3..42dee568eb9 100644
--- a/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py
+++ b/providers/google/src/airflow/providers/google/cloud/hooks/gcs.py
@@ -371,8 +371,7 @@ class GCSHook(GoogleBaseHook):
                         num_max_attempts,
                     )
                     raise
-        else:
-            raise NotImplementedError  # should not reach this, but makes mypy happy
+        raise NotImplementedError  # should not reach this, but makes mypy happy
 
     def download_as_byte_array(
         self,
diff --git a/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py
index 3f9ccddc0ab..fe399df721f 100644
--- a/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py
+++ b/providers/jenkins/src/airflow/providers/jenkins/operators/jenkins_job_trigger.py
@@ -179,11 +179,9 @@ class JenkinsJobTriggerOperator(BaseOperator):
                         build_number = json_response["executable"]["number"]
                         self.log.info("Job executed on Jenkins side with the build number %s", build_number)
                         return build_number
-        else:
-            raise AirflowException(
-                f"The job hasn't been executed after polling the queue "
-                f"{self.max_try_before_job_appears} times"
-            )
+        raise AirflowException(
+            f"The job hasn't been executed after polling the queue {self.max_try_before_job_appears} times"
+        )
 
     @cached_property
     def hook(self) -> JenkinsHook:
diff --git a/pyproject.toml b/pyproject.toml
index d0fba5df9a5..538f08db153 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -587,6 +587,9 @@ extend-select = [
     "TID25", # flake8-tidy-imports rules
     "E", # pycodestyle rules
     "W", # pycodestyle rules
+    # Warning (PLW) re-implemented in ruff from Pylint
+    "PLW0120", # else clause on loop without a break statement; remove the else and dedent its contents
+    "PLW0127", # Self-assignment of variable
     # Per rule enables
     "RUF006", # Checks for asyncio dangling task
     "RUF015", # Checks for unnecessary iterable allocation for first element
diff --git a/scripts/ci/prek/common_prek_utils.py b/scripts/ci/prek/common_prek_utils.py
index 03c22391187..1477c35dc9e 100644
--- a/scripts/ci/prek/common_prek_utils.py
+++ b/scripts/ci/prek/common_prek_utils.py
@@ -317,8 +317,7 @@ def get_provider_id_from_path(file_path: Path) -> str | None:
             for providers_root_candidate in parent.parents:
                 if providers_root_candidate.name == "providers":
                     return parent.relative_to(providers_root_candidate).as_posix().replace("/", ".")
-            else:
-                return None
+            return None
     return None
 
 
diff --git a/scripts/ci/prek/update_providers_dependencies.py b/scripts/ci/prek/update_providers_dependencies.py
index ea4a39e8f7b..1fa03637807 100755
--- a/scripts/ci/prek/update_providers_dependencies.py
+++ b/scripts/ci/prek/update_providers_dependencies.py
@@ -145,8 +145,7 @@ def get_provider_id_from_path(file_path: Path) -> str | None:
             for providers_root_candidate in parent.parents:
                 if providers_root_candidate.name == "providers":
                     return parent.relative_to(providers_root_candidate).as_posix().replace("/", ".")
-            else:
-                return None
+            return None
     return None
 
 
diff --git a/scripts/in_container/in_container_utils.py b/scripts/in_container/in_container_utils.py
index 2083f00c537..b6f5bd4d95f 100644
--- a/scripts/in_container/in_container_utils.py
+++ b/scripts/in_container/in_container_utils.py
@@ -122,8 +122,7 @@ def get_provider_id_from_path(file_path: Path) -> str | None:
             for providers_root_candidate in parent.parents:
                 if providers_root_candidate.name == "providers":
                     return parent.relative_to(providers_root_candidate).as_posix().replace("/", ".")
-            else:
-                return None
+            return None
     return None
 
 

Reply via email to