This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 9a0f54dbf9 Replace lambdas with comprehensions (#33745)
9a0f54dbf9 is described below
commit 9a0f54dbf9ddb4b556c66e3d24bf98a0b8c607e8
Author: Miroslav Šedivý <[email protected]>
AuthorDate: Wed Aug 30 21:16:22 2023 +0000
Replace lambdas with comprehensions (#33745)
---
airflow/utils/dates.py | 10 ++++++----
dev/assign_cherry_picked_prs_with_milestone.py | 2 +-
.../commands/release_management_commands.py | 2 +-
dev/breeze/src/airflow_breeze/utils/github.py | 2 +-
dev/breeze/src/airflow_breeze/utils/parallel.py | 2 +-
dev/example_dags/update_example_dags_paths.py | 16 +++++++---------
kubernetes_tests/test_kubernetes_pod_operator.py | 4 ++--
setup.py | 4 +---
8 files changed, 20 insertions(+), 22 deletions(-)
diff --git a/airflow/utils/dates.py b/airflow/utils/dates.py
index bcf8f0a183..14fae03a86 100644
--- a/airflow/utils/dates.py
+++ b/airflow/utils/dates.py
@@ -243,12 +243,14 @@ def infer_time_unit(time_seconds_arr: Collection[float]) -> TimeUnit:
def scale_time_units(time_seconds_arr: Collection[float], unit: TimeUnit) -> Collection[float]:
"""Convert an array of time durations in seconds to the specified time unit."""
if unit == "minutes":
- return [x / 60 for x in time_seconds_arr]
+ factor = 60
elif unit == "hours":
- return [x / (60 * 60) for x in time_seconds_arr]
+ factor = 60 * 60
elif unit == "days":
- return [x / (24 * 60 * 60) for x in time_seconds_arr]
- return time_seconds_arr
+ factor = 24 * 60 * 60
+ else:
+ factor = 1
+ return [x / factor for x in time_seconds_arr]
def days_ago(n, hour=0, minute=0, second=0, microsecond=0):
diff --git a/dev/assign_cherry_picked_prs_with_milestone.py b/dev/assign_cherry_picked_prs_with_milestone.py
index f48ddc188a..8a98e0b4ee 100755
--- a/dev/assign_cherry_picked_prs_with_milestone.py
+++ b/dev/assign_cherry_picked_prs_with_milestone.py
@@ -261,7 +261,7 @@ def assign_prs(
output_folder: str,
):
changes = get_changes(verbose, previous_release, current_release)
- changes = list(filter(lambda change: change.pr is not None, changes))
+ changes = [change for change in changes if change.pr is not None]
prs = [change.pr for change in changes]
g = Github(github_token)
diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
index b2b66941ac..5b56b9a820 100644
--- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
@@ -1220,7 +1220,7 @@ def generate_issue_content_providers(
)
continue
prs = get_prs_for_package(package_id)
- provider_prs[package_id] = list(filter(lambda pr: pr not in excluded_prs, prs))
+ provider_prs[package_id] = [pr for pr in prs if pr not in excluded_prs]
all_prs.update(provider_prs[package_id])
g = Github(github_token)
repo = g.get_repo("apache/airflow")
diff --git a/dev/breeze/src/airflow_breeze/utils/github.py b/dev/breeze/src/airflow_breeze/utils/github.py
index 18bf806dee..7ae4f914bb 100644
--- a/dev/breeze/src/airflow_breeze/utils/github.py
+++ b/dev/breeze/src/airflow_breeze/utils/github.py
@@ -100,7 +100,7 @@ def get_active_airflow_versions(confirm: bool = True) -> list[str]:
match = ACTIVE_TAG_MATCH.match(tag)
if match and match.group(1) == "2":
all_active_tags.append(tag)
- airflow_versions = sorted(all_active_tags, key=lambda x: Version(x))
+ airflow_versions = sorted(all_active_tags, key=Version)
if confirm:
get_console().print(f"All Airflow 2 versions: {all_active_tags}")
answer = user_confirm(
diff --git a/dev/breeze/src/airflow_breeze/utils/parallel.py b/dev/breeze/src/airflow_breeze/utils/parallel.py
index 7386366381..48fc6ac756 100644
--- a/dev/breeze/src/airflow_breeze/utils/parallel.py
+++ b/dev/breeze/src/airflow_breeze/utils/parallel.py
@@ -357,7 +357,7 @@ def print_async_summary(completed_list: list[ApplyResult]) -> None:
def get_completed_result_list(results: list[ApplyResult]) -> list[ApplyResult]:
"""Return completed results from the list."""
- return list(filter(lambda result: result.ready(), results))
+ return [result for result in results if result.ready()]
class SummarizeAfter(Enum):
diff --git a/dev/example_dags/update_example_dags_paths.py b/dev/example_dags/update_example_dags_paths.py
index 36eea37158..265a9e98e9 100755
--- a/dev/example_dags/update_example_dags_paths.py
+++ b/dev/example_dags/update_example_dags_paths.py
@@ -102,19 +102,17 @@ def find_matches(_file: Path, provider: str, version: str):
if __name__ == "__main__":
curdir: Path = Path(os.curdir).resolve()
- dirs: list[Path] = list(filter(os.path.isdir, curdir.iterdir()))
+ dirs: list[Path] = [p for p in curdir.iterdir() if p.is_dir()]
with Progress(console=console) as progress:
task = progress.add_task(f"Updating {len(dirs)}", total=len(dirs))
for directory in dirs:
if directory.name.startswith("apache-airflow-providers-"):
provider = directory.name[len("apache-airflow-providers-") :]
console.print(f"[bright_blue] Processing {directory}")
- version_dirs = list(filter(os.path.isdir, directory.iterdir()))
- for version_dir in version_dirs:
- version = version_dir.name
- console.print(version)
- for file in version_dir.rglob("*.html"):
- candidate_file = file
- if candidate_file.exists():
- find_matches(candidate_file, provider, version)
+ for version_dir in directory.iterdir():
+ if version_dir.is_dir():
+ console.print(version_dir.name)
+ for candidate_file in version_dir.rglob("*.html"):
+ if candidate_file.exists():
+ find_matches(candidate_file, provider, version_dir.name)
progress.advance(task)
diff --git a/kubernetes_tests/test_kubernetes_pod_operator.py b/kubernetes_tests/test_kubernetes_pod_operator.py
index 006ddfc788..aba1f95457 100644
--- a/kubernetes_tests/test_kubernetes_pod_operator.py
+++ b/kubernetes_tests/test_kubernetes_pod_operator.py
@@ -80,7 +80,7 @@ def kubeconfig_path():
@pytest.fixture
def test_label(request):
- label = "".join(filter(str.isalnum, f"{request.node.cls.__name__}.{request.node.name}")).lower()
+ label = "".join(c for c in f"{request.node.cls.__name__}.{request.node.name}" if c.isalnum()).lower()
return label[-63:]
@@ -284,7 +284,7 @@ class TestKubernetesPodOperatorSystem:
k.execute(context)
actual_pod = k.find_pod("default", context, exclude_checked=False)
actual_pod = self.api_client.sanitize_for_serialization(actual_pod)
- status = next(iter(filter(lambda x: x["name"] == "base", actual_pod["status"]["containerStatuses"])))
+ status = next(x for x in actual_pod["status"]["containerStatuses"] if x["name"] == "base")
assert status["state"]["terminated"]["reason"] == "Error"
assert actual_pod["metadata"]["labels"]["already_checked"] == "True"
diff --git a/setup.py b/setup.py
index 1a0f09b5a8..db438112c8 100644
--- a/setup.py
+++ b/setup.py
@@ -681,9 +681,7 @@ EXTRAS_DEPENDENCIES["all_dbs"] = all_dbs
# to separately add providers dependencies - they have been already added as 'providers' extras above
_all_dependencies = get_unique_dependency_list(EXTRAS_DEPENDENCIES.values())
-_all_dependencies_without_airflow_providers = list(
- filter(lambda k: "apache-airflow-" not in k, _all_dependencies)
-)
+_all_dependencies_without_airflow_providers = [k for k in _all_dependencies if "apache-airflow-" not in k]
# All user extras here
# all is purely development extra and it should contain only direct dependencies of Airflow