This is an automated email from the ASF dual-hosted git repository.
sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git
The following commit(s) were added to refs/heads/main by this push:
new 9aeeb11 Move the code to move files into the release writer
9aeeb11 is described below
commit 9aeeb11c2136aba3f11cf4172e1cb040d9940634
Author: Sean B. Palmer <[email protected]>
AuthorDate: Thu Sep 11 20:11:16 2025 +0100
Move the code to move files into the release writer
---
atr/routes/finish.py | 125 ++++-------------------------------------
atr/storage/writers/release.py | 113 ++++++++++++++++++++++++++++++++++++-
2 files changed, 121 insertions(+), 117 deletions(-)
diff --git a/atr/routes/finish.py b/atr/routes/finish.py
index 7f02784..4e658a6 100644
--- a/atr/routes/finish.py
+++ b/atr/routes/finish.py
@@ -18,7 +18,7 @@
import dataclasses
import pathlib
from collections.abc import Awaitable, Callable
-from typing import Any, Final
+from typing import Any
import aiofiles.os
import asfquart.base as base
@@ -42,9 +42,6 @@ import atr.storage as storage
import atr.template as template
import atr.util as util
-SPECIAL_SUFFIXES: Final[frozenset[str]] = frozenset({".asc", ".sha256", ".sha512"})
-
-
Respond = Callable[[int, str], Awaitable[tuple[quart_response.Response, int] | response.Response]]
@@ -267,13 +264,13 @@ async def _delete_empty_directory(
try:
async with storage.write(session.uid) as write:
wacp = await write.as_project_committee_member(project_name)
- created_error = await wacp.release.delete_empty_directory(project_name, version_name, dir_to_delete_rel)
+ creation_error = await wacp.release.delete_empty_directory(project_name, version_name, dir_to_delete_rel)
except Exception:
log.exception(f"Unexpected error deleting directory {dir_to_delete_rel} for {project_name}/{version_name}")
return await respond(500, "An unexpected error occurred.")
- if created_error is not None:
- return await respond(400, created_error)
+ if creation_error is not None:
+ return await respond(400, creation_error)
return await respond(200, f"Deleted empty directory '{dir_to_delete_rel}'.")
@@ -286,23 +283,14 @@ async def _move_file_to_revision(
respond: Respond,
) -> tuple[quart_response.Response, int] | response.Response:
try:
- description = "File move through web interface"
- moved_files_names: list[str] = []
- skipped_files_names: list[str] = []
-
- async with revision.create_and_manage(
- project_name, version_name, session.uid, description=description
- ) as creating:
- await _setup_revision(
- source_files_rel,
- target_dir_rel,
- creating,
- moved_files_names,
- skipped_files_names,
+ async with storage.write(session.uid) as write:
+ wacp = await write.as_project_committee_member(project_name)
+ creation_error, moved_files_names, skipped_files_names = await wacp.release.move_file(
+ project_name, version_name, source_files_rel, target_dir_rel
)
- if creating.failed is not None:
- return await respond(409, str(creating.failed))
+ if creation_error is not None:
+ return await respond(409, creation_error)
response_messages = []
if moved_files_names:
@@ -329,18 +317,6 @@ async def _move_file_to_revision(
return await respond(500, f"ERROR: {e!s}")
-def _related_files(path: pathlib.Path) -> list[pathlib.Path]:
- base_path = path.with_suffix("") if (path.suffix in SPECIAL_SUFFIXES) else path
- parent_dir = base_path.parent
- name_without_ext = base_path.name
- return [
- parent_dir / name_without_ext,
- parent_dir / f"{name_without_ext}.asc",
- parent_dir / f"{name_without_ext}.sha256",
- parent_dir / f"{name_without_ext}.sha512",
- ]
-
-
async def _remove_rc_tags(
session: routes.CommitterSession,
project_name: str,
@@ -446,87 +422,6 @@ async def _remove_rc_tags_revision_item(
return False, renamed_count_local
-async def _setup_revision(
- source_files_rel: list[pathlib.Path],
- target_dir_rel: pathlib.Path,
- creating: revision.Creating,
- moved_files_names: list[str],
- skipped_files_names: list[str],
-) -> None:
- target_path = creating.interim_path / target_dir_rel
- try:
- target_path.resolve().relative_to(creating.interim_path.resolve())
- except ValueError:
- # Path traversal detected
- raise revision.FailedError("Paths must be restricted to the release directory")
-
- if not await aiofiles.os.path.exists(target_path):
- for part in target_path.parts:
- # TODO: This .prefix check could include some existing directory segment
- if part.startswith("."):
- raise revision.FailedError("Segments must not start with '.'")
- if ".." in part:
- raise revision.FailedError("Segments must not contain '..'")
-
- try:
- # TODO: Move to the storage interface
- await aiofiles.os.makedirs(target_path)
- except OSError:
- raise revision.FailedError("Failed to create target directory")
- elif not await aiofiles.os.path.isdir(target_path):
- raise revision.FailedError("Target path is not a directory")
-
- for source_file_rel in source_files_rel:
- await _setup_revision_item(
- source_file_rel, target_dir_rel, creating, moved_files_names, skipped_files_names, target_path
- )
-
-
-async def _setup_revision_item(
- source_file_rel: pathlib.Path,
- target_dir_rel: pathlib.Path,
- creating: revision.Creating,
- moved_files_names: list[str],
- skipped_files_names: list[str],
- target_path: pathlib.Path,
-) -> None:
- if source_file_rel.parent == target_dir_rel:
- skipped_files_names.append(source_file_rel.name)
- return
-
- full_source_item_path = creating.interim_path / source_file_rel
-
- if await aiofiles.os.path.isdir(full_source_item_path):
- if (target_dir_rel == source_file_rel) or (creating.interim_path / target_dir_rel).resolve().is_relative_to(
- full_source_item_path.resolve()
- ):
- raise revision.FailedError("Cannot move a directory into itself or a subdirectory of itself")
-
- final_target_for_item = target_path / source_file_rel.name
- if await aiofiles.os.path.exists(final_target_for_item):
- raise revision.FailedError("Target name already exists")
-
- # TODO: Move to the storage interface
- await aiofiles.os.rename(full_source_item_path, final_target_for_item)
- moved_files_names.append(source_file_rel.name)
- else:
- related_files = _related_files(source_file_rel)
- bundle = [f for f in related_files if await aiofiles.os.path.exists(creating.interim_path / f)]
- for f_check in bundle:
- if await aiofiles.os.path.isdir(creating.interim_path / f_check):
- raise revision.FailedError("A related 'file' is actually a directory")
-
- collisions = [f.name for f in bundle if await aiofiles.os.path.exists(target_path / f.name)]
- if collisions:
- raise revision.FailedError("A related file already exists in the target directory")
-
- for f in bundle:
- # TODO: Move to the storage interface
- await aiofiles.os.rename(creating.interim_path / f, target_path / f.name)
- if f == source_file_rel:
- moved_files_names.append(f.name)
-
-
async def _sources_and_targets(latest_revision_dir: pathlib.Path) -> tuple[list[pathlib.Path], set[pathlib.Path]]:
source_items_rel: list[pathlib.Path] = []
target_dirs: set[pathlib.Path] = {pathlib.Path(".")}
diff --git a/atr/storage/writers/release.py b/atr/storage/writers/release.py
index be0fb2f..f49c95c 100644
--- a/atr/storage/writers/release.py
+++ b/atr/storage/writers/release.py
@@ -22,7 +22,7 @@ import base64
import contextlib
import datetime
import pathlib
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Final
import aiofiles.os
import aioshutil
@@ -40,6 +40,9 @@ if TYPE_CHECKING:
from collections.abc import AsyncGenerator
+SPECIAL_SUFFIXES: Final[frozenset[str]] = frozenset({".asc", ".sha256", ".sha512"})
+
+
class GeneralPublic:
def __init__(
self,
@@ -103,7 +106,6 @@ class CommitteeParticipant(FoundationCommitter):
raise revision.FailedError(f"Path '{dir_to_delete_rel}' is not a directory.")
if await aiofiles.os.listdir(path_to_remove):
raise revision.FailedError(f"Directory '{dir_to_delete_rel}' is not empty.")
- # TODO: Move to the storage interface
await aiofiles.os.rmdir(path_to_remove)
if creating.failed is not None:
return str(creating.failed)
@@ -162,6 +164,24 @@ class CommitteeParticipant(FoundationCommitter):
await self.__data.refresh(svn_import_task)
return svn_import_task
+ async def move_file(
+ self, project_name: str, version_name: str, source_files_rel: list[pathlib.Path], target_dir_rel: pathlib.Path
+ ) -> tuple[str | None, list[str], list[str]]:
+ description = "File move through web interface"
+ moved_files_names: list[str] = []
+ skipped_files_names: list[str] = []
+
+ async with self.create_and_manage_revision(project_name, version_name, description) as creating:
+ await self.__setup_revision(
+ source_files_rel,
+ target_dir_rel,
+ creating,
+ moved_files_names,
+ skipped_files_names,
+ )
+ creation_error = str(creating.failed) if (creating.failed is not None) else None
+ return creation_error, moved_files_names, skipped_files_names
+
async def start(self, project_name: str, version: str) -> tuple[sql.Release, sql.Project]:
"""Creates the initial release draft record and revision directory."""
# Get the project from the project name
@@ -238,6 +258,95 @@ class CommitteeParticipant(FoundationCommitter):
number=creating.new.number,
).demand(storage.AccessError("Revision not found"))
+ def __related_files(self, path: pathlib.Path) -> list[pathlib.Path]:
+ base_path = path.with_suffix("") if (path.suffix in SPECIAL_SUFFIXES) else path
+ parent_dir = base_path.parent
+ name_without_ext = base_path.name
+ return [
+ parent_dir / name_without_ext,
+ parent_dir / f"{name_without_ext}.asc",
+ parent_dir / f"{name_without_ext}.sha256",
+ parent_dir / f"{name_without_ext}.sha512",
+ ]
+
+ async def __setup_revision(
+ self,
+ source_files_rel: list[pathlib.Path],
+ target_dir_rel: pathlib.Path,
+ creating: revision.Creating,
+ moved_files_names: list[str],
+ skipped_files_names: list[str],
+ ) -> None:
+ target_path = creating.interim_path / target_dir_rel
+ try:
+ target_path.resolve().relative_to(creating.interim_path.resolve())
+ except ValueError:
+ # Path traversal detected
+ raise revision.FailedError("Paths must be restricted to the release directory")
+
+ if not await aiofiles.os.path.exists(target_path):
+ for part in target_path.parts:
+ # TODO: This .prefix check could include some existing directory segment
+ if part.startswith("."):
+ raise revision.FailedError("Segments must not start with '.'")
+ if ".." in part:
+ raise revision.FailedError("Segments must not contain '..'")
+
+ try:
+ await aiofiles.os.makedirs(target_path)
+ except OSError:
+ raise revision.FailedError("Failed to create target directory")
+ elif not await aiofiles.os.path.isdir(target_path):
+ raise revision.FailedError("Target path is not a directory")
+
+ for source_file_rel in source_files_rel:
+ await self.__setup_revision_item(
+ source_file_rel, target_dir_rel, creating, moved_files_names, skipped_files_names, target_path
+ )
+
+ async def __setup_revision_item(
+ self,
+ source_file_rel: pathlib.Path,
+ target_dir_rel: pathlib.Path,
+ creating: revision.Creating,
+ moved_files_names: list[str],
+ skipped_files_names: list[str],
+ target_path: pathlib.Path,
+ ) -> None:
+ if source_file_rel.parent == target_dir_rel:
+ skipped_files_names.append(source_file_rel.name)
+ return
+
+ full_source_item_path = creating.interim_path / source_file_rel
+
+ if await aiofiles.os.path.isdir(full_source_item_path):
+ if (target_dir_rel == source_file_rel) or (creating.interim_path / target_dir_rel).resolve().is_relative_to(
+ full_source_item_path.resolve()
+ ):
+ raise revision.FailedError("Cannot move a directory into itself or a subdirectory of itself")
+
+ final_target_for_item = target_path / source_file_rel.name
+ if await aiofiles.os.path.exists(final_target_for_item):
+ raise revision.FailedError("Target name already exists")
+
+ await aiofiles.os.rename(full_source_item_path, final_target_for_item)
+ moved_files_names.append(source_file_rel.name)
+ else:
+ related_files = self.__related_files(source_file_rel)
+ bundle = [f for f in related_files if await aiofiles.os.path.exists(creating.interim_path / f)]
+ for f_check in bundle:
+ if await aiofiles.os.path.isdir(creating.interim_path / f_check):
+ raise revision.FailedError("A related 'file' is actually a directory")
+
+ collisions = [f.name for f in bundle if await aiofiles.os.path.exists(target_path / f.name)]
+ if collisions:
+ raise revision.FailedError("A related file already exists in the target directory")
+
+ for f in bundle:
+ await aiofiles.os.rename(creating.interim_path / f, target_path / f.name)
+ if f == source_file_rel:
+ moved_files_names.append(f.name)
+
class CommitteeMember(CommitteeParticipant):
def __init__(
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]