This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch sbp
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git


The following commit(s) were added to refs/heads/sbp by this push:
     new b576d354 Migrate revision creators that add new files
b576d354 is described below

commit b576d354a6db924296b776ece565348c978185c4
Author: Sean B. Palmer <[email protected]>
AuthorDate: Wed Feb 18 15:35:50 2026 +0000

    Migrate revision creators that add new files
---
 atr/post/upload.py             | 26 ++++++++++++++++----------
 atr/ssh.py                     | 31 +++++++++++++++++--------------
 atr/storage/writers/release.py | 30 ++++++++++++++++++++----------
 atr/tasks/svn.py               | 27 +++++++++++++--------------
 4 files changed, 66 insertions(+), 48 deletions(-)

diff --git a/atr/post/upload.py b/atr/post/upload.py
index c13548e8..9d6edf42 100644
--- a/atr/post/upload.py
+++ b/atr/post/upload.py
@@ -31,8 +31,10 @@ import atr.db as db
 import atr.form as form
 import atr.get as get
 import atr.log as log
+import atr.models.sql as sql
 import atr.shared as shared
 import atr.storage as storage
+import atr.storage.types as types
 import atr.util as util
 import atr.web as web
 
@@ -68,28 +70,32 @@ async def finalise(
             number_of_files = len(staged_files)
             description = f"Upload of {util.plural(number_of_files, 'file')} 
through web interface"
 
-            async with wacp.release.create_and_manage_revision(project_name, 
version_name, description) as creating:
+            async def modify(path: pathlib.Path, _old_rev: sql.Revision | 
None) -> None:
                 for filename in staged_files:
                     src = staging_dir / filename
-                    dst = creating.interim_path / filename
+                    dst = path / filename
                     await aioshutil.move(str(src), str(dst))
 
-        await aioshutil.rmtree(staging_dir)
-
-        if creating.failed is not None:
-            await quart.flash(str(creating.failed), "error")
-            return await session.redirect(
-                get.upload.selected,
-                project_name=project_name,
-                version_name=version_name,
+            await wacp.revision.create_revision(
+                project_name, version_name, session.uid, 
description=description, modify=modify
             )
 
+        await aioshutil.rmtree(staging_dir)
+
         return await session.redirect(
             get.compose.selected,
             success=f"{util.plural(number_of_files, 'file')} added 
successfully",
             project_name=project_name,
             version_name=version_name,
         )
+    except types.FailedError as e:
+        await aioshutil.rmtree(staging_dir)
+        await quart.flash(str(e), "error")
+        return await session.redirect(
+            get.upload.selected,
+            project_name=project_name,
+            version_name=version_name,
+        )
     except Exception as e:
         log.exception("Error finalising upload:")
         return _json_error(f"Error finalising upload: {e!s}", 500)
diff --git a/atr/ssh.py b/atr/ssh.py
index b04031a7..78876cba 100644
--- a/atr/ssh.py
+++ b/atr/ssh.py
@@ -22,6 +22,7 @@ import asyncio.subprocess
 import datetime
 import glob
 import os
+import pathlib
 import stat
 import string
 import time
@@ -564,22 +565,21 @@ async def _step_07b_process_validated_rsync_write(
     description = "File synchronisation through ssh, using rsync"
     async with storage.write(asf_uid) as write:
         wacp = await write.as_project_committee_participant(project_name)
-        async with wacp.revision.create_and_manage(
-            project_name, version_name, asf_uid, description=description
-        ) as creating:
-            # Uses new_revision_number for logging only
-            if creating.old is not None:
-                log.info(f"Using old revision {creating.old.number} and 
interim path {creating.interim_path}")
+
+        async def modify(path: pathlib.Path, old_rev: sql.Revision | None) -> 
None:
+            nonlocal exit_status
+            if old_rev is not None:
+                log.info(f"Using old revision {old_rev.number} and interim 
path {path}")
             # Update the rsync command path to the new revision directory
-            argv[-1] = str(creating.interim_path)
+            argv[-1] = str(path)
 
             ###################################################
             ### Calls _step_08_execute_rsync_upload_command ###
             ###################################################
             exit_status = await _step_08_execute_rsync(process, argv)
             if exit_status != 0:
-                if creating.old is not None:
-                    for_revision = f"successor of revision 
{creating.old.number}"
+                if old_rev is not None:
+                    for_revision = f"successor of revision {old_rev.number}"
                 else:
                     for_revision = f"initial revision for release 
{release_name}"
                 log.error(
@@ -588,20 +588,23 @@ async def _step_07b_process_validated_rsync_write(
                 )
                 raise types.FailedError(f"rsync upload failed with exit status 
{exit_status} for {for_revision}")
 
-        if creating.new is not None:
+        try:
+            new_revision = await wacp.revision.create_revision(
+                project_name, version_name, asf_uid, description=description, 
modify=modify
+            )
             github_payload = server._get_github_payload(process)
             if github_payload is not None:
                 await attestable.github_tp_payload_write(
-                    project_name, version_name, creating.new.number, 
github_payload
+                    project_name, version_name, new_revision.number, 
github_payload
                 )
-            log.info(f"rsync upload successful for revision 
{creating.new.number}")
+            log.info(f"rsync upload successful for revision 
{new_revision.number}")
             host = config.get().APP_HOST
-            message = f"\nATR: Created revision {creating.new.number} of 
{project_name} {version_name}\n"
+            message = f"\nATR: Created revision {new_revision.number} of 
{project_name} {version_name}\n"
             message += f"ATR: 
https://{host}/compose/{project_name}/{version_name}\n"
             if not process.stderr.is_closing():
                 process.stderr.write(message.encode())
                 await process.stderr.drain()
-        else:
+        except types.FailedError:
             log.info(f"rsync upload unsuccessful for release {release_name}")
 
         # If we got here, there was no exception
diff --git a/atr/storage/writers/release.py b/atr/storage/writers/release.py
index 72d9f3b4..31a78695 100644
--- a/atr/storage/writers/release.py
+++ b/atr/storage/writers/release.py
@@ -461,20 +461,23 @@ class CommitteeParticipant(FoundationCommitter):
         if validated_path is None:
             raise storage.AccessError("Invalid file path")
         description = f"Upload via API: {validated_path}"
-        async with self.create_and_manage_revision(args.project, args.version, 
description) as creating:
-            target_path = creating.interim_path / validated_path
+
+        async def modify(path: pathlib.Path, _old_rev: sql.Revision | None) -> 
None:
+            target_path = path / validated_path
             await aiofiles.os.makedirs(target_path.parent, exist_ok=True)
-            if target_path.exists():
+            if await aiofiles.os.path.exists(target_path):
                 raise storage.AccessError("File already exists")
             async with aiofiles.open(target_path, "wb") as f:
                 await f.write(file_bytes)
-        if creating.new is None:
-            raise storage.AccessError("Failed to create revision")
+
+        revision = await self.__write_as.revision.create_revision(
+            args.project, args.version, self.__asf_uid, 
description=description, modify=modify
+        )
         async with db.session() as data:
             release_name = sql.release_name(args.project, args.version)
             return await data.revision(
                 release_name=release_name,
-                number=creating.new.number,
+                number=revision.number,
             ).demand(storage.AccessError("Revision not found"))
 
     async def upload_files(
@@ -487,7 +490,8 @@ class CommitteeParticipant(FoundationCommitter):
         """Process and save the uploaded files into a new draft revision."""
         number_of_files = len(files)
         description = f"Upload of {util.plural(number_of_files, 'file')} 
through web interface"
-        async with self.create_and_manage_revision(project_name, version_name, 
description) as creating:
+
+        async def modify(path: pathlib.Path, _old_rev: sql.Revision | None) -> 
None:
             # Save each uploaded file to the new revision directory
             for file in files:
                 # Determine the target path within the new revision directory
@@ -504,12 +508,18 @@ class CommitteeParticipant(FoundationCommitter):
                     relative_file_path = file_name
 
                 # Construct path inside the new revision directory
-                target_path = creating.interim_path / relative_file_path
+                target_path = path / relative_file_path
                 # Ensure parent directories exist within the new revision
                 await aiofiles.os.makedirs(target_path.parent, exist_ok=True)
                 await self.__save_file(file, target_path)
-        creation_error = str(creating.failed) if (creating.failed is not None) 
else None
-        return creation_error, len(files)
+
+        try:
+            await self.__write_as.revision.create_revision(
+                project_name, version_name, self.__asf_uid, 
description=description, modify=modify
+            )
+        except types.FailedError as e:
+            return str(e), len(files)
+        return None, len(files)
 
     async def __current_paths(self, interim_path: pathlib.Path) -> 
list[pathlib.Path]:
         all_current_paths_interim: list[pathlib.Path] = []
diff --git a/atr/tasks/svn.py b/atr/tasks/svn.py
index 87f532a0..f8ec1bab 100644
--- a/atr/tasks/svn.py
+++ b/atr/tasks/svn.py
@@ -25,6 +25,7 @@ import aioshutil
 import atr.log as log
 import atr.models.results as results
 import atr.models.schema as schema
+import atr.models.sql as sql
 import atr.storage as storage
 import atr.tasks.checks as checks
 
@@ -74,24 +75,21 @@ async def _import_files_core(args: SvnImport) -> str:
     description = "Import of files from subversion"
     async with storage.write(args.asf_uid) as write:
         wacp = await write.as_project_committee_participant(args.project_name)
-        async with wacp.revision.create_and_manage(
-            args.project_name, args.version_name, args.asf_uid, 
description=description
-        ) as creating:
-            # Uses creating.new after this block
-            log.debug(f"Created revision directory: {creating.interim_path}")
 
-            final_target_path = creating.interim_path
+        async def modify(path: pathlib.Path, _old_rev: sql.Revision | None) -> 
None:
+            log.debug(f"Created revision directory: {path}")
+
+            final_target_path = path
             if args.target_subdirectory:
-                final_target_path = creating.interim_path / 
args.target_subdirectory
+                final_target_path = path / args.target_subdirectory
                 # Validate that final_target_path is a subdirectory of 
new_revision_dir
-                if not final_target_path.is_relative_to(creating.interim_path):
+                if not final_target_path.is_relative_to(path):
                     raise SvnImportError(
-                        f"Target subdirectory {args.target_subdirectory}"
-                        f" is not a subdirectory of {creating.interim_path}"
+                        f"Target subdirectory {args.target_subdirectory} is 
not a subdirectory of {path}"
                     )
                 await aiofiles.os.makedirs(final_target_path, exist_ok=True)
 
-            temp_export_path = creating.interim_path / temp_export_dir_name
+            temp_export_path = path / temp_export_dir_name
 
             svn_command = [
                 "svn",
@@ -123,9 +121,10 @@ async def _import_files_core(args: SvnImport) -> str:
             await aiofiles.os.rmdir(temp_export_path)
             log.info(f"Removed temporary export directory: {temp_export_path}")
 
-        if creating.new is None:
-            raise SvnImportError("Internal error: New revision not found")
-        return f"Successfully imported files from SVN into revision 
{creating.new.number}"
+        new_revision = await wacp.revision.create_revision(
+            args.project_name, args.version_name, args.asf_uid, 
description=description, modify=modify
+        )
+        return f"Successfully imported files from SVN into revision 
{new_revision.number}"
 
 
 async def _import_files_core_run_svn_export(svn_command: list[str], 
temp_export_path: pathlib.Path) -> None:


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to