This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch sbp
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git


The following commit(s) were added to refs/heads/sbp by this push:
     new ca216273 Constrain allowed phases when creating a revision
ca216273 is described below

commit ca216273282c090afca95e686be03d26064eb2d4
Author: Sean B. Palmer <[email protected]>
AuthorDate: Wed Apr 1 20:02:56 2026 +0100

    Constrain allowed phases when creating a revision
---
 atr/get/test.py                               |  48 ++++++------
 atr/post/draft.py                             |  86 ++++++++++++----------
 atr/post/revisions.py                         |   7 +-
 atr/post/upload.py                            |   7 +-
 atr/ssh.py                                    |   7 +-
 atr/storage/types.py                          |   4 +
 atr/storage/writers/keys.py                   |   7 +-
 atr/storage/writers/release.py                |  55 ++++++++++++--
 atr/storage/writers/revision.py               |  14 ++++
 atr/storage/writers/vote.py                   |  12 ++-
 atr/tasks/sbom.py                             |  50 ++++++++-----
 atr/tasks/svn.py                              |   7 +-
 tests/unit/test_create_revision.py            |  28 +++++--
 tests/unit/test_create_revision_quarantine.py | 101 +++++++++++++++++++++++++-
 14 files changed, 329 insertions(+), 104 deletions(-)

diff --git a/atr/get/test.py b/atr/get/test.py
index 3c3a5346..d030478f 100644
--- a/atr/get/test.py
+++ b/atr/get/test.py
@@ -111,29 +111,6 @@ async def test_login_banned(
     return await web.redirect(root.index)
 
 
-@get.typed
-async def test_recheck_session(
-    _session: web.Public, _test_recheck_session: Literal["test/recheck-session"]
-) -> web.WerkzeugResponse:
-    """
-    URL: /test/recheck-session
-
-    Reset the last_account_check to epoch so the next request triggers a re-check.
-    """
-    if not config.get().ALLOW_TESTS:
-        return quart.abort(404)
-
-    import asfquart.session as asfquart_session
-
-    existing = await asfquart_session.read()
-    if existing is None:
-        raise base.ASFQuartException("No session to recheck", errorcode=400)
-
-    existing["metadata"]["last_account_check"] = 0
-    asfquart_session.write(existing)
-    return await web.redirect(root.index)
-
-
 @get.typed
 async def test_merge(
     session: web.Committer,
@@ -165,6 +142,7 @@ async def test_merge(
                     project_key,
                     version_key,
                     session.uid,
+                    allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
                     description="Test merge: prior revision",
                     modify=modify_prior,
                 )
@@ -173,6 +151,7 @@ async def test_merge(
             project_key,
             version_key,
             session.uid,
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
             description="Test merge: new revision",
             modify=modify_new,
         )
@@ -219,6 +198,29 @@ async def test_multiple(_session: web.Public, _test_multiple: Literal["test/mult
     return await template.blank(title="Test multiple forms", content=forms_html)
 
 
+@get.typed
+async def test_recheck_session(
+    _session: web.Public, _test_recheck_session: Literal["test/recheck-session"]
+) -> web.WerkzeugResponse:
+    """
+    URL: /test/recheck-session
+
+    Reset the last_account_check to epoch so the next request triggers a re-check.
+    """
+    if not config.get().ALLOW_TESTS:
+        return quart.abort(404)
+
+    import asfquart.session as asfquart_session
+
+    existing = await asfquart_session.read()
+    if existing is None:
+        raise base.ASFQuartException("No session to recheck", errorcode=400)
+
+    existing["metadata"]["last_account_check"] = 0
+    asfquart_session.write(existing)
+    return await web.redirect(root.index)
+
+
 @get.typed
 async def test_single(session: web.Public, _test_single: Literal["test/single"]) -> str:
     """
diff --git a/atr/post/draft.py b/atr/post/draft.py
index ebfd9ec3..020a4987 100644
--- a/atr/post/draft.py
+++ b/atr/post/draft.py
@@ -61,6 +61,7 @@ async def cache_reset(
             project_key,
             version_key,
             session.uid,
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
             description=description,
             reset_to_global_cache=True,
         )
@@ -215,6 +216,7 @@ async def recheck(
             project_key,
             version_key,
             session.uid,
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
             description=description,
             set_local_cache=True,
         )
@@ -231,39 +233,32 @@ async def recheck(
 
 
 @post.typed
-async def sbomgen(
+async def sbomconvert(
     session: web.Committer,
-    _draft_sbomgen: Literal["draft/sbomgen"],
+    _draft_sbomconvert: Literal["draft/sbomconvert"],
     project_key: safe.ProjectKey,
     version_key: safe.VersionKey,
     file_path: safe.RelPath,
     empty_form: form.Empty,
 ) -> web.WerkzeugResponse:
     """
-    URL: /draft/sbomgen/<project_key>/<version_key>/<file_path>
-    Generate a CycloneDX SBOM file for a candidate draft file, creating a new revision.
+    URL: /draft/sbomconvert/<project_key>/<version_key>/<file_path>
+    Convert an XML CycloneDX SBOM file into JSON, creating a new revision.
     """
     rel_path = file_path.as_path()
 
-    # Check that the file is a .tar.gz archive before creating a revision
-    if not (
-        rel_path.name.endswith(".tar.gz")
-        or rel_path.name.endswith(".tgz")
-        or rel_path.name.endswith(".zip")
-        or rel_path.name.endswith(".jar")
-    ):
-        raise base.ASFQuartException(
-            f"SBOM generation requires .tar.gz, .tgz, .zip or .jar files. Received: {rel_path.name}", errorcode=400
-        )
+    # Check that the file is a .cdx.xml file before continuing
+    if not rel_path.name.endswith(".cdx.xml"):
+        raise base.ASFQuartException(f"SBOM converter requires .cdx.xml file. Received: {rel_path.name}", errorcode=400)
 
     try:
-        description = "SBOM generation through web interface"
+        description = "SBOM conversion through web interface"
         async with storage.write(session) as write:
             wacp = await write.as_project_committee_participant(project_key)
 
             async def modify(path: pathlib.Path, old_rev: sql.Revision | None) -> None:
                 path_in_new_revision = path / rel_path
-                sbom_path_rel = rel_path.with_suffix(rel_path.suffix + ".cdx.json").name
+                sbom_path_rel = rel_path.with_suffix(".cdx.json").name
                 sbom_path_in_new_revision = path / rel_path.parent / sbom_path_rel
 
                 # Check that the source file exists in the new revision
@@ -280,19 +275,24 @@ async def sbomgen(
                     raise web.FlashError("Internal error: Revision not found")
 
                 # Create and queue the task, using paths within the new revision
-                sbom_task = await wacp.sbom.generate_cyclonedx(
+                sbom_task = await wacp.sbom.convert_cyclonedx(
                     project_key,
                     version_key,
-                    old_rev.number,
-                    path_in_new_revision,
-                    sbom_path_in_new_revision,
+                    old_rev.safe_number,
+                    str(path_in_new_revision),
+                    str(sbom_path_in_new_revision),
                 )
                 success = await interaction.wait_for_task(sbom_task)
                 if not success:
-                    raise web.FlashError("Internal error: SBOM generation timed out")
+                    raise web.FlashError("Internal error: SBOM conversion timed out")
 
             result = await wacp.revision.create_revision_with_quarantine(
-                project_key, version_key, session.uid, description=description, modify=modify
+                project_key,
+                version_key,
+                session.uid,
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+                description=description,
+                modify=modify,
             )
 
     except Exception as e:
@@ -312,32 +312,39 @@ async def sbomgen(
 
 
 @post.typed
-async def sbomconvert(
+async def sbomgen(
     session: web.Committer,
-    _draft_sbomconvert: Literal["draft/sbomconvert"],
+    _draft_sbomgen: Literal["draft/sbomgen"],
     project_key: safe.ProjectKey,
     version_key: safe.VersionKey,
     file_path: safe.RelPath,
     empty_form: form.Empty,
 ) -> web.WerkzeugResponse:
     """
-    URL: /draft/sbomconvert/<project_key>/<version_key>/<file_path>
-    Convert an XML CycloneDX SBOM file into JSON, creating a new revision.
+    URL: /draft/sbomgen/<project_key>/<version_key>/<file_path>
+    Generate a CycloneDX SBOM file for a candidate draft file, creating a new revision.
     """
     rel_path = file_path.as_path()
 
-    # Check that the file is a .cdx.xml file before continuing
-    if not rel_path.name.endswith(".cdx.xml"):
-        raise base.ASFQuartException(f"SBOM converter requires .cdx.xml file. Received: {rel_path.name}", errorcode=400)
+    # Check that the file is a .tar.gz archive before creating a revision
+    if not (
+        rel_path.name.endswith(".tar.gz")
+        or rel_path.name.endswith(".tgz")
+        or rel_path.name.endswith(".zip")
+        or rel_path.name.endswith(".jar")
+    ):
+        raise base.ASFQuartException(
+            f"SBOM generation requires .tar.gz, .tgz, .zip or .jar files. Received: {rel_path.name}", errorcode=400
+        )
 
     try:
-        description = "SBOM conversion through web interface"
+        description = "SBOM generation through web interface"
         async with storage.write(session) as write:
             wacp = await write.as_project_committee_participant(project_key)
 
             async def modify(path: pathlib.Path, old_rev: sql.Revision | None) -> None:
                 path_in_new_revision = path / rel_path
-                sbom_path_rel = rel_path.with_suffix(".cdx.json").name
+                sbom_path_rel = rel_path.with_suffix(rel_path.suffix + ".cdx.json").name
                 sbom_path_in_new_revision = path / rel_path.parent / sbom_path_rel
 
                 # Check that the source file exists in the new revision
@@ -354,19 +361,24 @@ async def sbomconvert(
                     raise web.FlashError("Internal error: Revision not found")
 
                 # Create and queue the task, using paths within the new revision
-                sbom_task = await wacp.sbom.convert_cyclonedx(
+                sbom_task = await wacp.sbom.generate_cyclonedx(
                     project_key,
                     version_key,
-                    old_rev.safe_number,
-                    str(path_in_new_revision),
-                    str(sbom_path_in_new_revision),
+                    old_rev.number,
+                    path_in_new_revision,
+                    sbom_path_in_new_revision,
                 )
                 success = await interaction.wait_for_task(sbom_task)
                 if not success:
-                    raise web.FlashError("Internal error: SBOM conversion timed out")
+                    raise web.FlashError("Internal error: SBOM generation timed out")
 
             result = await wacp.revision.create_revision_with_quarantine(
-                project_key, version_key, session.uid, description=description, modify=modify
+                project_key,
+                version_key,
+                session.uid,
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+                description=description,
+                modify=modify,
             )
 
     except Exception as e:
diff --git a/atr/post/revisions.py b/atr/post/revisions.py
index 2cd6ff7e..780a494c 100644
--- a/atr/post/revisions.py
+++ b/atr/post/revisions.py
@@ -74,7 +74,12 @@ async def _set_revision(
     async with storage.write(session) as write:
         wacp = await write.as_project_committee_participant(project_key)
         result = await wacp.revision.create_revision_with_quarantine(
-            project_key, version_key, session.uid, description=description, clone_from=selected_revision_number
+            project_key,
+            version_key,
+            session.uid,
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT, sql.ReleasePhase.RELEASE_PREVIEW}),
+            description=description,
+            clone_from=selected_revision_number,
         )
         if isinstance(result, sql.Quarantined):
             success = f"Revision copy from {selected_revision_number} received. Archive validation in progress."
diff --git a/atr/post/upload.py b/atr/post/upload.py
index 1b63e13e..7d71c35b 100644
--- a/atr/post/upload.py
+++ b/atr/post/upload.py
@@ -84,7 +84,12 @@ async def finalise(
                     await aioshutil.move(str(src), str(dst))
 
             result = await wacp.revision.create_revision_with_quarantine(
-                project_key, version_key, session.uid, description=description, modify=modify
+                project_key,
+                version_key,
+                session.uid,
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+                description=description,
+                modify=modify,
             )
 
         await aioshutil.rmtree(staging_dir)
diff --git a/atr/ssh.py b/atr/ssh.py
index 0472d587..e596769b 100644
--- a/atr/ssh.py
+++ b/atr/ssh.py
@@ -593,7 +593,12 @@ async def _step_07b_process_validated_rsync_write(
 
         try:
             result = await wacp.revision.create_revision_with_quarantine(
-                project_key, version_key, asf_uid, description=description, modify=modify
+                project_key,
+                version_key,
+                asf_uid,
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+                description=description,
+                modify=modify,
             )
             if isinstance(result, sql.Quarantined):
                 log.info(f"rsync upload quarantined for release {release_key}")
diff --git a/atr/storage/types.py b/atr/storage/types.py
index c78ec358..8dbb5e56 100644
--- a/atr/storage/types.py
+++ b/atr/storage/types.py
@@ -122,3 +122,7 @@ class PublicKeyError(Exception):
 
 class FailedError(Exception):
     pass
+
+
+class PhaseMismatchError(FailedError):
+    pass
diff --git a/atr/storage/writers/keys.py b/atr/storage/writers/keys.py
index f7e27315..1caca975 100644
--- a/atr/storage/writers/keys.py
+++ b/atr/storage/writers/keys.py
@@ -494,7 +494,12 @@ class CommitteeParticipant(FoundationCommitter):
                 await aiofiles.os.remove(path_in_new_revision)
 
             await self.__write_as.revision.create_revision_with_quarantine(
-                project_key, version_key, self.__asf_uid, description=description, modify=modify
+                project_key,
+                version_key,
+                self.__asf_uid,
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+                description=description,
+                modify=modify,
             )
         return outcomes
 
diff --git a/atr/storage/writers/release.py b/atr/storage/writers/release.py
index eaebfe8a..4668b6c7 100644
--- a/atr/storage/writers/release.py
+++ b/atr/storage/writers/release.py
@@ -183,7 +183,12 @@ class CommitteeParticipant(FoundationCommitter):
 
         try:
             await self.__write_as.revision.create_revision_with_quarantine(
-                project_key, version_key, self.__asf_uid, description=description, modify=modify
+                project_key,
+                version_key,
+                self.__asf_uid,
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_PREVIEW}),
+                description=description,
+                modify=modify,
             )
         except types.FailedError as e:
             return str(e)
@@ -225,7 +230,12 @@ class CommitteeParticipant(FoundationCommitter):
             await aiofiles.os.remove(path_in_new_revision)
 
         await self.__write_as.revision.create_revision_with_quarantine(
-            project_key, version, self.__asf_uid, description=description, modify=modify
+            project_key,
+            version,
+            self.__asf_uid,
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+            description=description,
+            modify=modify,
         )
         return metadata_files_deleted
 
@@ -266,7 +276,12 @@ class CommitteeParticipant(FoundationCommitter):
                 await f.write(f"{hash_value}  {rel_path.name}\n")
 
         await self.__write_as.revision.create_revision_with_quarantine(
-            project_key, version_key, self.__asf_uid, description=description, modify=modify
+            project_key,
+            version_key,
+            self.__asf_uid,
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+            description=description,
+            modify=modify,
         )
 
     async def import_from_svn(
@@ -321,7 +336,12 @@ class CommitteeParticipant(FoundationCommitter):
 
         try:
             await self.__write_as.revision.create_revision_with_quarantine(
-                project_key, version_key, self.__asf_uid, description=description, modify=modify
+                project_key,
+                version_key,
+                self.__asf_uid,
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+                description=description,
+                modify=modify,
             )
         except types.FailedError as e:
             return str(e), moved_files_names, skipped_files_names
@@ -405,7 +425,12 @@ class CommitteeParticipant(FoundationCommitter):
 
         try:
             await self.__write_as.revision.create_revision_with_quarantine(
-                project_key, version_key, self.__asf_uid, description=description, modify=modify
+                project_key,
+                version_key,
+                self.__asf_uid,
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_PREVIEW}),
+                description=description,
+                modify=modify,
             )
         except types.FailedError as e:
             return str(e), renamed_count, error_messages
@@ -473,7 +498,11 @@ class CommitteeParticipant(FoundationCommitter):
 
         description = "Creation of empty release candidate draft through web interface"
         await self.__write_as.revision.create_revision_with_quarantine(
-            project_key, version, self.__asf_uid, description=description
+            project_key,
+            version,
+            self.__asf_uid,
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+            description=description,
         )
         self.__write_as.append_to_audit_log(
             asf_uid=self.__asf_uid,
@@ -497,7 +526,12 @@ class CommitteeParticipant(FoundationCommitter):
                 await f.write(file_bytes)
 
         result = await self.__write_as.revision.create_revision_with_quarantine(
-            args.project, args.version, self.__asf_uid, description=description, modify=modify
+            args.project,
+            args.version,
+            self.__asf_uid,
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+            description=description,
+            modify=modify,
         )
         if isinstance(result, sql.Quarantined):
             return result
@@ -529,7 +563,12 @@ class CommitteeParticipant(FoundationCommitter):
 
         try:
             result = await self.__write_as.revision.create_revision_with_quarantine(
-                project_key, version_key, self.__asf_uid, description=description, modify=modify
+                project_key,
+                version_key,
+                self.__asf_uid,
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+                description=description,
+                modify=modify,
             )
         except types.FailedError as e:
             return str(e), len(files), False
diff --git a/atr/storage/writers/revision.py b/atr/storage/writers/revision.py
index 51809f7c..3cc352df 100644
--- a/atr/storage/writers/revision.py
+++ b/atr/storage/writers/revision.py
@@ -373,6 +373,8 @@ class CommitteeParticipant(FoundationCommitter):
         project_key: safe.ProjectKey,
         version_key: safe.VersionKey,
         asf_uid: str,
+        *,
+        allowed_phases: frozenset[sql.ReleasePhase],
         description: str | None = None,
         set_local_cache: bool = False,
         reset_to_global_cache: bool = False,
@@ -385,6 +387,11 @@ class CommitteeParticipant(FoundationCommitter):
             release = await data.release(key=release_key, _release_policy=True, _project_release_policy=True).demand(
                 RuntimeError("Release does not exist for new revision creation")
             )
+            if release.phase not in allowed_phases:
+                raise types.PhaseMismatchError(
+                    f"Cannot create revision: release phase is {release.phase.value}, "
+                    f"allowed: {', '.join(sorted(p.value for p in allowed_phases))}"
+                )
             if clone_from is not None:
                 old_revision = await data.revision(release_key=release_key, number=str(clone_from)).demand(
                     RuntimeError(f"Revision {clone_from} does not exist")
@@ -483,6 +490,13 @@ class CommitteeParticipant(FoundationCommitter):
                 await aioshutil.rmtree(temp_dir)
                 raise
 
+            if merged_release.phase not in allowed_phases:
+                await aioshutil.rmtree(temp_dir)
+                raise types.PhaseMismatchError(
+                    f"Cannot create revision: release phase is {merged_release.phase.value}, "
+                    f"allowed: {', '.join(sorted(p.value for p in allowed_phases))}"
+                )
+
             if set_local_cache:
                 merged_release.check_cache_key = str(uuid.uuid4())
             if reset_to_global_cache:
diff --git a/atr/storage/writers/vote.py b/atr/storage/writers/vote.py
index 0c6aed3e..fd21ff41 100644
--- a/atr/storage/writers/vote.py
+++ b/atr/storage/writers/vote.py
@@ -320,7 +320,11 @@ class CommitteeMember(CommitteeParticipant):
 
                 description = "Create a preview revision from the last candidate draft"
                 await self.__write_as.revision.create_revision_with_quarantine(
-                    project_key, release.safe_version_key, self.__asf_uid, description=description
+                    project_key,
+                    release.safe_version_key,
+                    self.__asf_uid,
+                    allowed_phases=frozenset({sql.ReleasePhase.RELEASE_PREVIEW}),
+                    description=description,
                 )
             case "failed" | "cancelled":
                 release.phase = sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT
@@ -411,7 +415,11 @@ class CommitteeMember(CommitteeParticipant):
 
             description = "Create a preview revision from the last candidate draft"
             await self.__write_as.revision.create_revision_with_quarantine(
-                project_key, release.safe_version_key, self.__asf_uid, description=description
+                project_key,
+                release.safe_version_key,
+                self.__asf_uid,
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_PREVIEW}),
+                description=description,
             )
             if (voting_round == 2) and (release.podling_thread_id is not None):
                 round_one_email_address, round_one_message_id = await util.email_mid_from_thread_id(
diff --git a/atr/tasks/sbom.py b/atr/tasks/sbom.py
index 83b5e70e..79f4e502 100644
--- a/atr/tasks/sbom.py
+++ b/atr/tasks/sbom.py
@@ -134,7 +134,12 @@ async def augment(args: FileArgs) -> results.Results | None:
                     await f.write(merged.dumps())
 
             await wacp.revision.create_revision_with_quarantine(
-                args.project_key, args.version_key, args.asf_uid or "unknown", description=description, modify=modify
+                args.project_key,
+                args.version_key,
+                args.asf_uid or "unknown",
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+                description=description,
+                modify=modify,
             )
 
     return results.SBOMAugment(
@@ -223,7 +228,12 @@ async def osv_scan(args: FileArgs) -> results.Results | None:
                 await f.write(merged.dumps())
 
         await wacp.revision.create_revision_with_quarantine(
-            args.project_key, args.version_key, args.asf_uid or "unknown", description=description, modify=modify
+            args.project_key,
+            args.version_key,
+            args.asf_uid or "unknown",
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+            description=description,
+            modify=modify,
         )
 
     return results.SBOMOSVScan(
@@ -343,24 +353,6 @@ async def score_tool(args: ScoreArgs) -> results.Results | None:
     )
 
 
-def _extracted_dir(temp_dir: str) -> str | None:
-    # Loop through all the dirs in temp_dir
-    extract_dir = None
-    log.info(f"Checking directories in {temp_dir}: {os.listdir(temp_dir)}")
-    for dir_name in os.listdir(temp_dir):
-        if dir_name.startswith("."):
-            continue
-        dir_path = os.path.join(temp_dir, dir_name)
-        if os.path.isdir(dir_path):
-            if extract_dir is None:
-                extract_dir = dir_path
-            else:
-                return temp_dir
-    if extract_dir is None:
-        extract_dir = temp_dir
-    return extract_dir
-
-
 async def _convert_cyclonedx_core(artifact_path: str, output_path: str, revision_str: str) -> dict[str, Any]:
     """Core logic to convert XML CycloneDX SBOM to JSON."""
     log.info(f"Generating CycloneDX JSON SBOM for {artifact_path} -> {output_path}")
@@ -389,6 +381,24 @@ async def _convert_cyclonedx_core(artifact_path: str, output_path: str, revision
     }
 
 
+def _extracted_dir(temp_dir: str) -> str | None:
+    # Loop through all the dirs in temp_dir
+    extract_dir = None
+    log.info(f"Checking directories in {temp_dir}: {os.listdir(temp_dir)}")
+    for dir_name in os.listdir(temp_dir):
+        if dir_name.startswith("."):
+            continue
+        dir_path = os.path.join(temp_dir, dir_name)
+        if os.path.isdir(dir_path):
+            if extract_dir is None:
+                extract_dir = dir_path
+            else:
+                return temp_dir
+    if extract_dir is None:
+        extract_dir = temp_dir
+    return extract_dir
+
+
 async def _generate_cyclonedx_core(artifact_path: str, output_path: str) -> dict[str, Any]:
     """Core logic to generate CycloneDX SBOM on failure."""
     log.info(f"Generating CycloneDX SBOM for {artifact_path} -> {output_path}")
diff --git a/atr/tasks/svn.py b/atr/tasks/svn.py
index acd67a25..829a31ef 100644
--- a/atr/tasks/svn.py
+++ b/atr/tasks/svn.py
@@ -131,7 +131,12 @@ async def _import_files_core(args: SvnImport) -> str:
             log.info(f"Removed temporary export directory: {temp_export_path}")
 
         result = await wacp.revision.create_revision_with_quarantine(
-            args.project_key, args.version_key, args.asf_uid, description=description, modify=modify
+            args.project_key,
+            args.version_key,
+            args.asf_uid,
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+            description=description,
+            modify=modify,
         )
         if isinstance(result, sql.Quarantined):
             log.info(f"SVN import quarantined for {project_str}-{version_str}")
diff --git a/tests/unit/test_create_revision.py b/tests/unit/test_create_revision.py
index a377baa7..f852aadc 100644
--- a/tests/unit/test_create_revision.py
+++ b/tests/unit/test_create_revision.py
@@ -159,7 +159,11 @@ async def test_clone_from_older_revision_skips_merge_without_intervening_change(
         mock.patch.object(revision.paths, "release_directory_base", return_value=tmp_path / "releases"),
     ):
         await participant.create_revision_with_quarantine(
-            safe.ProjectKey("proj"), safe.VersionKey("1.0"), "test", clone_from=safe.RevisionNumber("00002")
+            safe.ProjectKey("proj"),
+            safe.VersionKey("1.0"),
+            "test",
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_PREVIEW}),
+            clone_from=safe.RevisionNumber("00002"),
         )
 
     if merge_mock.called:
@@ -251,7 +255,10 @@ async def test_intervening_revision_triggers_merge_and_uses_latest_parent(tmp_pa
         mock.patch.object(revision.paths, "release_directory_base", return_value=tmp_path / "releases"),
     ):
         created_revision = await participant.create_revision_with_quarantine(
-            safe.ProjectKey("proj"), safe.VersionKey("1.0"), "test"
+            safe.ProjectKey("proj"),
+            safe.VersionKey("1.0"),
+            "test",
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_PREVIEW}),
         )
 
     assert isinstance(created_revision, FakeRevision)
@@ -273,7 +280,9 @@ async def test_modify_failed_error_propagates_and_cleans_up(tmp_path: pathlib.Pa
         (path / "file.txt").write_text("Should be cleaned up.")
         raise types.FailedError("Intentional error")
 
-    mock_session = _mock_db_session(mock.MagicMock())
+    release = mock.MagicMock()
+    release.phase = sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT
+    mock_session = _mock_db_session(release)
     participant = _make_participant()
 
     with (
@@ -283,7 +292,11 @@ async def test_modify_failed_error_propagates_and_cleans_up(tmp_path: pathlib.Pa
     ):
         with pytest.raises(types.FailedError, match="Intentional error"):
             await participant.create_revision_with_quarantine(
-                safe.ProjectKey("proj"), safe.VersionKey("1.0"), "test", modify=modify
+                safe.ProjectKey("proj"),
+                safe.VersionKey("1.0"),
+                "test",
+                allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+                modify=modify,
             )
 
     assert isinstance(received_args["path"], pathlib.Path)
@@ -349,7 +362,12 @@ async def test_v1_previous_attestable_suppresses_file_state_rows(tmp_path: pathl
     with contextlib.ExitStack() as stack:
         for patch in patches:
             stack.enter_context(patch)
-        await participant.create_revision_with_quarantine("proj", "1.0", "test")
+        await participant.create_revision_with_quarantine(
+            "proj",
+            "1.0",
+            "test",
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+        )
+        )
 
     added_objects = [call.args[0] for call in safe_data.add.call_args_list]
     file_state_rows = [obj for obj in added_objects if isinstance(obj, sql.ReleaseFileState)]
diff --git a/tests/unit/test_create_revision_quarantine.py b/tests/unit/test_create_revision_quarantine.py
index 91aae5dc..04940179 100644
--- a/tests/unit/test_create_revision_quarantine.py
+++ b/tests/unit/test_create_revision_quarantine.py
@@ -23,6 +23,7 @@ from typing import Final
 import pytest
 
 import atr.models.sql as sql
+import atr.storage.types as types
 import atr.storage.writers.revision as revision
 
 _QUARANTINE_TOKEN_ALPHABET: Final[str] = "qpzry9x8gf2tvdw0s3jn54khce6mua7b"
@@ -150,11 +151,88 @@ async def test_no_quarantine_returns_revision_when_no_archives(tmp_path: pathlib
 
     with contextlib.ExitStack() as stack:
         _apply_patches(stack, patches)
-        result = await participant.create_revision_with_quarantine("proj", "1.0", "test")
+        result = await participant.create_revision_with_quarantine(
+            "proj",
+            "1.0",
+            "test",
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+        )
 
     assert result is fake_revision
 
 
[email protected]
+async def test_phase_gate_allows_matching_phase(tmp_path: pathlib.Path):
+    release = mock.MagicMock()
+    release.phase = sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT
+    release.project = mock.MagicMock()
+    release.project.release_policy = None
+    release.release_policy = None
+    release.key = sql.release_key("proj", "1.0")
+
+    mock_session = _mock_db_session(release)
+    participant = _make_participant()
+    fake_revision = mock.MagicMock(spec=sql.Revision)
+
+    patches = [
+        mock.patch.object(revision.aiofiles.os, "makedirs", new_callable=mock.AsyncMock),
+        mock.patch.object(revision.aiofiles.os, "rename", new_callable=mock.AsyncMock),
+        mock.patch.object(
+            revision.attestable,
+            "paths_to_hashes_and_sizes",
+            new_callable=mock.AsyncMock,
+            return_value=({"README.md": "hash1"}, {"README.md": 100}),
+        ),
+        mock.patch.object(revision.attestable, "write_files_data", new_callable=mock.AsyncMock),
+        mock.patch.object(revision.db, "session", return_value=mock_session),
+        mock.patch.object(revision.detection, "validate_directory", return_value=[]),
+        mock.patch.object(revision.detection, "detect_archives_requiring_quarantine", return_value=[]),
+        mock.patch.object(revision.interaction, "latest_revision", new_callable=mock.AsyncMock, return_value=None),
+        mock.patch.object(revision, "_commit_new_revision", new_callable=mock.AsyncMock, return_value=fake_revision),
+        mock.patch.object(
+            revision, "_lock_and_merge", new_callable=mock.AsyncMock, return_value=(None, None, None, release)
+        ),
+        mock.patch.object(revision, "SafeSession", return_value=MockQuarantineSession(MockQuarantineData(None))),
+        mock.patch.object(revision.paths, "get_tmp_dir", return_value=tmp_path),
+        mock.patch.object(revision.util, "chmod_directories"),
+        mock.patch.object(revision.util, "chmod_files"),
+        mock.patch.object(revision.util, "paths_to_inodes", return_value={}),
+        mock.patch.object(revision.attestable, "load", new_callable=mock.AsyncMock, return_value=None),
+    ]
+
+    with contextlib.ExitStack() as stack:
+        _apply_patches(stack, patches)
+        result = await participant.create_revision_with_quarantine(
+            "proj",
+            "1.0",
+            "test",
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+        )
+
+    assert result is fake_revision
+
+
[email protected]
+async def test_phase_gate_rejects_mismatched_phase():
+    release = mock.MagicMock()
+    release.phase = sql.ReleasePhase.RELEASE_CANDIDATE
+
+    mock_session = _mock_db_session(release)
+    participant = _make_participant()
+
+    with mock.patch.object(revision.db, "session", return_value=mock_session):
+        with pytest.raises(
+            types.PhaseMismatchError,
+            match="release phase is release_candidate",
+        ):
+            await participant.create_revision_with_quarantine(
+                "proj",
+                "1.0",
+                "test",
+                
allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+            )
+
+
 @pytest.mark.asyncio
 async def test_quarantine_branch_returns_quarantined_when_archives_detected(tmp_path: pathlib.Path):
     release = mock.MagicMock()
@@ -204,7 +282,12 @@ async def test_quarantine_branch_returns_quarantined_when_archives_detected(tmp_
 
     with contextlib.ExitStack() as stack:
         _apply_patches(stack, patches)
-        result = await participant.create_revision_with_quarantine("proj", "1.0", "test")
+        result = await participant.create_revision_with_quarantine(
+            "proj",
+            "1.0",
+            "test",
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+        )
 
     assert isinstance(result, FakeQuarantined)
     assert result.status == sql.QuarantineStatus.PENDING
@@ -273,7 +356,12 @@ async def test_quarantine_dedup_applied_to_task_args(tmp_path: pathlib.Path):
 
     with contextlib.ExitStack() as stack:
         _apply_patches(stack, patches)
-        result = await participant.create_revision_with_quarantine("proj", "1.0", "test")
+        result = await participant.create_revision_with_quarantine(
+            "proj",
+            "1.0",
+            "test",
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+        )
 
     assert isinstance(result, FakeQuarantined)
 
@@ -354,7 +442,12 @@ async def test_quarantine_stores_prior_revision_key_from_lock(tmp_path: pathlib.
 
     with contextlib.ExitStack() as stack:
         _apply_patches(stack, patches)
-        result = await participant.create_revision_with_quarantine("proj", "1.0", "test")
+        result = await participant.create_revision_with_quarantine(
+            "proj",
+            "1.0",
+            "test",
+            allowed_phases=frozenset({sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT}),
+        )
 
     assert isinstance(result, FakeQuarantined)
     assert result.prior_revision_key == f"{release.key} 00003"


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]


Reply via email to