This is an automated email from the ASF dual-hosted git repository.

arm pushed a commit to branch arm
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git

commit 82bae60a134a3da95cb28421707e1a711229af67
Author: Alastair McFarlane <[email protected]>
AuthorDate: Tue Mar 24 15:04:48 2026 +0000

    Allow safe types to be used in task args in the database.
---
 atr/get/sbom.py                | 12 +++++---
 atr/models/results.py          | 28 ++++++++---------
 atr/models/sql.py              | 44 ++++++++++++++++++++++++--
 atr/paths.py                   | 10 ++++++
 atr/post/sbom.py               | 43 ++++++++++++--------------
 atr/storage/writers/release.py |  6 ++--
 atr/storage/writers/sbom.py    | 24 +++++++--------
 atr/tasks/__init__.py          |  4 +--
 atr/tasks/keys.py              | 10 +++---
 atr/tasks/sbom.py              | 70 +++++++++++++++++++++---------------------
 atr/tasks/svn.py               | 23 +++++++-------
 11 files changed, 159 insertions(+), 115 deletions(-)

diff --git a/atr/get/sbom.py b/atr/get/sbom.py
index 156e473c..d3436283 100644
--- a/atr/get/sbom.py
+++ b/atr/get/sbom.py
@@ -463,7 +463,7 @@ def _report_header(
     ]
     if not is_release_candidate:
         block.p[
-            "This report is for revision ", 
htm.code[task_result.revision_number], "."
+            "This report is for revision ", 
htm.code[str(task_result.revision_number)], "."
         ]  # TODO: Mark if a subsequent score has failed
     elif release.phase == sql.ReleasePhase.RELEASE_CANDIDATE:
         block.p[f"This report is for the latest {release.version} release 
candidate."]
@@ -671,7 +671,9 @@ def _vulnerability_scan_button(block: htm.Block) -> None:
     )
 
 
-def _vulnerability_scan_find_completed_task(osv_tasks: Sequence[sql.Task], 
revision_number: str) -> sql.Task | None:
+def _vulnerability_scan_find_completed_task(
+    osv_tasks: Sequence[sql.Task], revision_number: safe.RevisionNumber
+) -> sql.Task | None:
     """Find the most recent completed OSV scan task for the given revision."""
     for task in osv_tasks:
         if (task.status == sql.TaskStatus.COMPLETED) and (task.result is not 
None):
@@ -681,10 +683,12 @@ def _vulnerability_scan_find_completed_task(osv_tasks: 
Sequence[sql.Task], revis
     return None
 
 
-def _vulnerability_scan_find_in_progress_task(osv_tasks: Sequence[sql.Task], 
revision_number: str) -> sql.Task | None:
+def _vulnerability_scan_find_in_progress_task(
+    osv_tasks: Sequence[sql.Task], revision_number: safe.RevisionNumber
+) -> sql.Task | None:
     """Find the most recent in-progress OSV scan task for the given 
revision."""
     for task in osv_tasks:
-        if task.revision_number == revision_number:
+        if task.revision_number == str(revision_number):
             if task.status in (sql.TaskStatus.QUEUED, sql.TaskStatus.ACTIVE, 
sql.TaskStatus.FAILED):
                 return task
     return None
diff --git a/atr/models/results.py b/atr/models/results.py
index b3f14a7f..b46ff4d8 100644
--- a/atr/models/results.py
+++ b/atr/models/results.py
@@ -19,7 +19,7 @@ from typing import Annotated, Any, Literal
 
 import pydantic
 
-from . import schema
+from . import safe, schema
 
 
 class DistributionStatusCheck(schema.Strict):
@@ -105,14 +105,14 @@ class OSVComponent(schema.Strict):
 
 class SBOMOSVScan(schema.Strict):
     kind: Literal["sbom_osv_scan"] = schema.Field(alias="kind")
-    project_key: str = schema.description("Project name")
-    version_key: str = schema.description("Version name")
-    revision_number: str = schema.description("Revision number")
+    project_key: safe.ProjectKey = schema.description("Project name")
+    version_key: safe.VersionKey = schema.description("Version name")
+    revision_number: safe.RevisionNumber = schema.description("Revision 
number")
     bom_version: int | None = schema.Field(
         default=None, strict=False, description="BOM Version produced with 
scan results"
     )
-    file_path: str = schema.description("Relative path to the scanned SBOM 
file")
-    new_file_path: str = schema.Field(default="", strict=False, 
description="Relative path to the updated SBOM file")
+    file_path: str = schema.description("Absolute path to the scanned SBOM 
file")
+    new_file_path: str = schema.Field(default="", strict=False, 
description="Absolute path to the updated SBOM file")
     components: list[OSVComponent] = schema.description("Components with 
vulnerabilities")
     ignored: list[str] = schema.description("Components ignored")
 
@@ -165,23 +165,23 @@ class SBOMAugment(schema.Strict):
 
 class SBOMQsScore(schema.Strict):
     kind: Literal["sbom_qs_score"] = schema.Field(alias="kind")
-    project_key: str = schema.description("Project name")
-    version_key: str = schema.description("Version name")
-    revision_number: str = schema.description("Revision number")
-    file_path: str = schema.description("Relative path to the scored SBOM 
file")
+    project_key: safe.ProjectKey = schema.description("Project name")
+    version_key: safe.VersionKey = schema.description("Version name")
+    revision_number: safe.RevisionNumber = schema.description("Revision 
number")
+    file_path: safe.RelPath = schema.description("Relative path to the scored 
SBOM file")
     report: SbomQsReport
 
 
 class SBOMToolScore(schema.Strict):
     kind: Literal["sbom_tool_score"] = schema.Field(alias="kind")
-    project_key: str = schema.description("Project name")
-    version_key: str = schema.description("Version name")
-    revision_number: str = schema.description("Revision number")
+    project_key: safe.ProjectKey = schema.description("Project name")
+    version_key: safe.VersionKey = schema.description("Version name")
+    revision_number: safe.RevisionNumber = schema.description("Revision 
number")
     bom_version: int | None = schema.Field(default=None, strict=False, 
description="BOM Version scanned")
     prev_bom_version: int | None = schema.Field(
         default=None, strict=False, description="BOM Version from previous 
release"
     )
-    file_path: str = schema.description("Relative path to the scored SBOM 
file")
+    file_path: safe.RelPath = schema.description("Relative path to the scored 
SBOM file")
     warnings: list[str] = schema.description("Warnings from the SBOM tool")
     errors: list[str] = schema.description("Errors from the SBOM tool")
     outdated: list[str] | str | None = schema.description("Outdated tool(s) 
from the SBOM tool")
diff --git a/atr/models/sql.py b/atr/models/sql.py
index d0a1a817..9320c23e 100644
--- a/atr/models/sql.py
+++ b/atr/models/sql.py
@@ -303,6 +303,44 @@ class UTCDateTime(sqlalchemy.types.TypeDecorator):
             return value
 
 
+class SafeJSON(sqlalchemy.types.TypeDecorator):
+    """JSON column that serialises SafeType values to plain strings.
+
+    Use instead of sqlalchemy.JSON whenever the stored value may contain
+    atr.models.safe.SafeType instances (which are not JSON-serialisable by
+    the standard library encoder).
+    """
+
+    impl = sqlalchemy.JSON
+    cache_ok = True
+
+    def process_bind_param(self, value, dialect):
+        if value is None:
+            return None
+        if hasattr(value, "model_dump"):
+            return value.model_dump(mode="json")
+        return _safe_json_encode(value)
+
+    def process_result_value(self, value, dialect):
+        return value
+
+
+def _safe_json_encode(value: Any) -> Any:
+    """Recursively convert SafeType instances to plain strings."""
+    from . import safe
+
+    if isinstance(value, safe.SafeType):
+        return str(value)
+    if isinstance(value, dict):
+        for k in value:
+            if not isinstance(k, str):
+                raise TypeError(f"Dict key must be str, got 
{type(k).__name__!r}: {k!r}")
+        return {k: _safe_json_encode(v) for k, v in value.items()}
+    if isinstance(value, list):
+        return [_safe_json_encode(v) for v in value]
+    return value
+
+
 class ResultsJSON(sqlalchemy.types.TypeDecorator):
     impl = sqlalchemy.JSON
     cache_ok = True
@@ -311,7 +349,7 @@ class ResultsJSON(sqlalchemy.types.TypeDecorator):
         if value is None:
             return None
         if hasattr(value, "model_dump"):
-            return value.model_dump()
+            return value.model_dump(mode="json")
         if isinstance(value, dict):
             return value
         raise ValueError("Unsupported value for Results column")
@@ -336,7 +374,7 @@ class 
QuarantineFileMetadataJSON(sqlalchemy.types.TypeDecorator):
     def process_bind_param(self, value, dialect):
         if value is None:
             return None
-        return _QUARANTINE_FILE_METADATA_ADAPTER.dump_python(value)
+        return _QUARANTINE_FILE_METADATA_ADAPTER.dump_python(value, 
mode="json")
 
     def process_result_value(self, value, dialect):
         if value is None:
@@ -394,7 +432,7 @@ class Task(sqlmodel.SQLModel, table=True):
     id: int = sqlmodel.Field(default=None, primary_key=True)
     status: TaskStatus = sqlmodel.Field(default=TaskStatus.QUEUED, index=True)
     task_type: TaskType
-    task_args: Any = 
sqlmodel.Field(sa_column=sqlalchemy.Column(sqlalchemy.JSON))
+    task_args: Any = sqlmodel.Field(sa_column=sqlalchemy.Column(SafeJSON))
     inputs_hash: str | None = sqlmodel.Field(
         default=None,
         **example("blake3:7f83b1657ff1fc..."),
diff --git a/atr/paths.py b/atr/paths.py
index a9ddefa7..6da2558c 100644
--- a/atr/paths.py
+++ b/atr/paths.py
@@ -44,6 +44,10 @@ def get_finished_dir() -> pathlib.Path:
     return pathlib.Path(config.get().FINISHED_STORAGE_DIR)
 
 
+def get_finished_dir_for(project_key: safe.ProjectKey, version_key: 
safe.VersionKey) -> pathlib.Path:
+    return pathlib.Path(config.get().FINISHED_STORAGE_DIR) / str(project_key) 
/ str(version_key)
+
+
 def get_quarantined_dir() -> pathlib.Path:
     return pathlib.Path(config.get().STATE_DIR) / "quarantined"
 
@@ -57,6 +61,12 @@ def get_unfinished_dir() -> pathlib.Path:
     return pathlib.Path(config.get().UNFINISHED_STORAGE_DIR)
 
 
+def get_unfinished_dir_for(
+    project_key: safe.ProjectKey, version_key: safe.VersionKey, revision: 
safe.RevisionNumber
+) -> pathlib.Path:
+    return pathlib.Path(config.get().UNFINISHED_STORAGE_DIR) / 
str(project_key) / str(version_key) / str(revision)
+
+
 def get_upload_staging_dir(session_token: str) -> pathlib.Path:
     if not session_token.isalnum():
         raise ValueError("Invalid session token")
diff --git a/atr/post/sbom.py b/atr/post/sbom.py
index bde31ba0..04db67e4 100644
--- a/atr/post/sbom.py
+++ b/atr/post/sbom.py
@@ -58,9 +58,9 @@ async def _augment(
     session: web.Committer, project_key: safe.ProjectKey, version_key: 
safe.VersionKey, rel_path: safe.RelPath
 ) -> web.WerkzeugResponse:
     """Augment a CycloneDX SBOM file."""
-    path = rel_path.as_path()
     # Check that the file is a .cdx.json archive before creating a revision
-    if not (path.name.endswith(".cdx.json")):
+    file_name = rel_path.as_path().name
+    if not (file_name.endswith(".cdx.json")):
         raise base.ASFQuartException("SBOM augmentation is only supported for 
.cdx.json files", errorcode=400)
 
     try:
@@ -68,16 +68,14 @@ async def _augment(
             release = await data.release(project_key=str(project_key), 
version=str(version_key)).demand(
                 RuntimeError("Release does not exist for new revision 
creation")
             )
-            revision_number = release.latest_revision_number
-            if revision_number is None:
-                raise RuntimeError("No revision number found for new revision 
creation")
-            log.info(f"Augmenting SBOM for {project_key} {version_key} 
{revision_number} {path}")
+            revision_number = release.safe_latest_revision_number
+            log.info(f"Augmenting SBOM for {project_key} {version_key} 
{revision_number!s} {rel_path!s}")
         async with storage.write_as_project_committee_member(project_key) as 
wacm:
             sbom_task = await wacm.sbom.augment_cyclonedx(
                 project_key,
                 version_key,
                 revision_number,
-                path,
+                rel_path,
             )
 
     except Exception as e:
@@ -85,16 +83,16 @@ async def _augment(
         await quart.flash(f"Error augmenting SBOM: {e!s}", "error")
         return await session.redirect(
             get.sbom.report,
-            project_key=project_key,
-            version_key=version_key,
+            project_key=str(project_key),
+            version_key=str(version_key),
             file_path=str(rel_path),
         )
 
     return await session.redirect(
         get.sbom.report,
-        success=f"SBOM augmentation task queued for {path.name} (task ID: 
{util.unwrap(sbom_task.id)})",
-        project_key=project_key,
-        version_key=version_key,
+        success=f"SBOM augmentation task queued for {file_name} (task ID: 
{util.unwrap(sbom_task.id)})",
+        project_key=str(project_key),
+        version_key=str(version_key),
         file_path=str(rel_path),
     )
 
@@ -103,8 +101,7 @@ async def _scan(
     session: web.Committer, project_key: safe.ProjectKey, version_key: 
safe.VersionKey, rel_path: safe.RelPath
 ) -> web.WerkzeugResponse:
     """Scan a CycloneDX SBOM file for vulnerabilities using OSV."""
-    path = rel_path.as_path()
-    if not (path.name.endswith(".cdx.json")):
+    if not (rel_path.as_path().name.endswith(".cdx.json")):
         raise base.ASFQuartException("OSV scanning is only supported for 
.cdx.json files", errorcode=400)
 
     try:
@@ -112,16 +109,14 @@ async def _scan(
             release = await data.release(project_key=str(project_key), 
version=str(version_key)).demand(
                 RuntimeError("Release does not exist for OSV scan")
             )
-            revision_number = release.latest_revision_number
-            if revision_number is None:
-                raise RuntimeError("No revision number found for OSV scan")
-            log.info(f"Starting OSV scan for {project_key} {version_key} 
{revision_number} {path}")
+            revision_number = release.safe_latest_revision_number
+            log.info(f"Starting OSV scan for {project_key!s} {version_key!s} 
{revision_number!s} {rel_path!s}")
         async with storage.write_as_project_committee_member(project_key) as 
wacm:
             sbom_task = await wacm.sbom.osv_scan_cyclonedx(
                 project_key,
                 version_key,
                 revision_number,
-                path,
+                rel_path,
             )
 
     except Exception as e:
@@ -129,15 +124,15 @@ async def _scan(
         await quart.flash(f"Error starting OSV scan: {e!s}", "error")
         return await session.redirect(
             get.sbom.report,
-            project_key=project_key,
-            version_key=version_key,
+            project_key=str(project_key),
+            version_key=str(version_key),
             file_path=str(rel_path),
         )
 
     return await session.redirect(
         get.sbom.report,
-        success=f"OSV vulnerability scan queued for {path.name} (task ID: 
{util.unwrap(sbom_task.id)})",
-        project_key=project_key,
-        version_key=version_key,
+        success=f"OSV vulnerability scan queued for {rel_path!s} (task ID: 
{util.unwrap(sbom_task.id)})",
+        project_key=str(project_key),
+        version_key=str(version_key),
         file_path=str(rel_path),
     )
diff --git a/atr/storage/writers/release.py b/atr/storage/writers/release.py
index facdd57e..eaebfe8a 100644
--- a/atr/storage/writers/release.py
+++ b/atr/storage/writers/release.py
@@ -278,11 +278,11 @@ class CommitteeParticipant(FoundationCommitter):
         target_subdirectory: safe.RelPath | None,
     ) -> sql.Task:
         task_args = {
-            "svn_url": str(svn_url),
+            "svn_url": svn_url,
             "revision": svn_revision,
             "target_subdirectory": str(target_subdirectory) if 
target_subdirectory else None,
-            "project_key": str(project_key),
-            "version_key": str(version_key),
+            "project_key": project_key,
+            "version_key": version_key,
             "asf_uid": self.__asf_uid,
         }
         svn_import_task = sql.Task(
diff --git a/atr/storage/writers/sbom.py b/atr/storage/writers/sbom.py
index c64f3168..326a0d0b 100644
--- a/atr/storage/writers/sbom.py
+++ b/atr/storage/writers/sbom.py
@@ -80,16 +80,16 @@ class CommitteeParticipant(FoundationCommitter):
         self,
         project_key: safe.ProjectKey,
         version_key: safe.VersionKey,
-        revision_number: str,
-        rel_path: pathlib.Path,
+        revision_number: safe.RevisionNumber,
+        rel_path: safe.RelPath,
     ) -> sql.Task:
         sbom_task = sql.Task(
             task_type=sql.TaskType.SBOM_AUGMENT,
             task_args=sbom.FileArgs(
-                project_key=str(project_key),
-                version_key=str(version_key),
+                project_key=project_key,
+                version_key=version_key,
                 revision_number=revision_number,
-                file_path=str(rel_path),
+                file_path=rel_path,
                 asf_uid=util.unwrap(self.__asf_uid),
             ).model_dump(),
             asf_uid=util.unwrap(self.__asf_uid),
@@ -97,7 +97,7 @@ class CommitteeParticipant(FoundationCommitter):
             status=sql.TaskStatus.QUEUED,
             project_key=str(project_key),
             version_key=str(version_key),
-            revision_number=revision_number,
+            revision_number=str(revision_number),
             primary_rel_path=str(rel_path),
         )
         self.__data.add(sbom_task)
@@ -139,16 +139,16 @@ class CommitteeParticipant(FoundationCommitter):
         self,
         project_key: safe.ProjectKey,
         version_key: safe.VersionKey,
-        revision_number: str,
-        rel_path: pathlib.Path,
+        revision_number: safe.RevisionNumber,
+        rel_path: safe.RelPath,
     ) -> sql.Task:
         sbom_task = sql.Task(
             task_type=sql.TaskType.SBOM_OSV_SCAN,
             task_args=sbom.FileArgs(
-                project_key=str(project_key),
-                version_key=str(version_key),
+                project_key=project_key,
+                version_key=version_key,
                 revision_number=revision_number,
-                file_path=str(rel_path),
+                file_path=rel_path,
                 asf_uid=util.unwrap(self.__asf_uid),
             ).model_dump(),
             asf_uid=util.unwrap(self.__asf_uid),
@@ -156,7 +156,7 @@ class CommitteeParticipant(FoundationCommitter):
             status=sql.TaskStatus.QUEUED,
             project_key=str(project_key),
             version_key=str(version_key),
-            revision_number=revision_number,
+            revision_number=str(revision_number),
             primary_rel_path=str(rel_path),
         )
         self.__data.add(sbom_task)
diff --git a/atr/tasks/__init__.py b/atr/tasks/__init__.py
index ee43c8f4..7f3879b4 100644
--- a/atr/tasks/__init__.py
+++ b/atr/tasks/__init__.py
@@ -219,8 +219,8 @@ async def keys_import_file(
                 task_type=sql.TaskType.KEYS_IMPORT_FILE,
                 task_args=keys.ImportFile(
                     asf_uid=asf_uid,
-                    project_key=str(project_key),
-                    version_key=str(version_key),
+                    project_key=project_key,
+                    version_key=version_key,
                 ).model_dump(),
                 asf_uid=asf_uid,
                 revision_number=revision_number,
diff --git a/atr/tasks/keys.py b/atr/tasks/keys.py
index a183718d..b3743555 100644
--- a/atr/tasks/keys.py
+++ b/atr/tasks/keys.py
@@ -26,18 +26,16 @@ class ImportFile(schema.Strict):
     """Import a KEYS file from a draft release candidate revision."""
 
     asf_uid: str
-    project_key: str
-    version_key: str
+    project_key: safe.ProjectKey
+    version_key: safe.VersionKey
 
 
 @checks.with_model(ImportFile)
 async def import_file(args: ImportFile) -> results.Results | None:
     """Import a KEYS file from a draft release candidate revision."""
-    project = safe.ProjectKey(args.project_key)
-    version = safe.VersionKey(args.version_key)
     async with storage.write(args.asf_uid) as write:
-        wacm = await write.as_project_committee_member(project)
-        outcomes = await wacm.keys.import_keys_file(project, version)
+        wacm = await write.as_project_committee_member(args.project_key)
+        outcomes = await wacm.keys.import_keys_file(args.project_key, 
args.version_key)
         if outcomes.any_error:
             # TODO: Log this? This code is unused anyway
             pass
diff --git a/atr/tasks/sbom.py b/atr/tasks/sbom.py
index 22a1963f..c96e6a6e 100644
--- a/atr/tasks/sbom.py
+++ b/atr/tasks/sbom.py
@@ -70,29 +70,31 @@ class SBOMScoringError(Exception):
 
 
 class FileArgs(schema.Strict):
-    project_key: str = schema.description("Project name")
-    version_key: str = schema.description("Version name")
-    revision_number: str = schema.description("Revision number")
-    file_path: str = schema.description("Relative path to the SBOM file")
+    project_key: safe.ProjectKey = schema.description("Project name")
+    version_key: safe.VersionKey = schema.description("Version name")
+    revision_number: safe.RevisionNumber = schema.description("Revision 
number")
+    file_path: safe.RelPath = schema.description("Relative path to the SBOM 
file")
     asf_uid: str | None = None
 
 
 class ScoreArgs(FileArgs):
-    previous_release_version: str | None = schema.description("Previous 
release version")
+    previous_release_version: safe.VersionKey | None = 
schema.description("Previous release version")
 
 
 @checks.with_model(FileArgs)
 async def augment(args: FileArgs) -> results.Results | None:
-    project = safe.ProjectKey(args.project_key)
-    version = safe.VersionKey(args.version_key)
+    project_str = str(args.project_key)
+    version_str = str(args.version_key)
+    revision_str = str(args.revision_number)
+    path_str = str(args.file_path)
 
-    base_dir = paths.get_unfinished_dir() / args.project_key / 
args.version_key / args.revision_number
+    base_dir = paths.get_unfinished_dir() / project_str / version_str / 
revision_str
     if not await aiofiles.os.path.isdir(base_dir):
         raise SBOMScoringError("Revision directory does not exist", 
{"base_dir": str(base_dir)})
-    full_path = base_dir / args.file_path
+    full_path = base_dir / path_str
     full_path_str = str(full_path)
     if not (full_path_str.endswith(".cdx.json") and await 
aiofiles.os.path.isfile(full_path)):
-        raise SBOMScoringError("SBOM file does not exist", {"file_path": 
args.file_path})
+        raise SBOMScoringError("SBOM file does not exist", {"file_path": 
path_str})
     # Read from the old revision
     bundle = sbom.utilities.path_to_bundle(full_path)
     patch_ops = await sbom.utilities.bundle_to_ntia_patch(bundle)
@@ -100,14 +102,14 @@ async def augment(args: FileArgs) -> results.Results | 
None:
     new_full_path_str: str | None = None
     new_version = None
     if patch_ops:
-        new_version, merged = sbom.utilities.apply_patch("augment", 
args.revision_number, bundle, patch_ops)
+        new_version, merged = sbom.utilities.apply_patch("augment", 
revision_str, bundle, patch_ops)
         description = "SBOM augmentation through web interface"
         async with storage.write(args.asf_uid) as write:
-            wacp = await write.as_project_committee_participant(project)
+            wacp = await 
write.as_project_committee_participant(args.project_key)
 
             async def modify(path: pathlib.Path, _old_rev: sql.Revision | 
None) -> None:
                 nonlocal new_full_path, new_full_path_str
-                new_full_path = path / args.file_path
+                new_full_path = path / path_str
                 new_full_path_str = str(new_full_path)
                 # Write to the new revision
                 log.info(f"Writing augmented SBOM to {new_full_path_str}")
@@ -116,7 +118,7 @@ async def augment(args: FileArgs) -> results.Results | None:
                     await f.write(merged.dumps())
 
             await wacp.revision.create_revision_with_quarantine(
-                project, version, args.asf_uid or "unknown", 
description=description, modify=modify
+                args.project_key, args.version_key, args.asf_uid or "unknown", 
description=description, modify=modify
             )
 
     return results.SBOMAugment(
@@ -146,16 +148,16 @@ async def generate_cyclonedx(args: GenerateCycloneDX) -> 
results.Results | None:
 
 @checks.with_model(FileArgs)
 async def osv_scan(args: FileArgs) -> results.Results | None:
-    project = safe.ProjectKey(args.project_key)
-    version = safe.VersionKey(args.version_key)
+    revision_str = str(args.revision_number)
+    path_str = str(args.file_path)
 
-    base_dir = paths.get_unfinished_dir() / args.project_key / 
args.version_key / args.revision_number
+    base_dir = paths.get_unfinished_dir_for(args.project_key, 
args.version_key, args.revision_number)
     if not await aiofiles.os.path.isdir(base_dir):
         raise SBOMScanningError("Revision directory does not exist", 
{"base_dir": str(base_dir)})
-    full_path = base_dir / args.file_path
+    full_path = base_dir / path_str
     full_path_str = str(full_path)
     if not (full_path_str.endswith(".cdx.json") and await 
aiofiles.os.path.isfile(full_path)):
-        raise SBOMScanningError("SBOM file does not exist", {"file_path": 
args.file_path})
+        raise SBOMScanningError("SBOM file does not exist", {"file_path": 
path_str})
     bundle = sbom.utilities.path_to_bundle(full_path)
     vulnerabilities, ignored = await sbom.osv.scan_bundle(bundle)
     patch_ops = await sbom.utilities.bundle_to_vuln_patch(bundle, 
vulnerabilities)
@@ -172,14 +174,14 @@ async def osv_scan(args: FileArgs) -> results.Results | 
None:
 
     new_full_path: pathlib.Path | None = None
     new_full_path_str: str | None = None
-    new_version, merged = sbom.utilities.apply_patch("osv-scan", 
args.revision_number, bundle, patch_ops)
+    new_version, merged = sbom.utilities.apply_patch("osv-scan", revision_str, 
bundle, patch_ops)
     description = "SBOM vulnerability scan through web interface"
     async with storage.write(args.asf_uid) as write:
-        wacp = await write.as_project_committee_participant(project)
+        wacp = await write.as_project_committee_participant(args.project_key)
 
         async def modify(path: pathlib.Path, _old_rev: sql.Revision | None) -> 
None:
             nonlocal new_full_path, new_full_path_str
-            new_full_path = path / args.file_path
+            new_full_path = path / str(args.file_path)
             new_full_path_str = str(new_full_path)
             # Write to the new revision
             log.info(f"Writing updated SBOM to {new_full_path_str}")
@@ -188,7 +190,7 @@ async def osv_scan(args: FileArgs) -> results.Results | 
None:
                 await f.write(merged.dumps())
 
         await wacp.revision.create_revision_with_quarantine(
-            project, version, args.asf_uid or "unknown", 
description=description, modify=modify
+            args.project_key, args.version_key, args.asf_uid or "unknown", 
description=description, modify=modify
         )
 
     return results.SBOMOSVScan(
@@ -206,16 +208,15 @@ async def osv_scan(args: FileArgs) -> results.Results | 
None:
 
 @checks.with_model(FileArgs)
 async def score_qs(args: FileArgs) -> results.Results | None:
-    safe.ProjectKey(args.project_key)
-    safe.VersionKey(args.version_key)
+    path_str = str(args.file_path)
 
-    base_dir = paths.get_unfinished_dir() / args.project_key / 
args.version_key / args.revision_number
+    base_dir = paths.get_unfinished_dir_for(args.project_key, 
args.version_key, args.revision_number)
     if not await aiofiles.os.path.isdir(base_dir):
         raise SBOMScoringError("Revision directory does not exist", 
{"base_dir": str(base_dir)})
-    full_path = base_dir / args.file_path
+    full_path = base_dir / path_str
     full_path_str = str(full_path)
     if not (full_path_str.endswith(".cdx.json") and await 
aiofiles.os.path.isfile(full_path)):
-        raise SBOMScoringError("SBOM file does not exist", {"file_path": 
args.file_path})
+        raise SBOMScoringError("SBOM file does not exist", {"file_path": 
path_str})
     proc = await asyncio.create_subprocess_exec(
         "sbomqs",
         "score",
@@ -245,19 +246,18 @@ async def score_qs(args: FileArgs) -> results.Results | 
None:
 
 @checks.with_model(ScoreArgs)
 async def score_tool(args: ScoreArgs) -> results.Results | None:
-    safe.ProjectKey(args.project_key)
-    safe.VersionKey(args.version_key)
+    path_str = str(args.file_path)
 
-    base_dir = paths.get_unfinished_dir() / args.project_key / 
args.version_key / args.revision_number
+    base_dir = paths.get_unfinished_dir_for(args.project_key, 
args.version_key, args.revision_number)
     previous_base_dir = None
     if args.previous_release_version is not None:
-        previous_base_dir = paths.get_finished_dir() / args.project_key / 
str(args.previous_release_version)
+        previous_base_dir = paths.get_finished_dir_for(args.project_key, 
args.previous_release_version)
     if not await aiofiles.os.path.isdir(base_dir):
         raise SBOMScoringError("Revision directory does not exist", 
{"base_dir": str(base_dir)})
-    full_path = base_dir / args.file_path
+    full_path = base_dir / path_str
     full_path_str = str(full_path)
     if not (full_path_str.endswith(".cdx.json") and await 
aiofiles.os.path.isfile(full_path)):
-        raise SBOMScoringError("SBOM file does not exist", {"file_path": 
args.file_path})
+        raise SBOMScoringError("SBOM file does not exist", {"file_path": 
path_str})
     bundle = sbom.utilities.path_to_bundle(full_path)
     version, properties = sbom.utilities.get_props_from_bundle(bundle)
     warnings, errors = sbom.conformance.ntia_2021_issues(bundle.bom)
@@ -272,7 +272,7 @@ async def score_tool(args: ScoreArgs) -> results.Results | 
None:
     prev_licenses = None
     prev_vulnerabilities = None
     if previous_base_dir is not None:
-        previous_full_path = previous_base_dir / args.file_path
+        previous_full_path = previous_base_dir / path_str
         try:
             previous_bundle = sbom.utilities.path_to_bundle(previous_full_path)
         except FileNotFoundError:
diff --git a/atr/tasks/svn.py b/atr/tasks/svn.py
index 476a40f3..acd67a25 100644
--- a/atr/tasks/svn.py
+++ b/atr/tasks/svn.py
@@ -36,11 +36,11 @@ _SVN_BASE_URL: Final[str] = 
"https://dist.apache.org/repos/dist"
 class SvnImport(schema.Strict):
     """Arguments for the task to import files from SVN."""
 
-    svn_url: str
+    svn_url: safe.RelPath
     revision: str
     target_subdirectory: str | None
-    project_key: str
-    version_key: str
+    project_key: safe.ProjectKey
+    version_key: safe.VersionKey
     asf_uid: str
 
 
@@ -73,17 +73,16 @@ async def import_files(args: SvnImport) -> results.Results 
| None:
 async def _import_files_core(args: SvnImport) -> str:
     """Core logic to perform the SVN export."""
 
-    project = safe.ProjectKey(args.project_key)
-    version = safe.VersionKey(args.version_key)
-    svn_path = safe.RelPath(args.svn_url)
+    project_str = str(args.project_key)
+    version_str = str(args.version_key)
 
-    log.info(f"Starting SVN import for {args.project_key}-{args.version_key}")
+    log.info(f"Starting SVN import for {project_str}-{version_str}")
     # We have to use a temporary directory otherwise SVN thinks it's a pegged 
revision
     temp_export_dir_name = ".svn-export.tmp"
 
     description = "Import of files from subversion"
     async with storage.write(args.asf_uid) as write:
-        wacp = await write.as_project_committee_participant(project)
+        wacp = await write.as_project_committee_participant(args.project_key)
 
         async def modify(path: pathlib.Path, _old_rev: sql.Revision | None) -> 
None:
             log.debug(f"Created revision directory: {path}")
@@ -110,7 +109,7 @@ async def _import_files_core(args: SvnImport) -> str:
                 "-r",
                 args.revision,
                 "--",
-                f"{_SVN_BASE_URL}/{svn_path!s}",
+                f"{_SVN_BASE_URL}/{args.svn_url!s}",
                 str(temp_export_path),
             ]
 
@@ -132,11 +131,11 @@ async def _import_files_core(args: SvnImport) -> str:
             log.info(f"Removed temporary export directory: {temp_export_path}")
 
         result = await wacp.revision.create_revision_with_quarantine(
-            project, version, args.asf_uid, description=description, 
modify=modify
+            args.project_key, args.version_key, args.asf_uid, 
description=description, modify=modify
         )
         if isinstance(result, sql.Quarantined):
-            log.info(f"SVN import quarantined for 
{args.project_key}-{args.version_key}")
-            return f"SVN import received for 
{args.project_key}-{args.version_key}. Archive validation in progress."
+            log.info(f"SVN import quarantined for {project_str}-{version_str}")
+            return f"SVN import received for {project_str}-{version_str}. 
Archive validation in progress."
         return f"Successfully imported files from SVN into revision 
{result.number}"
 
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to