This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git


The following commit(s) were added to refs/heads/main by this push:
     new f1d0796  Use the new decorator style in the rsync tasks module
f1d0796 is described below

commit f1d07963a1859900cf84640c14e44e88bc4bfc9b
Author: Sean B. Palmer <[email protected]>
AuthorDate: Mon Mar 31 16:55:30 2025 +0100

    Use the new decorator style in the rsync tasks module
---
 atr/db/models.py   |  2 +-
 atr/ssh.py         |  4 ++--
 atr/tasks/rsync.py | 33 ++++++++++++++++++---------------
 atr/worker.py      |  2 +-
 4 files changed, 22 insertions(+), 19 deletions(-)

diff --git a/atr/db/models.py b/atr/db/models.py
index e8530a1..1e96477 100644
--- a/atr/db/models.py
+++ b/atr/db/models.py
@@ -37,7 +37,7 @@ class UTCDateTime(sqlalchemy.types.TypeDecorator):
     A custom column type to store datetime in sqlite.
 
     As sqlite does not have timezone support, we ensure that all datetimes 
stored
-    within sqlite are converted to UTC. When retrieved, the datetimes are 
constructred
+    within sqlite are converted to UTC. When retrieved, the datetimes are 
constructed
     as offset-aware datetime with UTC as their timezone.
     """
 
diff --git a/atr/ssh.py b/atr/ssh.py
index 8296e55..b1a43ba 100644
--- a/atr/ssh.py
+++ b/atr/ssh.py
@@ -32,6 +32,7 @@ import asyncssh
 import atr.config as config
 import atr.db as db
 import atr.db.models as models
+import atr.tasks.checks as checks
 import atr.tasks.rsync as rsync
 import atr.user as user
 import atr.util as util
@@ -314,9 +315,8 @@ async def _handle_client(process: 
asyncssh.SSHServerProcess) -> None:
             data.add(
                 models.Task(
                     status=models.TaskStatus.QUEUED,
-                    task_type="rsync_analyse",
+                    task_type=checks.function_key(rsync.analyse),
                     task_args=rsync.Analyse(
-                        asf_uid=asf_uid,
                         project_name=project_name,
                         release_version=release_version,
                     ).model_dump(),
diff --git a/atr/tasks/rsync.py b/atr/tasks/rsync.py
index 7ca4920..37205e5 100644
--- a/atr/tasks/rsync.py
+++ b/atr/tasks/rsync.py
@@ -24,7 +24,7 @@ import pydantic
 import atr.db as db
 import atr.db.models as models
 import atr.tasks as tasks
-import atr.tasks.task as task
+import atr.tasks.checks as checks
 import atr.util as util
 
 if TYPE_CHECKING:
@@ -37,27 +37,30 @@ _LOGGER: Final = logging.getLogger(__name__)
 class Analyse(pydantic.BaseModel):
     """Parameters for rsync analysis."""
 
-    asf_uid: str = pydantic.Field(..., description="ASF UID of the user to 
rsync from")
     project_name: str = pydantic.Field(..., description="Name of the project 
to rsync")
     release_version: str = pydantic.Field(..., description="Version of the 
release to rsync")
 
 
-async def analyse(args: dict[str, Any]) -> tuple[models.TaskStatus, str | 
None, tuple[Any, ...]]:
-    """Analyse an rsync upload."""
-    data = Analyse(**args)
-    task_results = task.results_as_tuple(
-        await _analyse_core(
-            data.asf_uid,
-            data.project_name,
-            data.release_version,
+@checks.with_model(Analyse)
+async def analyse(args: Analyse) -> str | None:
+    """Analyse an rsync upload by queuing specific checks for discovered 
files."""
+    _LOGGER.info(f"Starting rsync analysis for {args.project_name} 
{args.release_version}")
+    try:
+        result_data = await _analyse_core(
+            args.project_name,
+            args.release_version,
         )
-    )
-    _LOGGER.info(f"Analyse {data.project_name} {data.release_version}")
-    return task.COMPLETED, None, task_results
+        num_paths = len(result_data.get("paths", []))
+        _LOGGER.info(f"Finished rsync analysis for {args.project_name} 
{args.release_version}, found {num_paths} paths")
+    except Exception as e:
+        _LOGGER.exception(f"Rsync analysis failed for {args.project_name} 
{args.release_version}: {e}")
+        raise e
 
+    return None
 
-async def _analyse_core(asf_uid: str, project_name: str, release_version: str) 
-> dict[str, Any]:
-    """Analyse an rsync upload."""
+
+async def _analyse_core(project_name: str, release_version: str) -> dict[str, 
Any]:
+    """Core logic to analyse an rsync upload and queue checks."""
     base_path = util.get_release_candidate_draft_dir() / project_name / 
release_version
     paths = await util.paths_recursive(base_path)
     release_name = f"{project_name}-{release_version}"
diff --git a/atr/worker.py b/atr/worker.py
index 0ee92cf..13af8bf 100644
--- a/atr/worker.py
+++ b/atr/worker.py
@@ -194,11 +194,11 @@ async def _task_process(task_id: int, task_type: str, 
task_args: list[str] | dic
             checks.function_key(license.headers): license.headers,
             checks.function_key(rat.check): rat.check,
             checks.function_key(signature.check): signature.check,
+            checks.function_key(rsync.analyse): rsync.analyse,
         }
         # TODO: We should use a decorator to register these automatically
         dict_task_handlers = {
             "package_bulk_download": bulk.download,
-            "rsync_analyse": rsync.analyse,
         }
         # TODO: These are synchronous
         # We plan to convert these to async dict handlers


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to