This is an automated email from the ASF dual-hosted git repository.
sbp pushed a commit to branch sbp
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git
The following commit(s) were added to refs/heads/sbp by this push:
new c501f89 Add a task that reads GitHub Trusted Publishing payloads
c501f89 is described below
commit c501f895e39e0837d7f57587e78580b8a4823c68
Author: Sean B. Palmer <[email protected]>
AuthorDate: Wed Feb 4 15:56:26 2026 +0000
Add a task that reads GitHub Trusted Publishing payloads
---
atr/models/sql.py | 1 +
atr/sbom/models/__init__.py | 4 +-
atr/sbom/models/{__init__.py => github.py} | 35 +++++++++++-
atr/tasks/__init__.py | 5 ++
atr/tasks/checks/compare.py | 90 ++++++++++++++++++++++++++++++
5 files changed, 131 insertions(+), 4 deletions(-)
diff --git a/atr/models/sql.py b/atr/models/sql.py
index 61a5c62..affd557 100644
--- a/atr/models/sql.py
+++ b/atr/models/sql.py
@@ -190,6 +190,7 @@ class TaskStatus(str, enum.Enum):
class TaskType(str, enum.Enum):
+ COMPARE_SOURCE_TREES = "compare_source_trees"
DISTRIBUTION_STATUS = "distribution_status"
DISTRIBUTION_WORKFLOW = "distribution_workflow"
HASHING_CHECK = "hashing_check"
diff --git a/atr/sbom/models/__init__.py b/atr/sbom/models/__init__.py
index 15cf734..b0dae61 100644
--- a/atr/sbom/models/__init__.py
+++ b/atr/sbom/models/__init__.py
@@ -17,6 +17,6 @@
from __future__ import annotations
-from . import base, bom, bundle, conformance, licenses, osv, patch, sbomqs,
tool
+from . import base, bom, bundle, conformance, github, licenses, osv, patch,
sbomqs, tool
-__all__ = ["base", "bom", "bundle", "conformance", "licenses", "osv", "patch",
"sbomqs", "tool"]
+__all__ = ["base", "bom", "bundle", "conformance", "github", "licenses",
"osv", "patch", "sbomqs", "tool"]
diff --git a/atr/sbom/models/__init__.py b/atr/sbom/models/github.py
similarity index 56%
copy from atr/sbom/models/__init__.py
copy to atr/sbom/models/github.py
index 15cf734..99a4b8d 100644
--- a/atr/sbom/models/__init__.py
+++ b/atr/sbom/models/github.py
@@ -17,6 +17,37 @@
from __future__ import annotations
-from . import base, bom, bundle, conformance, licenses, osv, patch, sbomqs,
tool
+from .base import Lax
-__all__ = ["base", "bom", "bundle", "conformance", "licenses", "osv", "patch",
"sbomqs", "tool"]
+
class TrustedPublisherPayload(Lax):
    """Claims from a GitHub Actions OIDC token used for Trusted Publishing.

    Field names mirror the token's claim names exactly (standard JWT claims
    plus GitHub-specific workflow/run metadata). Lax appears to be a lenient
    pydantic base that tolerates extra keys -- TODO confirm against
    atr/sbom/models/base.py.
    """

    actor: str
    actor_id: str
    # Standard JWT claims: aud, exp, iat, iss, jti, nbf, sub
    aud: str
    base_ref: str
    check_run_id: str
    enterprise: str
    enterprise_id: str
    event_name: str
    exp: int
    head_ref: str
    iat: int
    iss: str
    job_workflow_ref: str
    job_workflow_sha: str
    jti: str
    # The only optional claim here: "not before" may be absent from tokens
    nbf: int | None = None
    ref: str
    ref_protected: str
    ref_type: str
    repository: str
    repository_owner: str
    repository_visibility: str
    # NOTE(review): run_attempt/run_number are declared str, not int --
    # presumably they arrive as strings in the token; verify against payloads
    run_attempt: str
    run_number: str
    runner_environment: str
    sha: str
    sub: str
    workflow: str
    workflow_ref: str
    workflow_sha: str
diff --git a/atr/tasks/__init__.py b/atr/tasks/__init__.py
index 8ba7fd3..8030727 100644
--- a/atr/tasks/__init__.py
+++ b/atr/tasks/__init__.py
@@ -24,6 +24,7 @@ import sqlmodel
import atr.db as db
import atr.models.results as results
import atr.models.sql as sql
+import atr.tasks.checks.compare as compare
import atr.tasks.checks.hashing as hashing
import atr.tasks.checks.license as license
import atr.tasks.checks.paths as paths
@@ -251,6 +252,8 @@ def queued(
def resolve(task_type: sql.TaskType) -> Callable[...,
Awaitable[results.Results | None]]: # noqa: C901
match task_type:
+ case sql.TaskType.COMPARE_SOURCE_TREES:
+ return compare.source_trees
case sql.TaskType.DISTRIBUTION_STATUS:
return distribution.status_check
case sql.TaskType.DISTRIBUTION_WORKFLOW:
@@ -315,6 +318,7 @@ async def tar_gz_checks(asf_uid: str, release: sql.Release,
revision: str, path:
# This release has committee, as guaranteed in draft_checks
is_podling = (release.project.committee is not None) and
release.project.committee.is_podling
tasks = [
+ queued(asf_uid, sql.TaskType.COMPARE_SOURCE_TREES, release, revision,
path),
queued(asf_uid, sql.TaskType.LICENSE_FILES, release, revision, path,
extra_args={"is_podling": is_podling}),
queued(asf_uid, sql.TaskType.LICENSE_HEADERS, release, revision, path),
queued(asf_uid, sql.TaskType.RAT_CHECK, release, revision, path),
@@ -357,6 +361,7 @@ async def zip_checks(asf_uid: str, release: sql.Release,
revision: str, path: st
# This release has committee, as guaranteed in draft_checks
is_podling = (release.project.committee is not None) and
release.project.committee.is_podling
tasks = [
+ queued(asf_uid, sql.TaskType.COMPARE_SOURCE_TREES, release, revision,
path),
queued(asf_uid, sql.TaskType.LICENSE_FILES, release, revision, path,
extra_args={"is_podling": is_podling}),
queued(asf_uid, sql.TaskType.LICENSE_HEADERS, release, revision, path),
queued(asf_uid, sql.TaskType.RAT_CHECK, release, revision, path),
diff --git a/atr/tasks/checks/compare.py b/atr/tasks/checks/compare.py
new file mode 100644
index 0000000..52f8d1b
--- /dev/null
+++ b/atr/tasks/checks/compare.py
@@ -0,0 +1,90 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import json
+from typing import Any
+
+import aiofiles
+import aiofiles.os
+import pydantic
+
+import atr.attestable as attestable
+import atr.log as log
+import atr.models.results as results
+import atr.sbom.models.github as github_models
+import atr.tasks.checks as checks
+
+
async def source_trees(args: checks.FunctionArguments) -> results.Results | None:
    """Task entry point for the COMPARE_SOURCE_TREES check.

    Skips non-source artifacts. For source artifacts, loads the GitHub
    Trusted Publishing payload for this project/version/revision (if one was
    recorded) and logs a summary of it. Always returns None: no check
    results are produced yet -- this task currently only reads and logs.
    """
    recorder = await args.recorder()
    # Comparing source trees only makes sense for a source artifact
    is_source = await recorder.primary_path_is_source()
    if not is_source:
        log.info(
            "Skipping compare.source_trees because the input is not a source artifact",
            project=args.project_name,
            version=args.version_name,
            revision=args.revision_number,
            path=args.primary_rel_path,
        )
        return None

    # payload is None when the file is missing or invalid; the summary
    # reports {"present": False} in that case, so logging is unconditional
    payload = await _load_tp_payload(args.project_name, args.version_name, args.revision_number)
    payload_summary = _payload_summary(payload)
    log.info(
        "Ran compare.source_trees successfully",
        project=args.project_name,
        version=args.version_name,
        revision=args.revision_number,
        path=args.primary_rel_path,
        github_payload=payload_summary,
    )
    return None
+
+
async def _load_tp_payload(
    project_name: str, version_name: str, revision_number: str
) -> github_models.TrustedPublisherPayload | None:
    """Load and validate the GitHub Trusted Publishing payload for a revision.

    Returns the parsed payload, or None when the payload file is absent,
    unreadable, not a JSON object, or fails model validation. Every failure
    mode is logged (except the expected missing-file case) and swallowed so
    callers can treat the payload as strictly optional.
    """
    payload_path = attestable.github_tp_payload_path(project_name, version_name, revision_number)
    try:
        # EAFP: open directly instead of stat-then-open, which avoids a
        # check-then-use race if the payload file is removed concurrently
        async with aiofiles.open(payload_path, encoding="utf-8") as f:
            data = json.loads(await f.read())
    except FileNotFoundError:
        # A missing payload is the normal case for non-TP uploads; stay quiet
        return None
    except (OSError, json.JSONDecodeError) as e:
        log.warning(f"Failed to read TP payload from {payload_path}: {e}")
        return None
    if not isinstance(data, dict):
        log.warning(f"TP payload was not a JSON object in {payload_path}")
        return None
    try:
        return github_models.TrustedPublisherPayload.model_validate(data)
    except pydantic.ValidationError as e:
        log.warning(f"Failed to validate TP payload from {payload_path}: {e}")
        return None
+
+
def _payload_summary(payload: github_models.TrustedPublisherPayload | None) -> dict[str, Any]:
    """Summarise the key Trusted Publisher claims for structured logging."""
    if payload is None:
        return {"present": False}
    summary: dict[str, Any] = {"present": True}
    # The claims worth surfacing in the task log, in display order
    for claim in ("repository", "ref", "sha", "workflow_ref", "actor", "actor_id"):
        summary[claim] = getattr(payload, claim)
    return summary
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]