This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git


The following commit(s) were added to refs/heads/main by this push:
     new b9de5d4  Detect automated release committees from their OpenPGP keys
b9de5d4 is described below

commit b9de5d40ff0d430cae003d9e2d1f390430fea285
Author: Sean B. Palmer <[email protected]>
AuthorDate: Tue Oct 14 20:12:46 2025 +0100

    Detect automated release committees from their OpenPGP keys
---
 atr/db/interaction.py  | 56 +++++++++++++++++++++++++++++++++++++++++++++++---
 atr/registry.py        | 22 ++++++++++----------
 scripts/keys_import.py | 49 +++++++++++++++++++++++++------------------
 3 files changed, 93 insertions(+), 34 deletions(-)

diff --git a/atr/db/interaction.py b/atr/db/interaction.py
index 345b8c1..c5e342e 100644
--- a/atr/db/interaction.py
+++ b/atr/db/interaction.py
@@ -23,6 +23,7 @@ from typing import Any, Final
 
 import packaging.version as version
 import sqlalchemy
+import sqlalchemy.orm as orm
 import sqlmodel
 
 import atr.db as db
@@ -31,7 +32,6 @@ import atr.ldap as ldap
 import atr.log as log
 import atr.models.results as results
 import atr.models.sql as sql
-import atr.registry as registry
 import atr.user as user
 import atr.util as util
 
@@ -112,6 +112,55 @@ async def all_releases(project: sql.Project) -> list[sql.Release]:
     return results
 
 
+async def automated_release_signing_committees(caller_data: db.Session | None = None) -> frozenset[str]:
+    """Get all automated release signing committees."""
+    committees = []
+    async with db.ensure_session(caller_data) as data:
+        via = sql.validate_instrumented_attribute
+        query = (
+            sqlmodel.select(sql.PublicSigningKey)
+            .options(orm.selectinload(via(sql.PublicSigningKey.committees)))
+            .where(
+                sqlalchemy.and_(
+                    sqlalchemy.or_(
+                        via(sql.PublicSigningKey.primary_declared_uid).like("%Automated Release Signing%"),
+                        via(sql.PublicSigningKey.primary_declared_uid).like("%Services RM%"),
+                    ),
+                    via(sql.PublicSigningKey.primary_declared_uid).like("%private@%.apache.org%"),
+                )
+            )
+        )
+        result = await data.execute(query)
+        keys = result.scalars().all()
+
+        for key in keys:
+            for committee in key.committees:
+                committees.append(committee.name)
+
+    # Committees allowed to make automated releases for testing
+    committees.append("test")
+    committees.append("tooling")
+
+    return frozenset(committees)
+
+
+async def automated_release_signing_keys(caller_data: db.Session | None = None) -> Sequence[sql.PublicSigningKey]:
+    """Get all automated release signing keys."""
+    async with db.ensure_session(caller_data) as data:
+        via = sql.validate_instrumented_attribute
+        query = sqlmodel.select(sql.PublicSigningKey).where(
+            sqlalchemy.and_(
+                sqlalchemy.or_(
+                    via(sql.PublicSigningKey.primary_declared_uid).like("%Automated Release Signing%"),
+                    via(sql.PublicSigningKey.primary_declared_uid).like("%Services RM%"),
+                ),
+                via(sql.PublicSigningKey.primary_declared_uid).like("%private@%.apache.org%"),
+            ),
+        )
+        result = await data.execute(query)
+        return result.scalars().all()
+
+
 async def candidate_drafts(project: sql.Project) -> list[sql.Release]:
     """Get the candidate drafts for the project."""
     return await releases_by_phase(project, sql.ReleasePhase.RELEASE_CANDIDATE_DRAFT)
@@ -399,8 +448,9 @@ async def _trusted_project(repository: str, workflow_ref: str, phase: TrustedPro
         )
     if project.committee is None:
         raise InteractionError(f"Project {project.name} has no committee")
-    if project.committee.name not in registry.GITHUB_AUTOMATED_RELEASE_COMMITTEES:
-        raise InteractionError(f"Project {project.name} is not in a committee that can make releases")
+    github_automated_release_committees = await automated_release_signing_committees()
+    if project.committee.name not in github_automated_release_committees:
+        raise InteractionError(f"Project {project.name} is not in a committee that can make automated releases")
     return project
 
 
diff --git a/atr/registry.py b/atr/registry.py
index cffb6f7..7195688 100644
--- a/atr/registry.py
+++ b/atr/registry.py
@@ -25,17 +25,17 @@ FORBIDDEN_PROJECT_CATEGORIES: Final[set[str]] = {
 
 # Committees which are allowed by Infra to make releases via GitHub Actions
 # TODO: This should actually be at the project level, not committee level
-GITHUB_AUTOMATED_RELEASE_COMMITTEES: Final[frozenset[str]] = frozenset(
-    {
-        "arrow",
-        "baremaps",
-        "beam",
-        "daffodil",
-        "directory",
-        "logging",
-        "tooling",
-    }
-)
+# GITHUB_AUTOMATED_RELEASE_COMMITTEES: Final[frozenset[str]] = frozenset(
+#     {
+#         "arrow",
+#         "baremaps",
+#         "beam",
+#         "daffodil",
+#         "directory",
+#         "logging",
+#         "tooling",
+#     }
+# )
 
 # Committees which cannot make releases, by policy
 STANDING_COMMITTEES: Final[frozenset[str]] = frozenset(
diff --git a/scripts/keys_import.py b/scripts/keys_import.py
index 42c071a..70f29d3 100755
--- a/scripts/keys_import.py
+++ b/scripts/keys_import.py
@@ -80,25 +80,34 @@ async def keys_import(conf: config.AppConfig, asf_uid: str) -> None:
     print_and_flush(f"LDAP search took {(end - start) / 1000000} ms")
     print_and_flush(f"Email addresses from LDAP: {len(email_to_uid)}")
 
-    # Open an ATR database connection
-    async with db.session() as data, storage.write(asf_uid) as write:
-        # Get the KEYS file of each committee
+    # Get the KEYS file of each committee
+    async with db.session() as data:
         committees = await data.committee().all()
-        committees = list(committees)
-        committees.sort(key=lambda c: c.name.lower())
-        urls = [f"https://downloads.apache.org/{committee.name}/KEYS" for committee in committees]
-        total_yes = 0
-        total_no = 0
-        async for url, status, content in util.get_urls_as_completed(urls):
-            # For each remote KEYS file, check that it responded 200 OK
-            committee_name = url.rsplit("/", 2)[-2]
-            if status != 200:
-                print_and_flush(f"{committee_name} error: {status}")
-                continue
-
-            # Parse the KEYS file and add it to the database
-            # TODO: We could have this return the keys to make it more efficient
-            # Then we could use the bulk upsert query method
+    committees = list(committees)
+    committees.sort(key=lambda c: c.name.lower())
+
+    urls = []
+    for committee in committees:
+        if committee.is_podling:
+            url = f"https://downloads.apache.org/incubator/{committee.name}/KEYS"
+        else:
+            url = f"https://downloads.apache.org/{committee.name}/KEYS"
+        urls.append(url)
+
+    total_yes = 0
+    total_no = 0
+    async for url, status, content in util.get_urls_as_completed(urls):
+        # For each remote KEYS file, check that it responded 200 OK
+        # Extract committee name from URL
+        # This works for both /committee/KEYS and /incubator/committee/KEYS
+        committee_name = url.rsplit("/", 2)[-2]
+        if status != 200:
+            print_and_flush(f"{committee_name} error: {status}")
+            continue
+
+        # Parse the KEYS file and add it to the database
+        # We use a separate storage.write() context for each committee to avoid transaction conflicts
+        async with storage.write(asf_uid) as write:
             wafa = write.as_foundation_admin(committee_name)
             keys_file_text = content.decode("utf-8", errors="replace")
             outcomes = await wafa.keys.ensure_associated(keys_file_text)
@@ -111,8 +120,8 @@ async def keys_import(conf: config.AppConfig, asf_uid: str) -> None:
             print_and_flush(f"{committee_name} {yes} {no}")
             total_yes += yes
             total_no += no
-        print_and_flush(f"Total okay: {total_yes}")
-        print_and_flush(f"Total failed: {total_no}")
+    print_and_flush(f"Total okay: {total_yes}")
+    print_and_flush(f"Total failed: {total_no}")
     end = time.perf_counter_ns()
     print_and_flush(f"Script took {(end - start) / 1000000} ms")
     print_and_flush("")


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to