This is an automated email from the ASF dual-hosted git repository.
sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git
The following commit(s) were added to refs/heads/main by this push:
new 79ac26c Migrate the keys import script to the storage interface
79ac26c is described below
commit 79ac26c4160f4d67016cee0491858783d3e6e6b4
Author: Sean B. Palmer <[email protected]>
AuthorDate: Thu Jul 24 17:13:36 2025 +0100
Migrate the keys import script to the storage interface
---
atr/blueprints/admin/admin.py | 14 ++++--
atr/storage/__init__.py | 66 ++++++++++++++++---------
atr/storage/types.py | 33 +++----------
atr/storage/writers/keys.py | 111 +++++++++++++++++++++++-------------------
scripts/keys_import.py | 47 ++++++++++--------
5 files changed, 150 insertions(+), 121 deletions(-)
diff --git a/atr/blueprints/admin/admin.py b/atr/blueprints/admin/admin.py
index 49dd69f..a839398 100644
--- a/atr/blueprints/admin/admin.py
+++ b/atr/blueprints/admin/admin.py
@@ -461,7 +461,13 @@ async def admin_keys_update() -> str | response.Response |
tuple[Mapping[str, An
return await template.render("update-keys.html",
empty_form=empty_form, previous_output=previous_output)
try:
- pid = await _update_keys()
+ web_session = await session.read()
+ if web_session is None:
+ raise base.ASFQuartException("Not authenticated", 401)
+ asf_uid = web_session.uid
+ if asf_uid is None:
+ raise base.ASFQuartException("Invalid session, uid is None", 500)
+ pid = await _update_keys(asf_uid)
return {
"message": f"Successfully started key update process with PID {pid}",
"category": "success",
@@ -907,7 +913,7 @@ async def _update_committees(
return added_count, updated_count
-async def _update_keys() -> int:
+async def _update_keys(asf_uid: str) -> int:
async def _log_process(process: asyncio.subprocess.Process) -> None:
try:
stdout, stderr = await process.communicate()
@@ -924,10 +930,10 @@ async def _update_keys() -> int:
if await aiofiles.os.path.exists("../Dockerfile.alpine"):
# Not in a container, developing locally
- command = ["poetry", "run", "python3", "scripts/keys_import.py"]
+ command = ["poetry", "run", "python3", "scripts/keys_import.py", asf_uid]
else:
# In a container
- command = [sys.executable, "scripts/keys_import.py"]
+ command = [sys.executable, "scripts/keys_import.py", asf_uid]
process = await asyncio.create_subprocess_exec(
*command, stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE, cwd=".."
diff --git a/atr/storage/__init__.py b/atr/storage/__init__.py
index 57eea09..4b7ab9d 100644
--- a/atr/storage/__init__.py
+++ b/atr/storage/__init__.py
@@ -32,6 +32,7 @@ import atr.log as log
import atr.models.sql as sql
import atr.storage.types as types
import atr.storage.writers as writers
+import atr.user as user
import atr.util as util
VALIDATE_AT_RUNTIME: Final[bool] = True
@@ -188,26 +189,33 @@ class WriteAsCommitteeMember(WriteAsCommitteeParticipant):
return VALIDATE_AT_RUNTIME
-# class WriteAsFoundationAdmin(WriteAsFoundationCommitter):
-# def __init__(self, write: Write, data: db.Session, asf_uid: str):
-# self.__write = write
-# self.__data = data
-# self.__asf_uid = asf_uid
-# self.__authenticated = True
-# self.keys = writers.keys.FoundationAdmin(
-# self,
-# self.__write,
-# self.__data,
-# self.__asf_uid,
-# )
+# TODO: Or WriteAsCommitteeAdmin
+class WriteAsFoundationAdmin(WriteAsCommitteeMember):
+ def __init__(self, write: Write, data: db.Session, asf_uid: str, committee_name: str):
+ self.__write = write
+ self.__data = data
+ self.__asf_uid = asf_uid
+ self.__committee_name = committee_name
+ self.__authenticated = True
+ self.keys = writers.keys.FoundationAdmin(
+ self,
+ self.__write,
+ self.__data,
+ self.__asf_uid,
+ committee_name,
+ )
-# @property
-# def authenticated(self) -> bool:
-# return self.__authenticated
+ @property
+ def authenticated(self) -> bool:
+ return self.__authenticated
-# @property
-# def validate_at_runtime(self) -> bool:
-# return VALIDATE_AT_RUNTIME
+ @property
+ def committee_name(self) -> str:
+ return self.__committee_name
+
+ @property
+ def validate_at_runtime(self) -> bool:
+ return VALIDATE_AT_RUNTIME
class Write:
@@ -268,6 +276,20 @@ class Write:
return types.OutcomeException(e)
return types.OutcomeResult(wafm)
+ def as_foundation_admin(self, committee_name: str) -> WriteAsFoundationAdmin:
+ return self.as_foundation_admin_outcome(committee_name).result_or_raise()
+
+ def as_foundation_admin_outcome(self, committee_name: str) -> types.Outcome[WriteAsFoundationAdmin]:
+ if self.__asf_uid is None:
+ return types.OutcomeException(AccessError("No ASF UID"))
+ if not user.is_admin(self.__asf_uid):
+ return types.OutcomeException(AccessError("Not an admin"))
+ try:
+ wafa = WriteAsFoundationAdmin(self, self.__data, self.__asf_uid, committee_name)
+ except Exception as e:
+ return types.OutcomeException(e)
+ return types.OutcomeResult(wafa)
+
# async def as_key_owner(self) -> types.Outcome[WriteAsKeyOwner]:
# ...
@@ -346,16 +368,16 @@ class ContextManagers:
if asfquart_session is None:
raise AccessError("No ASFQuart session")
if asf_uid is None:
- asf_uid = asfquart_session.uid
+ asf_uid = asfquart_session["uid"]
if asf_uid is None:
raise AccessError("No ASF UID, and not set in the ASFQuart
session")
- elif asfquart_session.uid != asf_uid:
+ elif asfquart_session["uid"] != asf_uid:
raise AccessError("ASF UID mismatch")
# TODO: Use our own LDAP calls instead of using sqlite as a cache
await data.ns_text_set("asfquart_session", asf_uid,
json.dumps(asfquart_session))
- self.__member_of_cache[asf_uid] = set(asfquart_session.committees)
- self.__participant_of_cache[asf_uid] = set(asfquart_session.projects)
+ self.__member_of_cache[asf_uid] = set(asfquart_session["committees"])
+ self.__participant_of_cache[asf_uid] = set(asfquart_session["projects"])
self.__last_refreshed = int(time.time())
finish = time.perf_counter_ns()
diff --git a/atr/storage/types.py b/atr/storage/types.py
index 83f78d3..ca643e9 100644
--- a/atr/storage/types.py
+++ b/atr/storage/types.py
@@ -29,31 +29,6 @@ E = TypeVar("E", bound=Exception)
T = TypeVar("T", bound=object)
-# class OutcomeCore[T]:
-# @property
-# def ok(self) -> bool:
-# raise NotImplementedError("ok is not implemented")
-
-# @property
-# def name(self) -> str | None:
-# raise NotImplementedError("name is not implemented")
-
-# def result_or_none(self) -> T | None:
-# raise NotImplementedError("result_or_none is not implemented")
-
-# def result_or_raise(self, exception_class: type[E] | None = None) -> T:
-# raise NotImplementedError("result_or_raise is not implemented")
-
-# def exception_or_none(self) -> Exception | None:
-# raise NotImplementedError("exception_or_none is not implemented")
-
-# def exception_or_raise(self, exception_class: type[E] | None = None) ->
NoReturn:
-# raise NotImplementedError("exception_or_raise is not implemented")
-
-# def exception_type_or_none(self) -> type[Exception] | None:
-# raise NotImplementedError("exception_type_or_none is not
implemented")
-
-
class OutcomeResult[T]:
__result: T
@@ -131,6 +106,9 @@ class Outcomes[T, E: Exception = Exception]:
def __init__(self, *outcomes: Outcome[T, E]):
self.__outcomes = list(outcomes)
+ def __str__(self) -> str:
+ return f"Outcomes({self.__outcomes})"
+
@property
def any_exception(self) -> bool:
return any((not outcome.ok) for outcome in self.__outcomes)
@@ -177,6 +155,11 @@ class Outcomes[T, E: Exception = Exception]:
exceptions_list.append(exception_or_none)
return exceptions_list
+ def exceptions_print(self) -> None:
+ for exception in self.exceptions():
+ # traceback.print_exception(exception)
+ print(exception.__class__.__name__ + ":", exception)
+
def extend_exceptions(self, exceptions: Sequence[E]) -> None:
for exception in exceptions:
self.append_exception(exception)
diff --git a/atr/storage/writers/keys.py b/atr/storage/writers/keys.py
index aac7e86..f6315db 100644
--- a/atr/storage/writers/keys.py
+++ b/atr/storage/writers/keys.py
@@ -112,6 +112,42 @@ class FoundationCommitter:
async def ensure_stored_one(self, key_file_text: str) -> types.Outcome[types.Key]:
return await self.__ensure_one(key_file_text, associate=False)
return await self.__ensure_one(key_file_text, associate=False)
+ @performance
+ def keyring_fingerprint_model(
+ self, keyring: pgpy.PGPKeyring, fingerprint: str, ldap_data: dict[str,
str]
+ ) -> sql.PublicSigningKey | None:
+ with keyring.key(fingerprint) as key:
+ if not key.is_primary:
+ return None
+ uids = [uid.userid for uid in key.userids]
+ asf_uid = self.__uids_asf_uid(uids, ldap_data)
+
+ # TODO: Improve this
+ key_size = key.key_size
+ length = 0
+ if isinstance(key_size, constants.EllipticCurveOID):
+ if isinstance(key_size.key_size, int):
+ length = key_size.key_size
+ else:
+ raise ValueError(f"Key size is not an integer:
{type(key_size.key_size)}, {key_size.key_size}")
+ elif isinstance(key_size, int):
+ length = key_size
+ else:
+ raise ValueError(f"Key size is not an integer:
{type(key_size)}, {key_size}")
+
+ return sql.PublicSigningKey(
+ fingerprint=str(key.fingerprint).lower(),
+ algorithm=key.key_algorithm.value,
+ length=length,
+ created=key.created,
+ latest_self_signature=key.expires_at,
+ expires=key.expires_at,
+ primary_declared_uid=uids[0],
+ secondary_declared_uids=uids[1:],
+ apache_uid=asf_uid,
+ ascii_armored_key=str(key),
+ )
+
@performance_async
async def keys_file_text(self, committee_name: str) -> str:
committee = await self.__data.committee(name=committee_name,
_public_signing_keys=True).demand(
@@ -174,7 +210,7 @@ class FoundationCommitter:
key = None
for fingerprint in fingerprints:
try:
- key_model = self.__keyring_fingerprint_model(keyring, fingerprint, ldap_data)
+ key_model = self.keyring_fingerprint_model(keyring, fingerprint, ldap_data)
if key_model is None:
# Was not a primary key, so skip it
continue
@@ -278,42 +314,6 @@ and was published by the committee.\
full_keys_file_content = header_content + key_blocks_str
return full_keys_file_content
- @performance
- def __keyring_fingerprint_model(
- self, keyring: pgpy.PGPKeyring, fingerprint: str, ldap_data: dict[str,
str]
- ) -> sql.PublicSigningKey | None:
- with keyring.key(fingerprint) as key:
- if not key.is_primary:
- return None
- uids = [uid.userid for uid in key.userids]
- asf_uid = self.__uids_asf_uid(uids, ldap_data)
-
- # TODO: Improve this
- key_size = key.key_size
- length = 0
- if isinstance(key_size, constants.EllipticCurveOID):
- if isinstance(key_size.key_size, int):
- length = key_size.key_size
- else:
- raise ValueError(f"Key size is not an integer:
{type(key_size.key_size)}, {key_size.key_size}")
- elif isinstance(key_size, int):
- length = key_size
- else:
- raise ValueError(f"Key size is not an integer:
{type(key_size)}, {key_size}")
-
- return sql.PublicSigningKey(
- fingerprint=str(key.fingerprint).lower(),
- algorithm=key.key_algorithm.value,
- length=length,
- created=key.created,
- latest_self_signature=key.expires_at,
- expires=key.expires_at,
- primary_declared_uid=uids[0],
- secondary_declared_uids=uids[1:],
- apache_uid=asf_uid,
- ascii_armored_key=str(key),
- )
-
@performance
def __uids_asf_uid(self, uids: list[str], ldap_data: dict[str, str]) ->
str | None:
# Test data
@@ -352,11 +352,13 @@ class CommitteeParticipant(FoundationCommitter):
asf_uid: str,
committee_name: str,
):
+ super().__init__(credentials, write, data, asf_uid)
self.__credentials = credentials
self.__write = write
self.__data = data
self.__asf_uid = asf_uid
self.__committee_name = committee_name
+ self.__key_block_models_cache = {}
@performance_async
async def associate_fingerprint(self, fingerprint: str) ->
types.Outcome[types.LinkedCommittee]:
@@ -487,7 +489,7 @@ class CommitteeParticipant(FoundationCommitter):
key_list = []
for fingerprint in fingerprints:
try:
- key_model = self.__keyring_fingerprint_model(keyring,
fingerprint, ldap_data)
+ key_model = self.keyring_fingerprint_model(keyring,
fingerprint, ldap_data)
if key_model is None:
# Was not a primary key, so skip it
continue
@@ -592,8 +594,9 @@ class CommitteeParticipant(FoundationCommitter):
return outcomes
for key_block in key_blocks:
try:
+ # TODO: Change self.__block_models to return outcomes
key_models = await asyncio.to_thread(self.__block_models,
key_block, ldap_data)
- outcomes.extend_results(key_models)
+ outcomes.extend_roes(Exception, key_models)
except Exception as e:
outcomes.append_exception(e)
# Try adding the keys to the database
@@ -614,6 +617,7 @@ class CommitteeMember(CommitteeParticipant):
asf_uid: str,
committee_name: str,
):
+ super().__init__(credentials, write, data, asf_uid, committee_name)
self.__credentials = credentials
self.__write = write
self.__data = data
@@ -621,15 +625,22 @@ class CommitteeMember(CommitteeParticipant):
self.__committee_name = committee_name
-# class FoundationAdmin(FoundationCommitter):
-# def __init__(
-# self, credentials: storage.WriteAsFoundationAdmin, write:
storage.Write, data: db.Session, asf_uid: str
-# ):
-# self.__credentials = credentials
-# self.__write = write
-# self.__data = data
-# self.__asf_uid = asf_uid
+class FoundationAdmin(CommitteeMember):
+ def __init__(
+ self,
+ credentials: storage.WriteAsFoundationAdmin,
+ write: storage.Write,
+ data: db.Session,
+ asf_uid: str,
+ committee_name: str,
+ ):
+ super().__init__(credentials, write, data, asf_uid, committee_name)
+ self.__credentials = credentials
+ self.__write = write
+ self.__data = data
+ self.__asf_uid = asf_uid
+ self.__committee_name = committee_name
-# @performance_async
-# async def ensure_stored_one(self, key_file_text: str) ->
types.Outcome[types.Key]:
-# return await self.__ensure_one(key_file_text, associate=False)
+ @property
+ def committee_name(self) -> str:
+ return self.__committee_name
diff --git a/scripts/keys_import.py b/scripts/keys_import.py
index 55132a8..fcc48cc 100644
--- a/scripts/keys_import.py
+++ b/scripts/keys_import.py
@@ -23,13 +23,14 @@ import contextlib
import os
import sys
import time
+import traceback
sys.path.append(".")
import atr.config as config
import atr.db as db
-import atr.db.interaction as interaction
+import atr.storage as storage
import atr.util as util
@@ -41,7 +42,7 @@ def get(entry: dict, prop: str) -> str | None:
return None
-def write(message: str) -> None:
+def print_and_flush(message: str) -> None:
print(message)
sys.stdout.flush()
@@ -64,7 +65,7 @@ def log_to_file(conf: config.AppConfig):
sys.stderr = original_stderr
-async def keys_import(conf: config.AppConfig) -> None:
+async def keys_import(conf: config.AppConfig, asf_uid: str) -> None:
# Runs as a standalone script, so we need a worker style database
connection
await db.init_database_for_worker()
# Print the time and current PID
@@ -76,11 +77,11 @@ async def keys_import(conf: config.AppConfig) -> None:
start = time.perf_counter_ns()
email_to_uid = await util.email_to_uid_map()
end = time.perf_counter_ns()
- write(f"LDAP search took {(end - start) / 1000000} ms")
- write(f"Email addresses from LDAP: {len(email_to_uid)}")
+ print_and_flush(f"LDAP search took {(end - start) / 1000000} ms")
+ print_and_flush(f"Email addresses from LDAP: {len(email_to_uid)}")
# Open an ATR database connection
- async with db.session() as data:
+ async with db.session() as data, storage.write(asf_uid) as write:
# Get the KEYS file of each committee
committees = await data.committee().all()
committees = list(committees)
@@ -92,35 +93,41 @@ async def keys_import(conf: config.AppConfig) -> None:
# For each remote KEYS file, check that it responded 200 OK
committee_name = url.rsplit("/", 2)[-2]
if status != 200:
- write(f"{committee_name} error: {status}")
+ print_and_flush(f"{committee_name} error: {status}")
continue
# Parse the KEYS file and add it to the database
# TODO: We could have this return the keys to make it more
efficient
# Then we could use the bulk upsert query method
- try:
- _result, yes, no, _committees = await
interaction.upload_keys_bytes(
- [committee_name], content, [committee_name],
ldap_data=email_to_uid, update_existing=True
- )
- except Exception as e:
- write(f"{committee_name} error: {e}")
- continue
+ wafa = write.as_foundation_admin(committee_name)
+ keys_file_text = content.decode("utf-8", errors="replace")
+ outcomes = await wafa.keys.ensure_associated(keys_file_text)
+ yes = outcomes.result_count
+ no = outcomes.exception_count
+ if no:
+ outcomes.exceptions_print()
# Print and record the number of keys that were okay and failed
- write(f"{committee_name} {yes} {no}")
+ print_and_flush(f"{committee_name} {yes} {no}")
total_yes += yes
total_no += no
- write(f"Total okay: {total_yes}")
- write(f"Total failed: {total_no}")
+ print_and_flush(f"Total okay: {total_yes}")
+ print_and_flush(f"Total failed: {total_no}")
end = time.perf_counter_ns()
- write(f"Script took {(end - start) / 1000000} ms")
- write("")
+ print_and_flush(f"Script took {(end - start) / 1000000} ms")
+ print_and_flush("")
async def amain() -> None:
conf = config.AppConfig()
with log_to_file(conf):
- await keys_import(conf)
+ try:
+ await keys_import(conf, sys.argv[1])
+ except Exception as e:
+ print_and_flush(f"Error: {e}")
+ traceback.print_exc()
+ sys.stdout.flush()
+ sys.exit(1)
def main() -> None:
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]