This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git


The following commit(s) were added to refs/heads/main by this push:
     new 88e82e5  Add a new Revision model
88e82e5 is described below

commit 88e82e58b04a129f083a6f1a3724544c8afb7552
Author: Sean B. Palmer <[email protected]>
AuthorDate: Thu May 15 18:55:35 2025 +0100

    Add a new Revision model
---
 atr/blueprints/admin/admin.py                      |   1 +
 atr/db/__init__.py                                 |  82 ++++++---
 atr/db/interaction.py                              |  23 +--
 atr/db/models.py                                   | 158 +++++++++++++++--
 atr/manager.py                                     |   2 +-
 atr/revision.py                                    | 152 ++++++++---------
 atr/routes/__init__.py                             |   6 +-
 atr/routes/announce.py                             |  29 ++--
 atr/routes/candidate.py                            |   4 +-
 atr/routes/compose.py                              |  23 +--
 atr/routes/download.py                             |   2 +-
 atr/routes/draft.py                                |  20 ++-
 atr/routes/finish.py                               |  10 +-
 atr/routes/keys.py                                 |   6 +-
 atr/routes/preview.py                              |   4 +-
 atr/routes/report.py                               |  10 +-
 atr/routes/resolve.py                              |   2 +-
 atr/routes/revisions.py                            | 187 ++++++++-------------
 atr/routes/root.py                                 |   6 +-
 atr/routes/start.py                                |  62 +++----
 atr/routes/upload.py                               |   2 +-
 atr/tasks/__init__.py                              |  20 +--
 atr/tasks/bulk.py                                  |   2 +-
 atr/tasks/checks/__init__.py                       |  24 +--
 atr/tasks/checks/paths.py                          |   6 +-
 atr/tasks/checks/signature.py                      |   2 +-
 atr/tasks/keys.py                                  |   2 +-
 atr/tasks/svn.py                                   |   4 +-
 atr/templates/announce-selected.html               |   2 +-
 atr/templates/check-selected-path-table.html       |  10 +-
 atr/templates/check-selected-release-info.html     |   6 +-
 atr/templates/finish-selected.html                 |   2 +-
 atr/templates/revisions-selected.html              |  42 ++---
 atr/templates/voting-selected-revision.html        |   2 +-
 atr/util.py                                        |  14 +-
 atr/worker.py                                      |  16 +-
 ....06_38b0d2de.py => 0001_2025.05.15_32c59be6.py} | 133 ++++++++-------
 migrations/versions/0002_2025.05.08_32fdbfe0.py    |  27 ---
 migrations/versions/0003_2025.05.09_ee553bee.py    |  72 --------
 migrations/versions/0004_2025.05.13_657bf05b.py    |  61 -------
 migrations/versions/0005_2025.05.13_d94f16f6.py    |  42 -----
 pyproject.toml                                     |   6 +
 42 files changed, 618 insertions(+), 668 deletions(-)

diff --git a/atr/blueprints/admin/admin.py b/atr/blueprints/admin/admin.py
index 238a812..3b86be4 100644
--- a/atr/blueprints/admin/admin.py
+++ b/atr/blueprints/admin/admin.py
@@ -72,6 +72,7 @@ async def admin_data(model: str = "Committee") -> str:
             "PublicSigningKey": data.public_signing_key,
             "Release": data.release,
             "ReleasePolicy": data.release_policy,
+            "Revision": data.revision,
             "SSHKey": data.ssh_key,
             "Task": data.task,
             "TextValue": data.text_value,
diff --git a/atr/db/__init__.py b/atr/db/__init__.py
index 1167730..7cdecc7 100644
--- a/atr/db/__init__.py
+++ b/atr/db/__init__.py
@@ -21,6 +21,8 @@ import logging
 import os
 from typing import TYPE_CHECKING, Any, Final, Generic, TypeGuard, TypeVar
 
+import alembic.command as command
+import alembic.config as alembic_config
 import sqlalchemy
 import sqlalchemy.dialects.sqlite
 import sqlalchemy.ext.asyncio
@@ -28,8 +30,6 @@ import sqlalchemy.orm as orm
 import sqlalchemy.sql as sql
 import sqlmodel
 import sqlmodel.sql.expression as expression
-from alembic import command
-from alembic.config import Config
 
 import atr.config as config
 import atr.db.models as models
@@ -112,7 +112,7 @@ class Session(sqlalchemy.ext.asyncio.AsyncSession):
         self,
         id: Opt[int] = NOT_SET,
         release_name: Opt[str] = NOT_SET,
-        revision: Opt[str] = NOT_SET,
+        revision_number: Opt[str] = NOT_SET,
         checker: Opt[str] = NOT_SET,
         primary_rel_path: Opt[str | None] = NOT_SET,
         member_rel_path: Opt[str | None] = NOT_SET,
@@ -128,8 +128,8 @@ class Session(sqlalchemy.ext.asyncio.AsyncSession):
             query = query.where(models.CheckResult.id == id)
         if is_defined(release_name):
            query = query.where(models.CheckResult.release_name == release_name)
-        if is_defined(revision):
-            query = query.where(models.CheckResult.revision == revision)
+        if is_defined(revision_number):
+            query = query.where(models.CheckResult.revision_number == revision_number)
         if is_defined(checker):
             query = query.where(models.CheckResult.checker == checker)
         if is_defined(primary_rel_path):
@@ -181,7 +181,7 @@ class Session(sqlalchemy.ext.asyncio.AsyncSession):
             query = query.where(models.Committee.release_managers == release_managers)
 
         if is_defined(name_in):
-            models_committee_name = validate_instrumented_attribute(models.Committee.name)
+            models_committee_name = models.validate_instrumented_attribute(models.Committee.name)
             query = query.where(models_committee_name.in_(name_in))
 
         if _projects:
@@ -193,8 +193,8 @@ class Session(sqlalchemy.ext.asyncio.AsyncSession):
 
     async def ns_text_del(self, ns: str, key: str, commit: bool = True) -> None:
         stmt = sql.delete(models.TextValue).where(
-            validate_instrumented_attribute(models.TextValue.ns) == ns,
-            validate_instrumented_attribute(models.TextValue.key) == key,
+            models.validate_instrumented_attribute(models.TextValue.ns) == ns,
+            models.validate_instrumented_attribute(models.TextValue.key) == key,
         )
         await self.execute(stmt)
         if commit is True:
@@ -202,7 +202,7 @@ class Session(sqlalchemy.ext.asyncio.AsyncSession):
 
     async def ns_text_del_all(self, ns: str, commit: bool = True) -> None:
         stmt = sql.delete(models.TextValue).where(
-            validate_instrumented_attribute(models.TextValue.ns) == ns,
+            models.validate_instrumented_attribute(models.TextValue.ns) == ns,
         )
         await self.execute(stmt)
         if commit is True:
@@ -210,8 +210,8 @@ class Session(sqlalchemy.ext.asyncio.AsyncSession):
 
     async def ns_text_get(self, ns: str, key: str) -> str | None:
         stmt = sql.select(models.TextValue).where(
-            validate_instrumented_attribute(models.TextValue.ns) == ns,
-            validate_instrumented_attribute(models.TextValue.key) == key,
+            models.validate_instrumented_attribute(models.TextValue.ns) == ns,
+            models.validate_instrumented_attribute(models.TextValue.key) == key,
         )
         result = await self.execute(stmt)
         match result.scalar_one_or_none():
@@ -317,7 +317,6 @@ class Session(sqlalchemy.ext.asyncio.AsyncSession):
         project_name: Opt[str] = NOT_SET,
         package_managers: Opt[list[str]] = NOT_SET,
         version: Opt[str] = NOT_SET,
-        revision: Opt[str] = NOT_SET,
         sboms: Opt[list[str]] = NOT_SET,
         release_policy_id: Opt[int] = NOT_SET,
         votes: Opt[list[models.VoteEntry]] = NOT_SET,
@@ -325,6 +324,7 @@ class Session(sqlalchemy.ext.asyncio.AsyncSession):
         _release_policy: bool = False,
         _committee: bool = False,
         _tasks: bool = False,
+        _revisions: bool = False,
     ) -> Query[models.Release]:
         query = sqlmodel.select(models.Release)
 
@@ -342,8 +342,6 @@ class Session(sqlalchemy.ext.asyncio.AsyncSession):
             query = query.where(models.Release.package_managers == package_managers)
         if is_defined(version):
             query = query.where(models.Release.version == version)
-        if is_defined(revision):
-            query = query.where(models.Release.revision == revision)
         if is_defined(sboms):
             query = query.where(models.Release.sboms == sboms)
         if is_defined(release_policy_id):
@@ -359,6 +357,8 @@ class Session(sqlalchemy.ext.asyncio.AsyncSession):
             query = query.options(select_in_load_nested(models.Release.project, models.Project.committee))
         if _tasks:
             query = query.options(select_in_load(models.Release.tasks))
+        if _revisions:
+            query = query.options(select_in_load(models.Release.revisions))
 
         return Query(self, query)
 
@@ -392,6 +392,51 @@ class Session(sqlalchemy.ext.asyncio.AsyncSession):
 
         return Query(self, query)
 
+    def revision(
+        self,
+        name: Opt[str] = NOT_SET,
+        release_name: Opt[str] = NOT_SET,
+        seq: Opt[int] = NOT_SET,
+        number: Opt[str] = NOT_SET,
+        asfuid: Opt[str] = NOT_SET,
+        created: Opt[datetime.datetime] = NOT_SET,
+        phase: Opt[models.ReleasePhase] = NOT_SET,
+        parent_name: Opt[str | None] = NOT_SET,
+        description: Opt[str | None] = NOT_SET,
+        _release: bool = False,
+        _parent: bool = False,
+        _child: bool = False,
+    ) -> Query[models.Revision]:
+        query = sqlmodel.select(models.Revision)
+
+        if is_defined(name):
+            query = query.where(models.Revision.name == name)
+        if is_defined(release_name):
+            query = query.where(models.Revision.release_name == release_name)
+        if is_defined(seq):
+            query = query.where(models.Revision.seq == seq)
+        if is_defined(number):
+            query = query.where(models.Revision.number == number)
+        if is_defined(asfuid):
+            query = query.where(models.Revision.asfuid == asfuid)
+        if is_defined(created):
+            query = query.where(models.Revision.created == created)
+        if is_defined(phase):
+            query = query.where(models.Revision.phase == phase)
+        if is_defined(parent_name):
+            query = query.where(models.Revision.parent_name == parent_name)
+        if is_defined(description):
+            query = query.where(models.Revision.description == description)
+
+        if _release:
+            query = query.options(select_in_load(models.Revision.release))
+        if _parent:
+            query = query.options(select_in_load(models.Revision.parent))
+        if _child:
+            query = query.options(select_in_load(models.Revision.child))
+
+        return Query(self, query)
+
     def ssh_key(
         self,
         fingerprint: Opt[str] = NOT_SET,
@@ -528,7 +573,7 @@ def init_database(app: base.QuartApp) -> None:
         # Run any pending migrations on startup
         _LOGGER.info("Applying database migrations via init_database...")
         alembic_ini_path = os.path.join(app_config.PROJECT_ROOT, "alembic.ini")
-        alembic_cfg = Config(alembic_ini_path)
+        alembic_cfg = alembic_config.Config(alembic_ini_path)
 
         # Construct synchronous URLs
         absolute_db_path = os.path.join(app_config.STATE_DIR, app_config.SQLITE_DB_PATH)
@@ -647,10 +692,3 @@ async def shutdown_database() -> None:
         await _global_atr_engine.dispose()
     else:
         _LOGGER.info("No database to close")
-
-
-def validate_instrumented_attribute(obj: Any) -> orm.InstrumentedAttribute:
-    """Check if the given object is an InstrumentedAttribute."""
-    if not isinstance(obj, orm.InstrumentedAttribute):
-        raise ValueError(f"Object must be an orm.InstrumentedAttribute, got: {type(obj)}")
-    return obj
diff --git a/atr/db/interaction.py b/atr/db/interaction.py
index 623c50a..e557597 100644
--- a/atr/db/interaction.py
+++ b/atr/db/interaction.py
@@ -182,10 +182,11 @@ async def key_user_session_add(
     }
 
 
-async def path_info(release: models.Release, paths: list[pathlib.Path]) -> PathInfo:
+async def path_info(release: models.Release, paths: list[pathlib.Path]) -> PathInfo | None:
     info = PathInfo()
-    if release.revision is None:
-        raise ValueError(f"Release {release.name} has no revision")
+    latest_revision_number = release.latest_revision_number
+    if latest_revision_number is None:
+        return None
     for path in paths:
         # Get template and substitutions
         # elements = {
@@ -212,7 +213,7 @@ async def path_info(release: models.Release, paths: 
list[pathlib.Path]) -> PathI
             info.successes[path] = list(
                 await data.check_result(
                     release_name=release.name,
-                    revision=release.revision,
+                    revision_number=latest_revision_number,
                     primary_rel_path=str(path),
                     member_rel_path=None,
                     status=models.CheckResultStatus.SUCCESS,
@@ -221,7 +222,7 @@ async def path_info(release: models.Release, paths: 
list[pathlib.Path]) -> PathI
             info.warnings[path] = list(
                 await data.check_result(
                     release_name=release.name,
-                    revision=release.revision,
+                    revision_number=latest_revision_number,
                     primary_rel_path=str(path),
                     member_rel_path=None,
                     status=models.CheckResultStatus.WARNING,
@@ -230,7 +231,7 @@ async def path_info(release: models.Release, paths: 
list[pathlib.Path]) -> PathI
             info.errors[path] = list(
                 await data.check_result(
                     release_name=release.name,
-                    revision=release.revision,
+                    revision_number=latest_revision_number,
                     primary_rel_path=str(path),
                     member_rel_path=None,
                     status=models.CheckResultStatus.FAILURE,
@@ -239,7 +240,7 @@ async def path_info(release: models.Release, paths: 
list[pathlib.Path]) -> PathI
     return info
 
 
-async def tasks_ongoing(project_name: str, version_name: str, revision: str) -> int:
+async def tasks_ongoing(project_name: str, version_name: str, revision_number: str) -> int:
     release_name = models.release_name(project_name, version_name)
     async with db.session() as data:
         query = (
@@ -247,8 +248,8 @@ async def tasks_ongoing(project_name: str, version_name: 
str, revision: str) ->
             .select_from(models.Task)
             .where(
                 models.Task.release_name == release_name,
-                models.Task.revision == revision,
-                db.validate_instrumented_attribute(models.Task.status).in_(
+                models.Task.revision_number == revision_number,
+                models.validate_instrumented_attribute(models.Task.status).in_(
                     [models.TaskStatus.QUEUED, models.TaskStatus.ACTIVE]
                 ),
             )
@@ -273,10 +274,10 @@ async def unfinished_releases(asfuid: str) -> dict[str, 
list[models.Release]]:
                 sqlmodel.select(models.Release)
                 .where(
                     models.Release.project_name == project.name,
-                    db.validate_instrumented_attribute(models.Release.phase).in_(active_phases),
+                    models.validate_instrumented_attribute(models.Release.phase).in_(active_phases),
                 )
                 .options(db.select_in_load(models.Release.project))
-                .order_by(db.validate_instrumented_attribute(models.Release.created).desc())
+                .order_by(models.validate_instrumented_attribute(models.Release.created).desc())
             )
             result = await data.execute(stmt)
             active_releases = list(result.scalars().all())
diff --git a/atr/db/models.py b/atr/db/models.py
index 4b6506f..f530274 100644
--- a/atr/db/models.py
+++ b/atr/db/models.py
@@ -24,13 +24,24 @@ import datetime
 import enum
 from typing import Any, Optional
 
+import pydantic
 import sqlalchemy
 import sqlalchemy.event as event
+import sqlalchemy.orm as orm
 import sqlmodel
 
-import atr.db as db
 import atr.schema as schema
 
+sqlmodel.SQLModel.metadata = sqlalchemy.MetaData(
+    naming_convention={
+        "ix": "ix_%(table_name)s_%(column_0_N_name)s",
+        "uq": "uq_%(table_name)s_%(column_0_N_name)s",
+        "ck": "ck_%(table_name)s_%(constraint_name)s",
+        "fk": "fk_%(table_name)s_%(column_0_N_name)s_%(referred_table_name)s",
+        "pk": "pk_%(table_name)s",
+    }
+)
+
 
 class UTCDateTime(sqlalchemy.types.TypeDecorator):
     """
@@ -200,16 +211,19 @@ class Project(sqlmodel.SQLModel, table=True):
 
     async def releases_by_phase(self, phase: "ReleasePhase") -> list["Release"]:
         """Get the releases for the project by phase."""
+        import atr.db as db
+
         query = (
             sqlmodel.select(Release)
             .where(
                 Release.project_name == self.name,
                 Release.phase == phase,
             )
-            .order_by(db.validate_instrumented_attribute(Release.created).desc())
+            .order_by(validate_instrumented_attribute(Release.created).desc())
         )
 
         results = []
+        # TODO: Use inspect(self).session, if available
         async with db.session() as data:
             for result in (await data.execute(query)).all():
                 release = result[0]
@@ -292,6 +306,84 @@ class ReleasePhase(str, enum.Enum):
     RELEASE = "release"
 
 
+def revision_name(release_name: str, seq: int) -> str:
+    return f"{release_name} {seq}"
+
+
+class Revision(sqlmodel.SQLModel, table=True):
+    name: str = sqlmodel.Field(default="", primary_key=True, unique=True)
+    release_name: str | None = sqlmodel.Field(default=None, foreign_key="release.name")
+    release: "Release" = sqlmodel.Relationship(
+        back_populates="revisions",
+        sa_relationship_kwargs={
+            "foreign_keys": "[Revision.release_name]",
+        },
+    )
+    seq: int = sqlmodel.Field(default=0)
+    # This was designed as a property, but it's better for it to be a column
+    # That way, we can do dynamic Release.latest_revision_number construction easier
+    number: str = sqlmodel.Field(default="")
+    asfuid: str
+    created: datetime.datetime = sqlmodel.Field(
+        default_factory=lambda: datetime.datetime.now(datetime.UTC), sa_column=sqlalchemy.Column(UTCDateTime)
+    )
+    phase: ReleasePhase
+
+    parent_name: str | None = sqlmodel.Field(default=None, 
foreign_key="revision.name")
+    parent: Optional["Revision"] = sqlmodel.Relationship(
+        sa_relationship_kwargs=dict(
+            remote_side=lambda: Revision.name,
+            uselist=False,
+            primaryjoin=lambda: Revision.parent_name == Revision.name,
+            back_populates="child",
+        )
+    )
+    child: Optional["Revision"] = 
sqlmodel.Relationship(back_populates="parent")
+
+    description: str | None = sqlmodel.Field(default=None)
+
+
[email protected]_for(Revision, "before_insert")
+def populate_revision_sequence_and_name(
+    mapper: sqlalchemy.orm.Mapper, connection: sqlalchemy.engine.Connection, 
revision: Revision
+) -> None:
+    # We require Revision.release_name to have been set
+    if not revision.release_name:
+        # Raise an exception
+        # Otherwise, Revision.name would be "", Revision.seq 0, and 
Revision.number ""
+        raise RuntimeError("Cannot populate revision sequence and name without 
release_name")
+
+    # Get the Revision with the maximum existing Revision.seq and the same 
Revision.release_name
+    stmt = (
+        sqlmodel.select(Revision.seq, Revision.name)
+        .where(Revision.release_name == revision.release_name)
+        
.order_by(sqlalchemy.desc(validate_instrumented_attribute(Revision.seq)))
+        .limit(1)
+    )
+    parent_row = connection.execute(stmt).fetchone()
+
+    # We cannot happy path this, because we must recalculate the Revision.name 
afterwards
+    if parent_row is None:
+        # This is the first Revision for this Revision.release_name
+        # Revision.seq is 0, but we use a 1-based system
+        revision.seq = 1
+        revision.number = str(revision.seq).zfill(5)
+    else:
+        # We don't have the ORM available in this event listener
+        # Therefore we must construct a new Revision object from the database 
row
+        parent_row_seq = parent_row.seq
+        parent_row_name = parent_row.name
+        # Compute the next sequence number
+        revision.seq = parent_row_seq + 1
+        revision.number = str(revision.seq).zfill(5)
+        # Set the parent_name foreign key. SQLAlchemy will handle the 
relationship.
+        revision.parent_name = parent_row_name
+        # Do NOT set revision.parent directly here
+
+    # Recalculate the Revision.name
+    revision.name = revision_name(revision.release_name, revision.seq)
+
+
 class TaskStatus(str, enum.Enum):
     """Status of a task in the task queue."""
 
@@ -346,7 +438,7 @@ class Task(sqlmodel.SQLModel, table=True):
     # We don't put these in task_args because we want to query them efficiently
     release_name: str | None = sqlmodel.Field(default=None, 
foreign_key="release.name")
     release: Optional["Release"] = 
sqlmodel.Relationship(back_populates="tasks")
-    revision: str | None = sqlmodel.Field(default=None, index=True)
+    revision_number: str | None = sqlmodel.Field(default=None, index=True)
     primary_rel_path: str | None = sqlmodel.Field(default=None, index=True)
 
     # Create an index on status and added for efficient task claiming
@@ -374,7 +466,16 @@ class Task(sqlmodel.SQLModel, table=True):
     )
 
 
+def validate_instrumented_attribute(obj: Any) -> orm.InstrumentedAttribute:
+    """Check if the given object is an InstrumentedAttribute."""
+    if not isinstance(obj, orm.InstrumentedAttribute):
+        raise ValueError(f"Object must be an orm.InstrumentedAttribute, got: 
{type(obj)}")
+    return obj
+
+
 class Release(sqlmodel.SQLModel, table=True):
+    # model_config = compat.SQLModelConfig(extra="forbid", 
from_attributes=True)
+
     # We guarantee that "{project.name}-{version}" is unique
     # Therefore we can use that for the name
     name: str = sqlmodel.Field(default="", primary_key=True, unique=True)
@@ -393,7 +494,6 @@ class Release(sqlmodel.SQLModel, table=True):
     # For example, Apache Airflow Providers do not have an overall version
     # They have one version per package, i.e. per provider
     version: str
-    revision: str | None = sqlmodel.Field(default=None, index=True)
     sboms: list[str] = sqlmodel.Field(default_factory=list, 
sa_column=sqlalchemy.Column(sqlalchemy.JSON))
 
     # Many-to-one: A release can have one release policy, a release policy can 
be used by multiple releases
@@ -407,6 +507,15 @@ class Release(sqlmodel.SQLModel, table=True):
     vote_started: datetime.datetime | None = sqlmodel.Field(default=None, 
sa_column=sqlalchemy.Column(UTCDateTime))
     vote_resolved: datetime.datetime | None = sqlmodel.Field(default=None, 
sa_column=sqlalchemy.Column(UTCDateTime))
 
+    revisions: list["Revision"] = sqlmodel.Relationship(
+        back_populates="release",
+        sa_relationship_kwargs={
+            "order_by": "Revision.seq",
+            "foreign_keys": "[Revision.release_name]",
+            "cascade": "all, delete-orphan",
+        },
+    )
+
     # One-to-many: A release can have multiple tasks
     tasks: list["Task"] = sqlmodel.Relationship(
         back_populates="release", sa_relationship_kwargs={"cascade": "all, 
delete-orphan"}
@@ -416,7 +525,7 @@ class Release(sqlmodel.SQLModel, table=True):
     check_results: list["CheckResult"] = 
sqlmodel.Relationship(back_populates="release")
 
     # The combination of project_name and version must be unique
-    __table_args__ = (sqlalchemy.UniqueConstraint("project_name", "version", 
name="unique_project_version"),)
+    __table_args__ = (sqlmodel.UniqueConstraint("project_name", "version", 
name="unique_project_version"),)
 
     @property
     def committee(self) -> Committee | None:
@@ -432,11 +541,33 @@ class Release(sqlmodel.SQLModel, table=True):
         return f"{self.project.short_display_name} {self.version}"
 
     @property
-    def unwrap_revision(self) -> str:
-        """Get the revision for the release."""
-        if self.revision is None:
-            raise ValueError("Release has no revision")
-        return self.revision
+    def unwrap_revision_number(self) -> str:
+        """Get the revision number for the release, or raise an exception."""
+        number = self.latest_revision_number
+        if number is None:
+            raise ValueError("Release has no revisions")
+        return number
+
+    @pydantic.computed_field  # type: ignore[prop-decorator]
+    @property
+    def latest_revision_number(self) -> str | None:
+        """Get the latest revision number for the release."""
+        # The session must still be active for this to work
+        number = getattr(self, "_latest_revision_number", None)
+        if not (isinstance(number, str) or (number is None)):
+            raise ValueError("Latest revision number is not a str or None")
+        return number
+
+
+# https://github.com/fastapi/sqlmodel/issues/240#issuecomment-2074161775
+Release._latest_revision_number = orm.column_property(
+    sqlalchemy.select(validate_instrumented_attribute(Revision.number))
+    .where(validate_instrumented_attribute(Revision.release_name) == 
Release.name)
+    .order_by(validate_instrumented_attribute(Revision.seq).desc())
+    .limit(1)
+    .correlate_except(Revision)
+    .scalar_subquery(),
+)
 
 
 class SSHKey(sqlmodel.SQLModel, table=True):
@@ -456,7 +587,8 @@ class CheckResult(sqlmodel.SQLModel, table=True):
     id: int = sqlmodel.Field(default=None, primary_key=True)
     release_name: str = sqlmodel.Field(foreign_key="release.name")
     release: Release = sqlmodel.Relationship(back_populates="check_results")
-    revision: str = sqlmodel.Field(default=None, index=True)
+    # We don't call this latest_revision_number, because it might not be the 
latest
+    revision_number: str | None = sqlmodel.Field(default=None, index=True)
     checker: str
     primary_rel_path: str | None = sqlmodel.Field(default=None, index=True)
     member_rel_path: str | None = sqlmodel.Field(default=None, index=True)
@@ -476,7 +608,9 @@ class TextValue(sqlmodel.SQLModel, table=True):
 @event.listens_for(Release, "before_insert")
 def check_release_name(_mapper: sqlalchemy.orm.Mapper, _connection: 
sqlalchemy.Connection, release: Release) -> None:
     if release.name == "":
-        release.name = release_name(release.project.name, release.version)
+        if (release.project_name is None) or (release.version is None):
+            raise ValueError("Cannot generate release name without 
project_name and version")
+        release.name = release_name(release.project_name, release.version)
 
 
 def project_version(release_name: str) -> tuple[str, str]:
diff --git a/atr/manager.py b/atr/manager.py
index c8a81d0..d3b1494 100644
--- a/atr/manager.py
+++ b/atr/manager.py
@@ -301,7 +301,7 @@ class WorkerManager:
                         sqlmodel.update(models.Task)
                         .where(
                             sqlmodel.and_(
-                                
db.validate_instrumented_attribute(models.Task.pid).notin_(active_worker_pids),
+                                
models.validate_instrumented_attribute(models.Task.pid).notin_(active_worker_pids),
                                 models.Task.status == models.TaskStatus.ACTIVE,
                             )
                         )
diff --git a/atr/revision.py b/atr/revision.py
index 822701c..539243a 100644
--- a/atr/revision.py
+++ b/atr/revision.py
@@ -39,112 +39,102 @@ async def create_and_manage(
     """Manage the creation and symlinking of a mutable release revision."""
     base_dir = util.get_unfinished_dir()
     base_release_dir = base_dir / project_name / version_name
-    new_revision_name = _new_name(asf_uid)
-    new_revision_dir = base_release_dir / new_revision_name
 
     # Ensure that the base directory for the release exists
     await aiofiles.os.makedirs(base_release_dir, exist_ok=True)
 
-    # Get the parent revision, if available
+    release_name = models.release_name(project_name, version_name)
+    # Create and commit the new Revision
     async with db.session() as data:
-        release_name = models.release_name(project_name, version_name)
-        namespace, release, parent_revision_id, parent_revision_dir = await 
_create_and_manage(
-            data, release_name, base_release_dir
+        release_one = await data.release(name=release_name, 
_project=True).demand(
+            RuntimeError("Release does not exist for new revision creation")
         )
+
+        new_revision = models.Revision(
+            # name is automatically computed in an event listener
+            release_name=release_one.name,
+            release=release_one,
+            # seq is automatically computed in an event listener
+            # number is automatically computed in an event listener
+            asfuid=asf_uid,
+            created=datetime.datetime.now(datetime.UTC),
+            phase=models.ReleasePhase.RELEASE_CANDIDATE_DRAFT,
+            # parent_name is automatically computed in an event listener
+            # parent is automatically computed in an event listener
+            child=None,
+            description=None,
+        )
+        data.add(new_revision)
+        await data.commit()
+
+        # After commit, new_revision has its .name, .seq, and .number 
populated by the listener
+        new_revision_name = new_revision.name
+        new_revision_number = new_revision.number
+
+    if not (new_revision_name and new_revision_number):
+        raise RuntimeError("Failed to obtain the name and number of the newly 
committed revision.")
+
+    # Details needed for directory structure and yield
+    parent_revision_dir: pathlib.Path | None = None
+
+    # Get details of the committed revision
+    async with db.session() as data:
+        new_revision_with_parent = await data.revision(name=new_revision_name, 
_parent=True).demand(
+            RuntimeError("Committed revision not found or parent could not be 
loaded")
+        )
+        if new_revision_with_parent.parent:
+            parent_revision_dir = base_release_dir / 
new_revision_with_parent.parent.number
+
+    new_revision_dir = base_release_dir / new_revision_number
+
     try:
         # Create the new revision directory
         if parent_revision_dir:
-            _LOGGER.info(f"Creating new revision {new_revision_name} by hard 
linking from {parent_revision_id}")
+            _LOGGER.info(f"Creating new revision {new_revision_number} by hard 
linking from {parent_revision_dir.name}")
             await util.create_hard_link_clone(parent_revision_dir, 
new_revision_dir)
         elif create_directory:
-            _LOGGER.info(f"Creating new empty revision directory 
{new_revision_name}")
+            _LOGGER.info(f"Creating new empty revision directory 
{new_revision_number}")
             await aiofiles.os.makedirs(new_revision_dir)
         else:
-            _LOGGER.info(f"Creating new empty revision with no directory for 
{new_revision_name}")
+            _LOGGER.info(f"Creating new empty revision with no directory for 
{new_revision_number}")
 
         # Yield control to the block within "async with"
-        yield new_revision_dir, new_revision_name
-
-        # # Was a KEYS file uploaded in DRAFT phase?
-        # # If so, we need to try to import all of the keys in a task
-        # if (release is None) or (release.phase is 
models.ReleasePhase.RELEASE_CANDIDATE_DRAFT):
-        #     await tasks.keys.import_file(release_name, str(new_revision_dir 
/ "KEYS"))
+        yield new_revision_dir, new_revision_number
 
-        # If the "with" block completed without error, store the parent link
+        # If the release is in the DRAFT phase, schedule the checks to be run
+        # The caller may have modified release_one, so we must get it again
         async with db.session() as data:
-            async with data.begin():
-                if parent_revision_id is not None:
-                    _LOGGER.info(f"Storing parent link for {new_revision_name} 
-> {parent_revision_id}")
-                    data.add(
-                        models.TextValue(
-                            ns=f"{namespace}-revision-parent", 
key=new_revision_name, value=parent_revision_id
-                        )
-                    )
-                else:
-                    _LOGGER.info(f"No parent revision for {new_revision_name}")
-                release = await data.release(name=release_name, 
_project=True).demand(
-                    RuntimeError("Release does not exist")
+            release_two = await data.release(name=release_name).demand(
+                RuntimeError("Release not found for task scheduling")
+            )
+            if release_two.phase is 
models.ReleasePhase.RELEASE_CANDIDATE_DRAFT:
+                _LOGGER.warning(f"Scheduling checks for {project_name} 
{version_name} {new_revision_number}")
+                # TODO: Passing data=data here breaks the database session
+                # Should figure out why that happens
+                await tasks.draft_checks(project_name, version_name, 
new_revision_number)
+            else:
+                _LOGGER.warning(
+                    f"Skipping checks for {project_name} {version_name}"
+                    f" {new_revision_number} because release is not in DRAFT 
phase"
                 )
-                release.revision = new_revision_name
-        if (release is None) or (release.phase is 
models.ReleasePhase.RELEASE_CANDIDATE_DRAFT):
-            # Schedule the checks to be run
-            await tasks.draft_checks(project_name, version_name, 
new_revision_name)
 
     except Exception:
-        _LOGGER.exception(f"Error during revision management for 
{new_revision_name}")
-        # Keep this in case we do clean up the new revision directory
+        _LOGGER.exception(f"Error during revision management for 
{new_revision_number}")
+        # Consider adding cleanup for new_revision_dir if it was created 
before an error
         raise
-    finally:
-        # TODO: It's hard to know whether we should clean up the new revision 
directory
-        # Generally we should probably keep it no matter what
-        # The only exception would be if release.revision was never set
-        # But if it wasn't, it doesn't matter so much
-        ...
 
 
-async def latest_info(project_name: str, version_name: str) -> tuple[str | 
None, str | None, datetime.datetime | None]:
+async def latest_info(project_name: str, version_name: str) -> tuple[str, str, 
datetime.datetime] | None:
     """Get the name, editor, and timestamp of the latest revision."""
-    revision_name: str | None = None
-    editor: str | None = None
-    timestamp: datetime.datetime | None = None
-
     async with db.session() as data:
+        # TODO: No need to get release here
+        # Just use maximum seq from revisions
         release = await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             RuntimeError("Release does not exist")
         )
-        revision_name = release.revision
-        if not revision_name:
-            return revision_name, editor, timestamp
-
-        parts = revision_name.split("@", 1)
-        if len(parts) == 2:
-            editor = parts[0]
-            dt_obj = datetime.datetime.strptime(parts[1][:-1], 
"%Y-%m-%dT%H.%M.%S.%f")
-            timestamp = dt_obj.replace(tzinfo=datetime.UTC)
-
-    return revision_name, editor, timestamp
-
-
-async def _create_and_manage(
-    data: db.Session, release_name: str, base_release_dir: pathlib.Path
-) -> tuple[str, models.Release | None, str | None, pathlib.Path | None]:
-    parent_revision_id: str | None = None
-    parent_revision_dir: pathlib.Path | None = None
-    release = await data.release(name=release_name, _project=True).get()
-    namespace = "draft"
-    if release is not None:
-        parent_revision_id = release.revision
-        if parent_revision_id:
-            parent_revision_dir = base_release_dir / parent_revision_id
-        if release.phase is models.ReleasePhase.RELEASE_PREVIEW:
-            namespace = "preview"
-    return namespace, release, parent_revision_id, parent_revision_dir
-
-
-def _new_name(asf_uid: str) -> str:
-    """Generate a new revision name with timestamp truncated to 
milliseconds."""
-    now_utc = datetime.datetime.now(datetime.UTC)
-    time_prefix = now_utc.strftime("%Y-%m-%dT%H.%M.%S")
-    milliseconds = now_utc.microsecond // 1000
-    timestamp_str = f"{time_prefix}.{milliseconds:03d}Z"
-    return f"{asf_uid}@{timestamp_str}"
+        if release.latest_revision_number is None:
+            return None
+        revision = await data.revision(release_name=release.name, 
number=release.latest_revision_number).get()
+        if not revision:
+            return None
+    return revision.number, revision.asfuid, revision.created
diff --git a/atr/routes/__init__.py b/atr/routes/__init__.py
index 06dd0c8..9d0d1a1 100644
--- a/atr/routes/__init__.py
+++ b/atr/routes/__init__.py
@@ -31,11 +31,11 @@ import asfquart.base as base
 import asfquart.session as session
 import quart
 
+import atr.config as config
 import atr.db as db
 import atr.db.models as models
 import atr.user as user
 import atr.util as util
-from atr import config
 
 if TYPE_CHECKING:
     from collections.abc import Awaitable, Callable, Coroutine, Sequence
@@ -213,9 +213,9 @@ class CommitterSession:
         phase: models.ReleasePhase | db.NotSet | None = db.NOT_SET,
         data: db.Session | None = None,
         with_committee: bool = False,
-        with_packages: bool = False,
         with_project: bool = True,
         with_tasks: bool = False,
+        with_revisions: bool = False,
     ) -> models.Release:
         # We reuse db.NOT_SET as an entirely different sentinel
         # TODO: We probably shouldn't do that, or should make it clearer
@@ -234,6 +234,7 @@ class CommitterSession:
                     _committee=with_committee,
                     _project=with_project,
                     _tasks=with_tasks,
+                    _revisions=with_revisions,
                 ).demand(base.ASFQuartException("Release does not exist", 
errorcode=404))
         else:
             release = await data.release(
@@ -242,6 +243,7 @@ class CommitterSession:
                 _committee=with_committee,
                 _project=with_project,
                 _tasks=with_tasks,
+                _revisions=with_revisions,
             ).demand(base.ASFQuartException("Release does not exist", 
errorcode=404))
         return release
 
diff --git a/atr/routes/announce.py b/atr/routes/announce.py
index 539b022..ed8bfe0 100644
--- a/atr/routes/announce.py
+++ b/atr/routes/announce.py
@@ -83,7 +83,7 @@ async def selected(session: routes.CommitterSession, 
project_name: str, version_
     announce_form = await 
_create_announce_form_instance(util.permitted_recipients(session.uid))
     # Hidden fields
     announce_form.preview_name.data = release.name
-    announce_form.preview_revision.data = release.unwrap_revision
+    announce_form.preview_revision.data = release.unwrap_revision_number
 
     # Variables used in defaults for subject and body
     project_display_name = release.project.display_name or release.project.name
@@ -112,7 +112,7 @@ async def selected_post(
             error_message = f"{error_message}: {error_details}"
 
         # Render the page again, with errors
-        release = await session.release(
+        release: models.Release = await session.release(
             project_name, version_name, with_committee=True, 
phase=models.ReleasePhase.RELEASE_PREVIEW
         )
         await quart.flash(error_message, "error")
@@ -128,7 +128,7 @@ async def selected_post(
     async with db.session() as data:
         try:
             release = await session.release(
-                project_name, version_name, 
phase=models.ReleasePhase.RELEASE_PREVIEW, data=data
+                project_name, version_name, 
phase=models.ReleasePhase.RELEASE_PREVIEW, with_revisions=True, data=data
             )
 
             test_list = "user-tests"
@@ -166,20 +166,20 @@ async def selected_post(
 
             # Prepare paths for file operations
             source_base = util.release_directory_base(release)
-            source = str(source_base / release.unwrap_revision)
+            source = str(source_base / release.unwrap_revision_number)
 
             # TODO: We should update only if the announcement email was sent
             # That would require moving this, and the filesystem operations, 
into a task
             release.phase = models.ReleasePhase.RELEASE
-            release.revision = None
+            # Delete all revisions associated with this release
+            for revision in release.revisions:
+                await data.delete(revision)
+            # Essential to set revisions to [], otherwise release.revisions is 
still populated
+            # And util.release_directory() below checks for it
+            release.revisions = []
             release.released = datetime.datetime.now(datetime.UTC)
             await data.commit()
 
-            # This must come after updating the release object
-            target = str(util.release_directory(release))
-            if await aiofiles.os.path.exists(target):
-                raise routes.FlashError("Release already exists")
-
         except (routes.FlashError, Exception) as e:
             logging.exception("Error during release announcement, database 
phase:")
             return await session.redirect(
@@ -189,6 +189,15 @@ async def selected_post(
                 version_name=version_name,
             )
 
+    async with db.session() as data:
+        # This must come after updating the release object
+        # Do not put it in the data block after data.commit()
+        # Otherwise util.release_directory() will not work
+        release = await 
data.release(name=release.name).demand(RuntimeError("Release does not exist"))
+        target = str(util.release_directory(release))
+        if await aiofiles.os.path.exists(target):
+            raise routes.FlashError("Release already exists")
+
     try:
         await aioshutil.move(source, target)
         if source_base:
diff --git a/atr/routes/candidate.py b/atr/routes/candidate.py
index ba72035..d91d693 100644
--- a/atr/routes/candidate.py
+++ b/atr/routes/candidate.py
@@ -54,7 +54,9 @@ async def view(session: routes.CommitterSession, 
project_name: str, version_name
     # Convert async generator to list
     file_stats = [
         stat
-        async for stat in util.content_list(util.get_unfinished_dir(), 
project_name, version_name, release.revision)
+        async for stat in util.content_list(
+            util.get_unfinished_dir(), project_name, version_name, 
release.unwrap_revision_number
+        )
     ]
     logging.debug(f"File stats: {file_stats}")
 
diff --git a/atr/routes/compose.py b/atr/routes/compose.py
index 771ae16..499a67c 100644
--- a/atr/routes/compose.py
+++ b/atr/routes/compose.py
@@ -53,16 +53,19 @@ async def check(
     async with db.session() as data:
         user_ssh_keys = await data.ssh_key(asf_uid=session.uid).all()
 
-    revision_name_from_link, revision_editor, revision_time = await 
revision.latest_info(
-        release.project.name, release.version
-    )
-
     # Get the number of ongoing tasks for the current revision
     ongoing_tasks_count = 0
-    if revision_name_from_link:
-        ongoing_tasks_count = await interaction.tasks_ongoing(
-            release.project.name, release.version, revision_name_from_link
-        )
+    match await revision.latest_info(release.project.name, release.version):
+        case (revision_number, revision_editor, revision_timestamp):
+            ongoing_tasks_count = await interaction.tasks_ongoing(
+                release.project.name,
+                release.version,
+                revision_number,  # type: ignore[arg-type]
+            )
+        case None:
+            revision_number = None  # type: ignore[assignment]
+            revision_editor = None  # type: ignore[assignment]
+            revision_timestamp = None  # type: ignore[assignment]
 
     delete_draft_form = await draft.DeleteForm.create_form()
     delete_file_form = await draft.DeleteFileForm.create_form()
@@ -78,8 +81,8 @@ async def check(
         paths=paths,
         info=info,
         revision_editor=revision_editor,
-        revision_time=revision_time,
-        revision_name_from_link=revision_name_from_link,
+        revision_time=revision_timestamp,
+        revision_number=revision_number,
         ongoing_tasks_count=ongoing_tasks_count,
         delete_form=delete_draft_form,
         delete_file_form=delete_file_form,
diff --git a/atr/routes/download.py b/atr/routes/download.py
index 2093fd4..93023d9 100644
--- a/atr/routes/download.py
+++ b/atr/routes/download.py
@@ -27,13 +27,13 @@ import quart
 import werkzeug.wrappers.response as response
 import zipstream
 
+import atr.config as config
 import atr.db as db
 import atr.db.models as models
 import atr.routes as routes
 import atr.routes.mapping as mapping
 import atr.routes.root as root
 import atr.util as util
-from atr import config
 
 
 @routes.committer("/download/all/<project_name>/<version_name>")
diff --git a/atr/routes/draft.py b/atr/routes/draft.py
index 51b1dd7..4b7d3c7 100644
--- a/atr/routes/draft.py
+++ b/atr/routes/draft.py
@@ -142,7 +142,7 @@ async def delete_file(session: routes.CommitterSession, 
project_name: str, versi
     try:
         async with revision.create_and_manage(project_name, version_name, 
session.uid) as (
             new_revision_dir,
-            new_revision_name,
+            new_revision_number,
         ):
             # Path to delete within the new revision directory
             path_in_new_revision = new_revision_dir / rel_path_to_delete
@@ -152,7 +152,7 @@ async def delete_file(session: routes.CommitterSession, 
project_name: str, versi
                 # This indicates a potential severe issue with hard linking or 
logic
                 logging.error(
                     f"SEVERE ERROR! File {rel_path_to_delete} not found in new 
revision"
-                    f" {new_revision_name} before deletion"
+                    f" {new_revision_number} before deletion"
                 )
                 raise routes.FlashError("File to delete was not found in the 
new revision")
 
@@ -197,7 +197,7 @@ async def fresh(session: routes.CommitterSession, 
project_name: str, version_nam
     # Therefore we only show the button for this to admins
     async with revision.create_and_manage(project_name, version_name, 
session.uid) as (
         _new_revision_dir,
-        _new_revision_name,
+        _new_revision_number,
     ):
         ...
 
@@ -229,7 +229,7 @@ async def hashgen(
     try:
         async with revision.create_and_manage(project_name, version_name, 
session.uid) as (
             new_revision_dir,
-            new_revision_name,
+            new_revision_number,
         ):
             path_in_new_revision = new_revision_dir / rel_path
             hash_path_rel = rel_path.name + f".{hash_type}"
@@ -238,7 +238,7 @@ async def hashgen(
             # Check that the source file exists in the new revision
             if not await aiofiles.os.path.exists(path_in_new_revision):
                 logging.error(
-                    f"Source file {rel_path} not found in new revision 
{new_revision_name} for hash generation."
+                    f"Source file {rel_path} not found in new revision 
{new_revision_number} for hash generation."
                 )
                 raise routes.FlashError("Source file not found in the new 
revision.")
 
@@ -287,7 +287,7 @@ async def sbomgen(
     try:
         async with revision.create_and_manage(project_name, version_name, 
session.uid) as (
             new_revision_dir,
-            new_revision_name,
+            new_revision_number,
         ):
             path_in_new_revision = new_revision_dir / rel_path
             sbom_path_rel = rel_path.with_suffix(rel_path.suffix + 
".cdx.json").name
@@ -296,7 +296,7 @@ async def sbomgen(
             # Check that the source file exists in the new revision
             if not await aiofiles.os.path.exists(path_in_new_revision):
                 logging.error(
-                    f"Source file {rel_path} not found in new revision 
{new_revision_name} for SBOM generation."
+                    f"Source file {rel_path} not found in new revision 
{new_revision_number} for SBOM generation."
                 )
                 raise routes.FlashError("Source artifact file not found in the 
new revision.")
 
@@ -318,7 +318,7 @@ async def sbomgen(
                     added=datetime.datetime.now(datetime.UTC),
                     status=models.TaskStatus.QUEUED,
                     release_name=release.name,
-                    revision=new_revision_name,
+                    revision_number=new_revision_number,
                 )
                 data.add(sbom_task)
                 await data.commit()
@@ -446,7 +446,9 @@ async def view(session: routes.CommitterSession, 
project_name: str, version_name
     # Convert async generator to list
     file_stats = [
         stat
-        async for stat in util.content_list(util.get_unfinished_dir(), 
project_name, version_name, release.revision)
+        async for stat in util.content_list(
+            util.get_unfinished_dir(), project_name, version_name, 
release.unwrap_revision_number
+        )
     ]
 
     return await quart.render_template(
diff --git a/atr/routes/finish.py b/atr/routes/finish.py
index e8b1003..7303f77 100644
--- a/atr/routes/finish.py
+++ b/atr/routes/finish.py
@@ -63,12 +63,12 @@ async def selected(session: routes.CommitterSession, 
project_name: str, version_
         )
         user_ssh_keys = await data.ssh_key(asf_uid=session.uid).all()
 
-    current_revision_dir = util.release_directory(release)
+    latest_revision_dir = util.release_directory(release)
     file_paths_rel: list[pathlib.Path] = []
     unique_dirs: set[pathlib.Path] = {pathlib.Path(".")}
 
     try:
-        async for path in util.paths_recursive(current_revision_dir):
+        async for path in util.paths_recursive(latest_revision_dir):
             file_paths_rel.append(path)
             unique_dirs.add(path.parent)
     except FileNotFoundError:
@@ -111,7 +111,7 @@ async def _move_file(
         try:
             async with revision.create_and_manage(project_name, version_name, 
session.uid) as (
                 new_revision_dir,
-                new_revision_name,
+                new_revision_number,
             ):
                 source_path_in_new = new_revision_dir / source_file_rel
                 target_path_in_new = new_revision_dir / target_dir_rel / 
source_file_rel.name
@@ -123,7 +123,9 @@ async def _move_file(
                     )
                     return await session.redirect(selected, 
project_name=project_name, version_name=version_name)
 
-                _LOGGER.info(f"Moving {source_path_in_new} to 
{target_path_in_new} in new revision {new_revision_name}")
+                _LOGGER.info(
+                    f"Moving {source_path_in_new} to {target_path_in_new} in 
new revision {new_revision_number}"
+                )
                 await aiofiles.os.rename(source_path_in_new, 
target_path_in_new)
 
             await quart.flash(
diff --git a/atr/routes/keys.py b/atr/routes/keys.py
index d674e71..5a564a1 100644
--- a/atr/routes/keys.py
+++ b/atr/routes/keys.py
@@ -41,10 +41,10 @@ import wtforms
 import atr.db as db
 import atr.db.interaction as interaction
 import atr.db.models as models
+import atr.revision as revision
 import atr.routes as routes
+import atr.routes.compose as compose
 import atr.util as util
-from atr import revision
-from atr.routes import compose
 
 
 class AddSSHKeyForm(util.QuartFormTyped):
@@ -191,7 +191,7 @@ async def import_selected_revision(
     if (success_count > 0) and (error_count == 0):
         async with revision.create_and_manage(project_name, version_name, 
session.uid) as (
             new_revision_dir,
-            _new_revision_name,
+            _new_revision_number,
         ):
             path_in_new_revision = new_revision_dir / "KEYS"
             await aiofiles.os.remove(path_in_new_revision)
diff --git a/atr/routes/preview.py b/atr/routes/preview.py
index 669b8f1..73d94f5 100644
--- a/atr/routes/preview.py
+++ b/atr/routes/preview.py
@@ -151,7 +151,9 @@ async def view(session: routes.CommitterSession, 
project_name: str, version_name
     # Convert async generator to list
     file_stats = [
         stat
-        async for stat in util.content_list(util.get_unfinished_dir(), 
project_name, version_name, release.revision)
+        async for stat in util.content_list(
+            util.get_unfinished_dir(), project_name, version_name, 
release.unwrap_revision_number
+        )
     ]
 
     return await quart.render_template(
diff --git a/atr/routes/report.py b/atr/routes/report.py
index 1b6b980..215a037 100644
--- a/atr/routes/report.py
+++ b/atr/routes/report.py
@@ -41,7 +41,7 @@ async def selected_path(session: routes.CommitterSession, 
project_name: str, ver
 
     # TODO: When we do more than one thing in a dir, we should use the 
revision directory directly
     abs_path = util.release_directory(release) / rel_path
-    if release.revision is None:
+    if release.latest_revision_number is None:
         raise base.ASFQuartException("Release has no revision", errorcode=500)
 
     # Check that the file exists
@@ -54,10 +54,12 @@ async def selected_path(session: routes.CommitterSession, 
project_name: str, ver
     # Get all check results for this file
     async with db.session() as data:
         query = data.check_result(
-            release_name=release.name, revision=release.revision, 
primary_rel_path=str(rel_path)
+            release_name=release.name,
+            revision_number=release.latest_revision_number,
+            primary_rel_path=str(rel_path),
         ).order_by(
-            
db.validate_instrumented_attribute(models.CheckResult.checker).asc(),
-            
db.validate_instrumented_attribute(models.CheckResult.created).desc(),
+            
models.validate_instrumented_attribute(models.CheckResult.checker).asc(),
+            
models.validate_instrumented_attribute(models.CheckResult.created).desc(),
         )
         all_results = await query.all()
 
diff --git a/atr/routes/resolve.py b/atr/routes/resolve.py
index 8858b1f..41d089c 100644
--- a/atr/routes/resolve.py
+++ b/atr/routes/resolve.py
@@ -28,8 +28,8 @@ import atr.routes as routes
 import atr.routes.compose as compose
 import atr.routes.finish as finish
 import atr.routes.vote as vote
+import atr.tasks.message as message
 import atr.util as util
-from atr.tasks import message
 
 
 class ResolveForm(util.QuartFormTyped):
diff --git a/atr/routes/revisions.py b/atr/routes/revisions.py
index 6cef6b7..2d023a9 100644
--- a/atr/routes/revisions.py
+++ b/atr/routes/revisions.py
@@ -16,22 +16,22 @@
 # under the License.
 
 import asyncio
-import contextlib
-import datetime
-import logging
 import pathlib
-from collections.abc import Callable
 
 import aiofiles.os
+import aioshutil
 import asfquart.base as base
 import quart
+import sqlalchemy.orm as orm
 import sqlmodel
 import werkzeug.wrappers.response as response
 
 import atr.db as db
 import atr.db.models as models
 import atr.routes as routes
+import atr.schema as schema
 import atr.util as util
+from atr import revision
 
 
 @routes.committer("/revisions/<project_name>/<version_name>")
@@ -47,56 +47,33 @@ async def selected(session: routes.CommitterSession, 
project_name: str, version_
         phase_key = "preview"
     release_dir = util.release_directory_base(release)
 
-    revision_dirs: list[str] = []
-    with contextlib.suppress(FileNotFoundError):
-        for entry in await aiofiles.os.listdir(str(release_dir)):
-            # Match pattern like "[email protected]"
-            if "@" in entry and entry.endswith("Z"):
-                if await aiofiles.os.path.isdir(release_dir / entry):
-                    revision_dirs.append(entry)
-
-    # Sort revisions by timestamp
-    def sort_key(rev_name: str) -> datetime.datetime:
-        try:
-            # Remove trailing Z, though we could just put it in the template 
pattern
-            timestamp_str = rev_name.split("@", 1)[1][:-1]
-            return datetime.datetime.strptime(timestamp_str, 
"%Y-%m-%dT%H.%M.%S.%f")
-        except (IndexError, ValueError):
-            # Should not happen for valid names, put invalid ones last
-            return datetime.datetime.min
-
-    # Sort revisions by timestamp, newest first
-    revision_dirs.sort(key=sort_key, reverse=True)
+    # Determine the current revision
+    latest_revision_number = release.unwrap_revision_number
 
-    async with db.session() as data:
-        # Get parent links using a direct query due to the use of in_(...)
-        query = sqlmodel.select(models.TextValue).where(
-            models.TextValue.ns == release.name + f" {phase_key}",
-            
db.validate_instrumented_attribute(models.TextValue.key).in_(revision_dirs),
+    # Oldest to newest, to build diffs relative to previous revision
+    async with db.session() as data_for_revisions:
+        revisions_stmt = (
+            sqlmodel.select(models.Revision)
+            .where(models.Revision.release_name == release.name)
+            
.order_by(models.validate_instrumented_attribute(models.Revision.seq))
+            
.options(orm.selectinload(models.validate_instrumented_attribute(models.Revision.parent)))
         )
-        parent_links_result = await data.execute(query)
-        parent_map = {link.key: link.value for link in 
parent_links_result.scalars().all()}
-
-    # Determine the current revision
-    current_revision_name = release.revision
+        revisions_result = await data_for_revisions.execute(revisions_stmt)
+        revisions_list: list[models.Revision] = 
list(revisions_result.scalars().all())
 
     revision_history = []
-    prev_revision_files: set[pathlib.Path] | None = None
-    prev_revision_name: str | None = None
-
-    # Oldest to newest, to build diffs relative to previous revision
-    for rev_name in reversed(revision_dirs):
-        revision_data, current_revision_files = await _revisions_process(
-            rev_name,
-            release_dir,
-            parent_map,
-            prev_revision_files,
-            prev_revision_name,
-            sort_key,
+    loop_prev_revision_files: set[pathlib.Path] | None = None
+    loop_prev_revision_number: str | None = None
+    for current_db_revision in revisions_list:
+        current_files_for_diff, files_diff_for_current = await 
_revision_files_diff(
+            revision_number=current_db_revision.number,
+            release_dir=release_dir,
+            prev_revision_files=loop_prev_revision_files,
+            prev_revision_number=loop_prev_revision_number,
         )
-        revision_history.append(revision_data)
-        prev_revision_files = current_revision_files
-        prev_revision_name = rev_name
+        revision_history.append((current_db_revision, files_diff_for_current))
+        loop_prev_revision_files = current_files_for_diff
+        loop_prev_revision_number = current_db_revision.number
 
     return await quart.render_template(
         "revisions-selected.html",
@@ -105,7 +82,7 @@ async def selected(session: routes.CommitterSession, 
project_name: str, version_
         release=release,
         phase_key=phase_key,
         revision_history=list(reversed(revision_history)),
-        current_revision_name=current_revision_name,
+        latest_revision_number=latest_revision_number,
         empty_form=await util.EmptyForm.create_form(),
     )
 
@@ -118,80 +95,67 @@ async def selected_post(session: routes.CommitterSession, 
project_name: str, ver
     # TODO: This is not truly empty, so make a form object for this
     await util.validate_empty_form()
     form_data = await quart.request.form
-    revision_name = form_data.get("revision_name")
-    if not revision_name:
-        raise base.ASFQuartException("Missing revision name", errorcode=400)
+    selected_revision_number = form_data.get("revision_number")
+    if not selected_revision_number:
+        raise base.ASFQuartException("Missing revision number", errorcode=400)
 
-    try:
-        # Target must be relative for the symlink
-        # TODO: We should probably log who is doing this, to create an audit 
trail
-        async with db.session() as data:
-            try:
-                release = await session.release(project_name, version_name, 
data=data)
-            except base.ASFQuartException:
-                release = await session.release(
-                    project_name, version_name, 
phase=models.ReleasePhase.RELEASE_PREVIEW, data=data
-                )
-            release_dir = util.release_directory_base(release)
-
-            # Check that the target revision directory exists
-            target_revision_dir = release_dir / revision_name
-            if not await aiofiles.os.path.isdir(target_revision_dir):
-                raise base.ASFQuartException("Target revision directory not 
found", errorcode=404)
-
-            release.revision = revision_name
-            await data.commit()
-    except base.ASFQuartException as e:
-        raise e
-    except Exception as e:
-        logging.exception("Error setting revision:")
-        return await session.redirect(
-            selected,
-            error=f"Failed to set revision {revision_name} as latest: {e!s}",
-            project_name=project_name,
-            version_name=version_name,
-        )
+    async with db.session() as data:
+        release = await session.release(project_name, version_name, 
phase=None, data=data)
+        selected_revision_dir = util.release_directory_base(release) / 
selected_revision_number
+        if release.phase not in {models.ReleasePhase.RELEASE_CANDIDATE_DRAFT, 
models.ReleasePhase.RELEASE_PREVIEW}:
+            raise base.ASFQuartException("Cannot set revision for non-draft or 
preview release", errorcode=400)
+
+    async with revision.create_and_manage(project_name, version_name, 
session.uid) as (
+        new_revision_dir,
+        new_revision_number,
+    ):
+        # TODO: Stop create_and_manage from hard linking the parent first
+        await aioshutil.rmtree(new_revision_dir)  # type: ignore[call-arg]
+        await util.create_hard_link_clone(selected_revision_dir, 
new_revision_dir)
 
     return await session.redirect(
         selected,
-        success=f"Revision {revision_name} set as latest",
+        success=f"Copied revision {selected_revision_number} to new latest 
revision, {new_revision_number}",
         project_name=project_name,
         version_name=version_name,
     )
 
 
-async def _revisions_process(
-    rev_name: str,
+class FilesDiff(schema.Strict):
+    added: list[pathlib.Path]
+    removed: list[pathlib.Path]
+    modified: list[pathlib.Path]
+
+
+async def _revision_files_diff(
+    revision_number: str,
     release_dir: pathlib.Path,
-    parent_map: dict[str, str],
     prev_revision_files: set[pathlib.Path] | None,
-    prev_revision_name: str | None,
-    sort_key: Callable[[str], datetime.datetime],
-) -> tuple[dict, set[pathlib.Path]]:
+    prev_revision_number: str | None,
+) -> tuple[set[pathlib.Path], FilesDiff]:
     """Process a single revision and calculate its diff from the previous."""
-    current_revision_dir = release_dir / rev_name
-    current_revision_files = {path async for path in 
util.paths_recursive(current_revision_dir)}
-    parent_name = parent_map.get(rev_name)
+    latest_revision_dir = release_dir / revision_number
+    latest_revision_files = {path async for path in 
util.paths_recursive(latest_revision_dir)}
 
     added_files: set[pathlib.Path] = set()
     removed_files: set[pathlib.Path] = set()
     modified_files: set[pathlib.Path] = set()
 
-    if (prev_revision_files is not None) and (prev_revision_name is not None):
-        added_files = current_revision_files - prev_revision_files
-        removed_files = prev_revision_files - current_revision_files
-        common_files = current_revision_files & prev_revision_files
+    if (prev_revision_files is not None) and (prev_revision_number is not 
None):
+        added_files = latest_revision_files - prev_revision_files
+        removed_files = prev_revision_files - latest_revision_files
+        common_files = latest_revision_files & prev_revision_files
 
         # Check modification times for common files
-        parent_revision_dir = release_dir / prev_revision_name
+        parent_revision_dir = release_dir / prev_revision_number
         mtime_tasks = []
         for common_file in common_files:
 
             async def check_mtime(file_path: pathlib.Path) -> 
tuple[pathlib.Path, bool]:
                 try:
                     parent_mtime = await 
aiofiles.os.path.getmtime(parent_revision_dir / file_path)
-                    current_mtime = await 
aiofiles.os.path.getmtime(current_revision_dir / file_path)
-                    return file_path, parent_mtime != current_mtime
+                    latest_mtime = await 
aiofiles.os.path.getmtime(latest_revision_dir / file_path)
+                    return file_path, parent_mtime != latest_mtime
                 except OSError:
                     # Treat errors as modified
                     return file_path, True
@@ -202,22 +166,11 @@ async def _revisions_process(
         modified_files = {f for f, modified in results if modified}
     else:
         # First revision, all files are considered added
-        added_files = current_revision_files
+        added_files = latest_revision_files
 
-    try:
-        editor = rev_name.split("@", 1)[0]
-        timestamp = sort_key(rev_name)
-    except (ValueError, IndexError):
-        editor = "Unknown"
-        timestamp = None
-
-    revision_data = {
-        "name": rev_name,
-        "editor": editor,
-        "timestamp": timestamp,
-        "parent": parent_name,
-        "added": sorted(list(added_files)),
-        "removed": sorted(list(removed_files)),
-        "modified": sorted(list(modified_files)),
-    }
-    return revision_data, current_revision_files
+    files_diff = FilesDiff(
+        added=sorted(list(added_files)),
+        removed=sorted(list(removed_files)),
+        modified=sorted(list(modified_files)),
+    )
+    return latest_revision_files, files_diff
diff --git a/atr/routes/root.py b/atr/routes/root.py
index aef6220..0aed1c2 100644
--- a/atr/routes/root.py
+++ b/atr/routes/root.py
@@ -59,10 +59,10 @@ async def index() -> response.Response | str:
                     sqlmodel.select(models.Release)
                     .where(
                         models.Release.project_name == project.name,
-                        
db.validate_instrumented_attribute(models.Release.phase).in_(active_phases),
+                        
models.validate_instrumented_attribute(models.Release.phase).in_(active_phases),
                     )
-                    
.options(orm.selectinload(db.validate_instrumented_attribute(models.Release.project)))
-                    
.order_by(db.validate_instrumented_attribute(models.Release.created).desc())
+                    
.options(orm.selectinload(models.validate_instrumented_attribute(models.Release.project)))
+                    
.order_by(models.validate_instrumented_attribute(models.Release.created).desc())
                 )
                 result = await data.execute(stmt)
                 active_releases = result.scalars().all()
diff --git a/atr/routes/start.py b/atr/routes/start.py
index 3d99ea6..89d40c6 100644
--- a/atr/routes/start.py
+++ b/atr/routes/start.py
@@ -63,39 +63,41 @@ async def create_release_draft(project_name: str, version: 
str, asf_uid: str) ->
                     errorcode=403,
                 )
 
+    # TODO: Consider using Release.revision instead of ./latest
+    async with db.session() as data:
+        async with data.begin():
+            # Check whether the release already exists
+            if release := await data.release(project_name=project.name, 
version=version).get():
+                if release.phase == 
models.ReleasePhase.RELEASE_CANDIDATE_DRAFT:
+                    raise routes.FlashError(f"A draft for {project_name} 
{version} already exists.")
+                else:
+                    raise routes.FlashError(
+                        f"A release ({release.phase.value}) for {project_name} 
{version} already exists."
+                    )
+
+            # Validate the version name
+            # TODO: We should check that it's bigger than the current version
+            if version_name_error := util.version_name_error(version):
+                raise routes.FlashError(f'Invalid version name "{version}": 
{version_name_error}')
+
+            release = models.Release(
+                stage=models.ReleaseStage.RELEASE_CANDIDATE,
+                phase=models.ReleasePhase.RELEASE_CANDIDATE_DRAFT,
+                project_name=project.name,
+                project=project,
+                version=version,
+                created=datetime.datetime.now(datetime.UTC),
+            )
+            data.add(release)
+
+        await data.refresh(release)
+
     async with revision.create_and_manage(project_name, version, asf_uid) as (
         _new_revision_dir,
-        _new_revision_name,
+        _new_revision_number,
     ):
-        # TODO: Consider using Release.revision instead of ./latest
-        async with db.session() as data:
-            async with data.begin():
-                # Check whether the release already exists
-                if release := await data.release(project_name=project.name, 
version=version).get():
-                    if release.phase == 
models.ReleasePhase.RELEASE_CANDIDATE_DRAFT:
-                        raise routes.FlashError(f"A draft for {project_name} 
{version} already exists.")
-                    else:
-                        raise routes.FlashError(
-                            f"A release ({release.phase.value}) for 
{project_name} {version} already exists."
-                        )
-
-                # Validate the version name
-                # TODO: We should check that it's bigger than the current 
version
-                if version_name_error := util.version_name_error(version):
-                    raise routes.FlashError(f'Invalid version name 
"{version}": {version_name_error}')
-
-                release = models.Release(
-                    stage=models.ReleaseStage.RELEASE_CANDIDATE,
-                    phase=models.ReleasePhase.RELEASE_CANDIDATE_DRAFT,
-                    project_name=project.name,
-                    project=project,
-                    version=version,
-                    created=datetime.datetime.now(datetime.UTC),
-                )
-                data.add(release)
-
-            await data.refresh(release)
-            return release, project
+        pass
+    return release, project
 
 
 @routes.committer("/start/<project_name>", methods=["GET", "POST"])
diff --git a/atr/routes/upload.py b/atr/routes/upload.py
index 4963410..6a60291 100644
--- a/atr/routes/upload.py
+++ b/atr/routes/upload.py
@@ -137,7 +137,7 @@ async def _upload_files(
     """Process and save the uploaded files into a new draft revision."""
     async with revision.create_and_manage(project_name, version_name, asf_uid) 
as (
         new_revision_dir,
-        _new_revision_name,
+        _new_revision_number,
     ):
 
         def get_target_path(file: datastructures.FileStorage) -> pathlib.Path:
diff --git a/atr/tasks/__init__.py b/atr/tasks/__init__.py
index ad037d0..d7202ae 100644
--- a/atr/tasks/__init__.py
+++ b/atr/tasks/__init__.py
@@ -55,19 +55,19 @@ async def asc_checks(release: models.Release, revision: 
str, signature_path: str
 
 
 async def draft_checks(
-    project_name: str, release_version: str, revision: str, caller_data: 
db.Session | None = None
+    project_name: str, release_version: str, revision_number: str, 
caller_data: db.Session | None = None
 ) -> int:
     """Core logic to analyse a draft revision and queue checks."""
     # Construct path to the specific revision
     # We don't have the release object here, so we can't use 
util.release_directory
-    revision_path = util.get_unfinished_dir() / project_name / release_version 
/ revision
+    revision_path = util.get_unfinished_dir() / project_name / release_version 
/ revision_number
     relative_paths = [path async for path in 
util.paths_recursive(revision_path)]
 
     async with ensure_session(caller_data) as data:
         release = await data.release(name=models.release_name(project_name, 
release_version), _committee=True).demand(
             RuntimeError("Release not found")
         )
-        for path_idx, path in enumerate(relative_paths):
+        for path in relative_paths:
             path_str = str(path)
             task_function: Callable[[models.Release, str, str], 
Awaitable[list[models.Task]]] | None = None
             for suffix, func in TASK_FUNCTIONS.items():
@@ -75,11 +75,11 @@ async def draft_checks(
                     task_function = func
                     break
             if task_function:
-                for task in await task_function(release, revision, path_str):
-                    task.revision = revision
+                for task in await task_function(release, revision_number, 
path_str):
+                    task.revision_number = revision_number
                     data.add(task)
 
-        path_check_task = queued(models.TaskType.PATHS_CHECK, release, 
revision)
+        path_check_task = queued(models.TaskType.PATHS_CHECK, release, 
revision_number)
         data.add(path_check_task)
         if caller_data is None:
             await data.commit()
@@ -94,7 +94,7 @@ def ensure_session(caller_data: db.Session | None) -> 
db.Session | contextlib.nu
 
 
 async def keys_import_file(
-    release_name: str, revision: str, abs_keys_path: str, caller_data: 
db.Session | None = None
+    release_name: str, revision_number: str, abs_keys_path: str, caller_data: 
db.Session | None = None
 ) -> None:
     """Import a KEYS file from a draft release candidate revision."""
     async with ensure_session(caller_data) as data:
@@ -106,7 +106,7 @@ async def keys_import_file(
                     release_name=release_name,
                     abs_keys_path=abs_keys_path,
                 ).model_dump(),
-                revision=revision,
+                revision_number=revision_number,
                 primary_rel_path=None,
             )
         )
@@ -116,7 +116,7 @@ async def keys_import_file(
 def queued(
     task_type: models.TaskType,
     release: models.Release,
-    revision: str,
+    revision_number: str,
     primary_rel_path: str | None = None,
     extra_args: dict[str, Any] | None = None,
 ) -> models.Task:
@@ -125,7 +125,7 @@ def queued(
         task_type=task_type,
         task_args=extra_args or {},
         release_name=release.name,
-        revision=revision,
+        revision_number=revision_number,
         primary_rel_path=primary_rel_path,
     )
 
diff --git a/atr/tasks/bulk.py b/atr/tasks/bulk.py
index 1fb3b4d..4380578 100644
--- a/atr/tasks/bulk.py
+++ b/atr/tasks/bulk.py
@@ -29,9 +29,9 @@ import aiohttp
 import sqlalchemy
 import sqlalchemy.ext.asyncio
 
+import atr.config as config
 import atr.db.models as models
 import atr.tasks.task as task
-from atr import config
 
 # Configure detailed logging
 _LOGGER: Final = logging.getLogger(__name__)
diff --git a/atr/tasks/checks/__init__.py b/atr/tasks/checks/__init__.py
index fbd1344..71e7542 100644
--- a/atr/tasks/checks/__init__.py
+++ b/atr/tasks/checks/__init__.py
@@ -41,7 +41,7 @@ import atr.util as util
 class FunctionArguments:
     recorder: Callable[[], Awaitable[Recorder]]
     release_name: str
-    revision: str
+    revision_number: str
     primary_rel_path: str | None
     extra_args: dict[str, Any]
 
@@ -60,14 +60,14 @@ class Recorder:
         self,
         checker: str | Callable[..., Any],
         release_name: str,
-        revision: str,
+        revision_number: str,
         primary_rel_path: str | None = None,
         member_rel_path: str | None = None,
         afresh: bool = True,
     ) -> None:
         self.checker = function_key(checker) if callable(checker) else checker
         self.release_name = release_name
-        self.revision = revision
+        self.revision_number = revision_number
         self.primary_rel_path = primary_rel_path
         self.member_rel_path = member_rel_path
         self.afresh = afresh
@@ -82,12 +82,12 @@ class Recorder:
         cls,
         checker: str | Callable[..., Any],
         release_name: str,
-        revision: str,
+        revision_number: str,
         primary_rel_path: str | None = None,
         member_rel_path: str | None = None,
         afresh: bool = True,
     ) -> Recorder:
-        recorder = cls(checker, release_name, revision, primary_rel_path, 
member_rel_path, afresh)
+        recorder = cls(checker, release_name, revision_number, 
primary_rel_path, member_rel_path, afresh)
         if afresh is True:
             # Clear outer path whether it's specified or not
             await recorder.clear(primary_rel_path=primary_rel_path, 
member_rel_path=member_rel_path)
@@ -113,7 +113,7 @@ class Recorder:
 
         result = models.CheckResult(
             release_name=self.release_name,
-            revision=self.revision,
+            revision_number=self.revision_number,
             checker=self.checker,
             primary_rel_path=primary_rel_path or self.primary_rel_path,
             member_rel_path=member_rel_path,
@@ -136,7 +136,7 @@ class Recorder:
         base_dir = util.get_unfinished_dir()
         project_part = self.project_name
         version_part = self.version_name
-        revision_part = self.revision
+        revision_part = self.revision_number
 
         # Determine the relative path part
         rel_path_part: str | None = None
@@ -154,11 +154,11 @@ class Recorder:
     async def clear(self, primary_rel_path: str | None = None, 
member_rel_path: str | None = None) -> None:
         async with db.session() as data:
             stmt = sqlmodel.delete(models.CheckResult).where(
-                
db.validate_instrumented_attribute(models.CheckResult.release_name) == 
self.release_name,
-                
db.validate_instrumented_attribute(models.CheckResult.revision) == 
self.revision,
-                db.validate_instrumented_attribute(models.CheckResult.checker) 
== self.checker,
-                
db.validate_instrumented_attribute(models.CheckResult.primary_rel_path) == 
primary_rel_path,
-                
db.validate_instrumented_attribute(models.CheckResult.member_rel_path) == 
member_rel_path,
+                
models.validate_instrumented_attribute(models.CheckResult.release_name) == 
self.release_name,
+                
models.validate_instrumented_attribute(models.CheckResult.revision_number) == 
self.revision_number,
+                
models.validate_instrumented_attribute(models.CheckResult.checker) == 
self.checker,
+                
models.validate_instrumented_attribute(models.CheckResult.primary_rel_path) == 
primary_rel_path,
+                
models.validate_instrumented_attribute(models.CheckResult.member_rel_path) == 
member_rel_path,
             )
             await data.execute(stmt)
             await data.commit()
diff --git a/atr/tasks/checks/paths.py b/atr/tasks/checks/paths.py
index b96c77f..5afff99 100644
--- a/atr/tasks/checks/paths.py
+++ b/atr/tasks/checks/paths.py
@@ -38,21 +38,21 @@ async def check(args: checks.FunctionArguments) -> None:
     recorder_errors = await checks.Recorder.create(
         checker=checks.function_key(check) + "_errors",
         release_name=args.release_name,
-        revision=args.revision,
+        revision_number=args.revision_number,
         primary_rel_path=None,
         afresh=True,
     )
     recorder_warnings = await checks.Recorder.create(
         checker=checks.function_key(check) + "_warnings",
         release_name=args.release_name,
-        revision=args.revision,
+        revision_number=args.revision_number,
         primary_rel_path=None,
         afresh=True,
     )
     recorder_success = await checks.Recorder.create(
         checker=checks.function_key(check) + "_success",
         release_name=args.release_name,
-        revision=args.revision,
+        revision_number=args.revision_number,
         primary_rel_path=None,
         afresh=True,
     )
diff --git a/atr/tasks/checks/signature.py b/atr/tasks/checks/signature.py
index e9a52fd..7da35b6 100644
--- a/atr/tasks/checks/signature.py
+++ b/atr/tasks/checks/signature.py
@@ -82,7 +82,7 @@ async def _check_core_logic(committee_name: str, 
artifact_path: str, signature_p
             sqlmodel.select(models.PublicSigningKey)
             .join(models.KeyLink)
             .join(models.Committee)
-            .where(db.validate_instrumented_attribute(models.Committee.name) 
== committee_name)
+            
.where(models.validate_instrumented_attribute(models.Committee.name) == 
committee_name)
         )
         result = await session.execute(statement)
         public_keys = [key.ascii_armored_key for key in result.scalars().all()]
diff --git a/atr/tasks/keys.py b/atr/tasks/keys.py
index 57bf43a..aa2a6d1 100644
--- a/atr/tasks/keys.py
+++ b/atr/tasks/keys.py
@@ -25,7 +25,7 @@ import atr.db.interaction as interaction
 import atr.db.models as models
 import atr.schema as schema
 import atr.tasks.checks as checks
-from atr import util
+import atr.util as util
 
 _LOGGER: Final = logging.getLogger(__name__)
 
diff --git a/atr/tasks/svn.py b/atr/tasks/svn.py
index 0e2db1c..2710beb 100644
--- a/atr/tasks/svn.py
+++ b/atr/tasks/svn.py
@@ -71,7 +71,7 @@ async def _import_files_core(args: SvnImport) -> str:
 
     async with revision.create_and_manage(args.project_name, 
args.version_name, args.asf_uid) as (
         new_revision_dir,
-        new_revision_name,
+        new_revision_number,
     ):
         _LOGGER.debug(f"Created revision directory: {new_revision_dir}")
 
@@ -117,7 +117,7 @@ async def _import_files_core(args: SvnImport) -> str:
         await aiofiles.os.rmdir(temp_export_path)
         _LOGGER.info(f"Removed temporary export directory: {temp_export_path}")
 
-    return f"Successfully imported files from SVN into revision 
{new_revision_name}"
+    return f"Successfully imported files from SVN into revision 
{new_revision_number}"
 
 
 async def _import_files_core_run_svn_export(svn_command: list[str], 
temp_export_path: pathlib.Path) -> None:
diff --git a/atr/templates/announce-selected.html 
b/atr/templates/announce-selected.html
index 89d2bfe..e05c014 100644
--- a/atr/templates/announce-selected.html
+++ b/atr/templates/announce-selected.html
@@ -47,7 +47,7 @@
     </div>
     <div class="card-body">
       <div class="d-flex flex-wrap gap-3 pb-1 text-secondary fs-6">
-        <span class="page-preview-meta-item">Revision: {{ release.revision 
}}</span>
+        <span class="page-preview-meta-item">Revision: {{ 
release.latest_revision_number }}</span>
         <span class="page-preview-meta-item">Created: {{ 
release.created.strftime("%Y-%m-%d %H:%M:%S UTC") }}</span>
       </div>
       <!--
diff --git a/atr/templates/check-selected-path-table.html 
b/atr/templates/check-selected-path-table.html
index ebe5bed..4e94786 100644
--- a/atr/templates/check-selected-path-table.html
+++ b/atr/templates/check-selected-path-table.html
@@ -3,8 +3,8 @@
   <table class="table table-hover align-middle table-sm mb-0 border">
     <tbody>
       {% for path in paths %}
-        {% set has_errors = info.errors[path]|length > 0 %}
-        {% set has_warnings = info.warnings[path]|length > 0 %}
+        {% set has_errors = info and (info.errors[path]|length > 0) %}
+        {% set has_warnings = info and (info.warnings[path]|length > 0) %}
         {% set row_id = path|string|slugify %}
 
         {# Manual striping for pairs of rows #}
@@ -29,11 +29,11 @@
               {% set icon_class = "text-warning" %}
             {% endif %}
 
-            {% if path in info.artifacts %}
+            {% if info and (path in info.artifacts) %}
               <i class="bi bi-archive {{ icon_class }}"
                  title="Artifact"
                  aria-label="Artifact"></i>
-            {% elif path in info.metadata %}
+            {% elif info and (path in info.metadata) %}
               <i class="bi bi-file-earmark-text {{ icon_class }}"
                  title="Metadata"
                  aria-label="Metadata"></i>
@@ -71,7 +71,7 @@
               {% elif has_warnings %}
                 <a href="{{ as_url(routes.report.selected_path, 
project_name=project_name, version_name=version_name, rel_path=path) }}"
                    class="btn btn-sm btn-outline-warning">Show {{ 
info.warnings[path]|length }} {{ "warning" if info.warnings[path]|length == 1 
else "warnings" }}</a>
-              {% elif info.successes[path] %}
+              {% elif info and (path in info.successes) %}
                 <a href="{{ as_url(routes.report.selected_path, 
project_name=project_name, version_name=version_name, rel_path=path) }}"
                    class="btn btn-sm btn-outline-success"
                    title="Show report for {{ path }}">Show report</a>
diff --git a/atr/templates/check-selected-release-info.html 
b/atr/templates/check-selected-release-info.html
index 7e83ca9..0a4648b 100644
--- a/atr/templates/check-selected-release-info.html
+++ b/atr/templates/check-selected-release-info.html
@@ -17,10 +17,10 @@
         <p>
           <strong>Created:</strong> {{ format_datetime(release.created) }}
         </p>
-        {% if revision_time %}
+        {% if (phase == "release_candidate_draft") and revision_time %}
           <p>
             <strong>Revision:</strong>
-            <a href="{{ as_url(routes.revisions.selected, 
project_name=project_name, version_name=version_name) }}#{{ 
revision_name_from_link }}">
+            <a href="{{ as_url(routes.revisions.selected, 
project_name=project_name, version_name=version_name) }}#{{ revision_number }}">
               {{ format_datetime(revision_time) }}
             </a>
             {% if revision_editor %}by {{ revision_editor }}{% endif %}
@@ -39,7 +39,7 @@
         <a href="{{ as_url(routes.revisions.selected, 
project_name=release.project.name, version_name=release.version) }}"
            title="View revision history"
            class="btn btn-secondary"><i class="bi bi-clock-history me-1"></i> 
Revisions</a>
-        <a href="{{ as_url(routes.voting.selected_revision, 
project_name=release.project.name, version_name=release.version, 
revision=release.revision) }}"
+        <a href="{{ as_url(routes.voting.selected_revision, 
project_name=release.project.name, version_name=release.version, 
revision=release.latest_revision_number) }}"
            title="Start a vote on this draft"
            class="btn btn-success"><i class="bi bi-check-circle me-1"></i> 
Start voting</a>
         <button class="btn btn-danger"
diff --git a/atr/templates/finish-selected.html 
b/atr/templates/finish-selected.html
index 691c16e..d6a7683 100644
--- a/atr/templates/finish-selected.html
+++ b/atr/templates/finish-selected.html
@@ -31,7 +31,7 @@
     </div>
     <div class="card-body">
       <div class="d-flex flex-wrap gap-3 pb-3 mb-3 border-bottom 
text-secondary fs-6">
-        <span class="page-preview-meta-item">Revision: {{ release.revision 
}}</span>
+        <span class="page-preview-meta-item">Revision: {{ 
release.latest_revision_number }}</span>
         <span class="page-preview-meta-item">Created: {{ 
release.created.strftime("%Y-%m-%d %H:%M:%S UTC") }}</span>
       </div>
       <div>
diff --git a/atr/templates/revisions-selected.html 
b/atr/templates/revisions-selected.html
index aa5796d..bb2a5dc 100644
--- a/atr/templates/revisions-selected.html
+++ b/atr/templates/revisions-selected.html
@@ -42,77 +42,77 @@
   </h1>
 
   {% if revision_history %}
-    {% for revision in revision_history %}
-      <div id="{{ revision.name }}" class="card mb-3">
+    {% for revision, files_diff in revision_history %}
+      <div id="{{ revision.number }}" class="card mb-3">
         <div class="card-header d-flex justify-content-between 
align-items-center">
           <h2 class="fs-6 my-2 mx-0 p-0 border-0 atr-sans">
-            <a href="#{{ revision.name }}"
-               class="fw-bold text-decoration-none text-body">{{ revision.name 
}}</a>
-            {% if revision.name == current_revision_name %}<span class="badge 
bg-primary ms-2">Current</span>{% endif %}
+            <a href="#{{ revision.number }}"
+               class="fw-bold text-decoration-none text-body">{{ 
revision.number }}</a>
+            {% if revision.number == latest_revision_number %}<span 
class="badge bg-primary ms-2">Current</span>{% endif %}
           </h2>
           <span class="fs-6 text-muted">
-            {% if revision.timestamp %}
-              {{ revision.timestamp.strftime("%Y-%m-%d %H:%M:%S UTC") }}
+            {% if revision.created %}
+              {{ revision.created.strftime("%Y-%m-%d %H:%M:%S UTC") }}
             {% else %}
               Invalid timestamp
             {% endif %}
-            by {{ revision.editor }}
+            by {{ revision.asfuid }}
           </span>
         </div>
         <div class="card-body">
           {% if revision.parent %}
             <p class="small text-muted mb-2">
-              Changes from parent revision: <a href="#{{ revision.parent 
}}">{{ revision.parent }}</a>
+              Changes from parent revision: <a href="#{{ 
revision.parent.number }}">{{ revision.parent.number }}</a>
             </p>
           {% else %}
             <p class="small text-muted mb-2">Initial revision</p>
           {% endif %}
 
-          {% if not revision.added and not revision.removed and not 
revision.modified %}
+          {% if (not files_diff.added) and (not files_diff.removed) and (not 
files_diff.modified) %}
             <p class="fst-italic text-muted">No file changes detected in this 
revision.</p>
           {% else %}
-            {% if revision.added %}
+            {% if files_diff.added %}
               <h3 class="fs-6 fw-semibold mt-3 atr-sans">
-                Added files <span class="font-monospace fw-normal">({{ 
revision.added|length }})</span>
+                Added files <span class="font-monospace fw-normal">({{ 
files_diff.added|length }})</span>
               </h3>
               <ul class="list-group list-group-flush mb-2">
-                {% for file in revision.added %}
+                {% for file in files_diff.added %}
                   <li class="list-group-item list-group-item-success py-1 px-3 
small rounded-2">{{ file }}</li>
                 {% endfor %}
               </ul>
             {% endif %}
 
-            {% if revision.removed %}
+            {% if files_diff.removed %}
               <h3 class="fs-6 fw-semibold mt-3 atr-sans">
-                Removed files <span class="font-monospace fw-normal">({{ 
revision.removed|length }})</span>
+                Removed files <span class="font-monospace fw-normal">({{ 
files_diff.removed|length }})</span>
               </h3>
               <ul class="list-group list-group-flush mb-2">
-                {% for file in revision.removed %}
+                {% for file in files_diff.removed %}
                   <li class="list-group-item list-group-item-danger py-1 px-3 
small rounded-2">{{ file }}</li>
                 {% endfor %}
               </ul>
             {% endif %}
 
-            {% if revision.modified %}
+            {% if files_diff.modified %}
               <h3 class="fs-6 fw-semibold mt-3 atr-sans">
-                Modified files <span class="font-monospace fw-normal">({{ 
revision.modified|length }})</span>
+                Modified files <span class="font-monospace fw-normal">({{ 
files_diff.modified|length }})</span>
               </h3>
               <ul class="list-group list-group-flush mb-2">
-                {% for file in revision.modified %}
+                {% for file in files_diff.modified %}
                   <li class="list-group-item list-group-item-warning py-1 px-3 
small rounded-2">{{ file }}</li>
                 {% endfor %}
               </ul>
             {% endif %}
           {% endif %}
 
-          {% if revision.name != current_revision_name %}
+          {% if revision.number != latest_revision_number %}
             <h3 class="fs-6 fw-semibold mt-3 atr-sans">Actions</h3>
             <div class="mt-3">
               <form method="post"
                     action="{{ as_url(routes.revisions.selected_post, 
project_name=project_name, version_name=version_name) }}">
                 {{ empty_form.hidden_tag() }}
 
-                <input type="hidden" name="revision_name" value="{{ 
revision.name }}" />
+                <input type="hidden" name="revision_number" value="{{ 
revision.number }}" />
                 <button type="submit" class="btn btn-sm 
btn-outline-danger">Set this revision as current</button>
               </form>
             </div>
diff --git a/atr/templates/voting-selected-revision.html 
b/atr/templates/voting-selected-revision.html
index ed67af2..31cc06e 100644
--- a/atr/templates/voting-selected-revision.html
+++ b/atr/templates/voting-selected-revision.html
@@ -50,7 +50,7 @@
   <form method="post"
         id="vote-initiate-form"
         class="atr-canary py-4 px-5"
-        action="{{ as_url(routes.voting.selected_revision, 
project_name=release.project.name, version_name=release.version, 
revision=release.revision) }}"
+        action="{{ as_url(routes.voting.selected_revision, 
project_name=release.project.name, version_name=release.version, 
revision=release.unwrap_revision_number) }}"
         novalidate>
     {{ form.hidden_tag() }}
 
diff --git a/atr/util.py b/atr/util.py
index 23e37da..1136d17 100644
--- a/atr/util.py
+++ b/atr/util.py
@@ -24,7 +24,6 @@ import hashlib
 import logging
 import pathlib
 import re
-import shutil
 import tarfile
 import tempfile
 import uuid
@@ -33,6 +32,7 @@ from collections.abc import AsyncGenerator, Callable, Sequence
 from typing import Any, Final, TypeVar
 
 import aiofiles.os
+import aioshutil
 import asfquart
 import asfquart.base as base
 import asfquart.session as session
@@ -140,7 +140,10 @@ async def async_temporary_directory(
     try:
         yield pathlib.Path(temp_dir_path)
     finally:
-        await asyncio.to_thread(shutil.rmtree, temp_dir_path, 
ignore_errors=True)
+        try:
+            await aioshutil.rmtree(temp_dir_path)  # type: ignore[call-arg]
+        except Exception:
+            pass
 
 
 def compute_sha3_256(file_data: bytes) -> str:
@@ -355,7 +358,7 @@ async def number_of_release_files(release: models.Release) 
-> int:
     """Return the number of files in a release."""
     path_project = release.project.name
     path_version = release.version
-    path_revision = release.revision or "force-error"
+    path_revision = release.unwrap_revision_number
     match release.phase:
         case models.ReleasePhase.RELEASE_CANDIDATE_DRAFT:
             path = get_unfinished_dir() / path_project / path_version / 
path_revision
@@ -464,9 +467,10 @@ async def read_file_for_viewer(full_path: pathlib.Path, 
max_size: int) -> tuple[
 
 def release_directory(release: models.Release) -> pathlib.Path:
     """Return the absolute path to the directory containing the active files 
for a given release phase."""
-    if release.revision is None:
+    latest_revision_number = release.latest_revision_number
+    if latest_revision_number is None:
         return release_directory_base(release)
-    return release_directory_base(release) / release.revision
+    return release_directory_base(release) / latest_revision_number
 
 
 def release_directory_base(release: models.Release) -> pathlib.Path:
diff --git a/atr/worker.py b/atr/worker.py
index e7f51c9..41b5aa1 100644
--- a/atr/worker.py
+++ b/atr/worker.py
@@ -145,7 +145,7 @@ async def _task_next_claim() -> tuple[int, str, list[str] | 
dict[str, Any]] | No
             oldest_queued_task = (
                 sqlmodel.select(models.Task.id)
                 .where(models.Task.status == task.QUEUED)
-                
.order_by(db.validate_instrumented_attribute(models.Task.added).asc())
+                
.order_by(models.validate_instrumented_attribute(models.Task.added).asc())
                 .limit(1)
             )
 
@@ -157,9 +157,9 @@ async def _task_next_claim() -> tuple[int, str, list[str] | 
dict[str, Any]] | No
                 .where(sqlmodel.and_(models.Task.id == oldest_queued_task, 
models.Task.status == task.QUEUED))
                 .values(status=task.ACTIVE, started=now, pid=os.getpid())
                 .returning(
-                    db.validate_instrumented_attribute(models.Task.id),
-                    db.validate_instrumented_attribute(models.Task.task_type),
-                    db.validate_instrumented_attribute(models.Task.task_args),
+                    models.validate_instrumented_attribute(models.Task.id),
+                    
models.validate_instrumented_attribute(models.Task.task_type),
+                    
models.validate_instrumented_attribute(models.Task.task_args),
                 )
             )
 
@@ -201,8 +201,8 @@ async def _task_process(task_id: int, task_type: str, 
task_args: list[str] | dic
             # Validate required fields from the Task object itself
             if task_obj.release_name is None:
                 raise ValueError(f"Task {task_id} is missing required 
release_name")
-            if task_obj.revision is None:
-                raise ValueError(f"Task {task_id} is missing required 
revision")
+            if task_obj.revision_number is None:
+                raise ValueError(f"Task {task_id} is missing required 
revision_number")
 
             if not isinstance(task_args, dict):
                 raise TypeError(
@@ -214,14 +214,14 @@ async def _task_process(task_id: int, task_type: str, 
task_args: list[str] | dic
                 return await checks.Recorder.create(
                     checker=handler,
                     release_name=task_obj.release_name or "",
-                    revision=task_obj.revision or "",
+                    revision_number=task_obj.revision_number or "",
                     primary_rel_path=task_obj.primary_rel_path,
                 )
 
             function_arguments = checks.FunctionArguments(
                 recorder=recorder_factory,
                 release_name=task_obj.release_name,
-                revision=task_obj.revision,
+                revision_number=task_obj.revision_number,
                 primary_rel_path=task_obj.primary_rel_path,
                 extra_args=task_args,
             )
diff --git a/migrations/versions/0001_2025.05.06_38b0d2de.py 
b/migrations/versions/0001_2025.05.15_32c59be6.py
similarity index 70%
rename from migrations/versions/0001_2025.05.06_38b0d2de.py
rename to migrations/versions/0001_2025.05.15_32c59be6.py
index 883b3c2..9437269 100644
--- a/migrations/versions/0001_2025.05.06_38b0d2de.py
+++ b/migrations/versions/0001_2025.05.15_32c59be6.py
@@ -1,8 +1,8 @@
 """Use the existing ATR schema
 
-Revision ID: 0001_2025.05.06_38b0d2de
+Revision ID: 0001_2025.05.15_32c59be6
 Revises:
-Create Date: 2025-05-06 14:44:00.401362+00:00
+Create Date: 2025-05-15 15:44:04.208248+00:00
 """
 
 from collections.abc import Sequence
@@ -13,7 +13,7 @@ from alembic import op
 import atr.db.models
 
 # Revision identifiers, used by Alembic
-revision: str = "0001_2025.05.06_38b0d2de"
+revision: str = "0001_2025.05.15_32c59be6"
 down_revision: str | None = None
 branch_labels: str | Sequence[str] | None = None
 depends_on: str | Sequence[str] | None = None
@@ -30,11 +30,10 @@ def upgrade() -> None:
         sa.Column("committers", sa.JSON(), nullable=True),
         sa.Column("release_managers", sa.JSON(), nullable=True),
         sa.ForeignKeyConstraint(
-            ["parent_committee_name"],
-            ["committee.name"],
+            ["parent_committee_name"], ["committee.name"], 
name=op.f("fk_committee_parent_committee_name_committee")
         ),
-        sa.PrimaryKeyConstraint("name"),
-        sa.UniqueConstraint("name"),
+        sa.PrimaryKeyConstraint("name", name=op.f("pk_committee")),
+        sa.UniqueConstraint("name", name=op.f("uq_committee_name")),
     )
     op.create_table(
         "publicsigningkey",
@@ -46,8 +45,8 @@ def upgrade() -> None:
         sa.Column("declared_uid", sa.String(), nullable=True),
         sa.Column("apache_uid", sa.String(), nullable=False),
         sa.Column("ascii_armored_key", sa.String(), nullable=False),
-        sa.PrimaryKeyConstraint("fingerprint"),
-        sa.UniqueConstraint("fingerprint"),
+        sa.PrimaryKeyConstraint("fingerprint", 
name=op.f("pk_publicsigningkey")),
+        sa.UniqueConstraint("fingerprint", 
name=op.f("uq_publicsigningkey_fingerprint")),
     )
     op.create_table(
         "releasepolicy",
@@ -59,21 +58,21 @@ def upgrade() -> None:
         sa.Column("pause_for_rm", sa.Boolean(), nullable=False),
         sa.Column("start_vote_template", sa.String(), nullable=False),
         sa.Column("announce_release_template", sa.String(), nullable=False),
-        sa.PrimaryKeyConstraint("id"),
+        sa.PrimaryKeyConstraint("id", name=op.f("pk_releasepolicy")),
     )
     op.create_table(
         "sshkey",
         sa.Column("fingerprint", sa.String(), nullable=False),
         sa.Column("key", sa.String(), nullable=False),
         sa.Column("asf_uid", sa.String(), nullable=False),
-        sa.PrimaryKeyConstraint("fingerprint"),
+        sa.PrimaryKeyConstraint("fingerprint", name=op.f("pk_sshkey")),
     )
     op.create_table(
         "textvalue",
         sa.Column("ns", sa.String(), nullable=False),
         sa.Column("key", sa.String(), nullable=False),
         sa.Column("value", sa.String(), nullable=False),
-        sa.PrimaryKeyConstraint("ns", "key"),
+        sa.PrimaryKeyConstraint("ns", "key", name=op.f("pk_textvalue")),
     )
     op.create_index(op.f("ix_textvalue_key"), "textvalue", ["key"], 
unique=False)
     op.create_index(op.f("ix_textvalue_ns"), "textvalue", ["ns"], unique=False)
@@ -82,14 +81,14 @@ def upgrade() -> None:
         sa.Column("committee_name", sa.String(), nullable=False),
         sa.Column("key_fingerprint", sa.String(), nullable=False),
         sa.ForeignKeyConstraint(
-            ["committee_name"],
-            ["committee.name"],
+            ["committee_name"], ["committee.name"], 
name=op.f("fk_keylink_committee_name_committee")
         ),
         sa.ForeignKeyConstraint(
             ["key_fingerprint"],
             ["publicsigningkey.fingerprint"],
+            name=op.f("fk_keylink_key_fingerprint_publicsigningkey"),
         ),
-        sa.PrimaryKeyConstraint("committee_name", "key_fingerprint"),
+        sa.PrimaryKeyConstraint("committee_name", "key_fingerprint", 
name=op.f("pk_keylink")),
     )
     op.create_table(
         "project",
@@ -106,16 +105,19 @@ def upgrade() -> None:
         sa.Column("created", atr.db.models.UTCDateTime(timezone=True), 
nullable=True),
         sa.Column("created_by", sa.String(), nullable=True),
         sa.ForeignKeyConstraint(
-            ["committee_name"],
-            ["committee.name"],
+            ["committee_name"], ["committee.name"], 
name=op.f("fk_project_committee_name_committee")
         ),
-        sa.ForeignKeyConstraint(["release_policy_id"], ["releasepolicy.id"], 
ondelete="CASCADE"),
         sa.ForeignKeyConstraint(
-            ["super_project_name"],
-            ["project.name"],
+            ["release_policy_id"],
+            ["releasepolicy.id"],
+            name=op.f("fk_project_release_policy_id_releasepolicy"),
+            ondelete="CASCADE",
+        ),
+        sa.ForeignKeyConstraint(
+            ["super_project_name"], ["project.name"], 
name=op.f("fk_project_super_project_name_project")
         ),
-        sa.PrimaryKeyConstraint("name"),
-        sa.UniqueConstraint("name"),
+        sa.PrimaryKeyConstraint("name", name=op.f("pk_project")),
+        sa.UniqueConstraint("name", name=op.f("uq_project_name")),
     )
     op.create_table(
         "distributionchannel",
@@ -127,10 +129,9 @@ def upgrade() -> None:
         sa.Column("automation_endpoint", sa.String(), nullable=False),
         sa.Column("project_name", sa.String(), nullable=False),
         sa.ForeignKeyConstraint(
-            ["project_name"],
-            ["project.name"],
+            ["project_name"], ["project.name"], 
name=op.f("fk_distributionchannel_project_name_project")
         ),
-        sa.PrimaryKeyConstraint("id"),
+        sa.PrimaryKeyConstraint("id", name=op.f("pk_distributionchannel")),
     )
     op.create_index(op.f("ix_distributionchannel_name"), 
"distributionchannel", ["name"], unique=True)
     op.create_table(
@@ -149,44 +150,59 @@ def upgrade() -> None:
         sa.Column("project_name", sa.String(), nullable=False),
         sa.Column("package_managers", sa.JSON(), nullable=True),
         sa.Column("version", sa.String(), nullable=False),
-        sa.Column("revision", sa.String(), nullable=True),
         sa.Column("sboms", sa.JSON(), nullable=True),
         sa.Column("release_policy_id", sa.Integer(), nullable=True),
         sa.Column("votes", sa.JSON(), nullable=True),
         sa.Column("vote_started", atr.db.models.UTCDateTime(timezone=True), 
nullable=True),
         sa.Column("vote_resolved", atr.db.models.UTCDateTime(timezone=True), 
nullable=True),
+        sa.ForeignKeyConstraint(["project_name"], ["project.name"], 
name=op.f("fk_release_project_name_project")),
         sa.ForeignKeyConstraint(
-            ["project_name"],
-            ["project.name"],
-        ),
-        sa.ForeignKeyConstraint(
-            ["release_policy_id"],
-            ["releasepolicy.id"],
+            ["release_policy_id"], ["releasepolicy.id"], 
name=op.f("fk_release_release_policy_id_releasepolicy")
         ),
-        sa.PrimaryKeyConstraint("name"),
-        sa.UniqueConstraint("name"),
+        sa.PrimaryKeyConstraint("name", name=op.f("pk_release")),
+        sa.UniqueConstraint("name", name=op.f("uq_release_name")),
         sa.UniqueConstraint("project_name", "version", 
name="unique_project_version"),
     )
-    op.create_index(op.f("ix_release_revision"), "release", ["revision"], 
unique=False)
     op.create_table(
         "checkresult",
         sa.Column("id", sa.Integer(), nullable=False),
         sa.Column("release_name", sa.String(), nullable=False),
+        sa.Column("revision_number", sa.String(), nullable=True),
         sa.Column("checker", sa.String(), nullable=False),
         sa.Column("primary_rel_path", sa.String(), nullable=True),
+        sa.Column("member_rel_path", sa.String(), nullable=True),
         sa.Column("created", atr.db.models.UTCDateTime(timezone=True), 
nullable=True),
         sa.Column(
             "status", sa.Enum("EXCEPTION", "FAILURE", "SUCCESS", "WARNING", 
name="checkresultstatus"), nullable=False
         ),
         sa.Column("message", sa.String(), nullable=False),
         sa.Column("data", sa.JSON(), nullable=True),
-        sa.ForeignKeyConstraint(
-            ["release_name"],
-            ["release.name"],
-        ),
-        sa.PrimaryKeyConstraint("id"),
+        sa.ForeignKeyConstraint(["release_name"], ["release.name"], 
name=op.f("fk_checkresult_release_name_release")),
+        sa.PrimaryKeyConstraint("id", name=op.f("pk_checkresult")),
     )
+    op.create_index(op.f("ix_checkresult_member_rel_path"), "checkresult", 
["member_rel_path"], unique=False)
     op.create_index(op.f("ix_checkresult_primary_rel_path"), "checkresult", 
["primary_rel_path"], unique=False)
+    op.create_index(op.f("ix_checkresult_revision_number"), "checkresult", 
["revision_number"], unique=False)
+    op.create_table(
+        "revision",
+        sa.Column("name", sa.String(), nullable=False),
+        sa.Column("release_name", sa.String(), nullable=True),
+        sa.Column("seq", sa.Integer(), nullable=False),
+        sa.Column("number", sa.String(), nullable=False),
+        sa.Column("asfuid", sa.String(), nullable=False),
+        sa.Column("created", atr.db.models.UTCDateTime(timezone=True), 
nullable=True),
+        sa.Column(
+            "phase",
+            sa.Enum("RELEASE_CANDIDATE_DRAFT", "RELEASE_CANDIDATE", 
"RELEASE_PREVIEW", "RELEASE", name="releasephase"),
+            nullable=False,
+        ),
+        sa.Column("parent_name", sa.String(), nullable=True),
+        sa.Column("description", sa.String(), nullable=True),
+        sa.ForeignKeyConstraint(["parent_name"], ["revision.name"], 
name=op.f("fk_revision_parent_name_revision")),
+        sa.ForeignKeyConstraint(["release_name"], ["release.name"], 
name=op.f("fk_revision_release_name_release")),
+        sa.PrimaryKeyConstraint("name", name=op.f("pk_revision")),
+        sa.UniqueConstraint("name", name=op.f("uq_revision_name")),
+    )
     op.create_table(
         "task",
         sa.Column("id", sa.Integer(), nullable=False),
@@ -195,6 +211,7 @@ def upgrade() -> None:
             "task_type",
             sa.Enum(
                 "HASHING_CHECK",
+                "KEYS_IMPORT_FILE",
                 "LICENSE_FILES",
                 "LICENSE_HEADERS",
                 "MESSAGE_SEND",
@@ -207,8 +224,6 @@ def upgrade() -> None:
                 "TARGZ_STRUCTURE",
                 "VOTE_INITIATE",
                 "ZIPFORMAT_INTEGRITY",
-                "ZIPFORMAT_LICENSE_FILES",
-                "ZIPFORMAT_LICENSE_HEADERS",
                 "ZIPFORMAT_STRUCTURE",
                 name="tasktype",
             ),
@@ -222,7 +237,7 @@ def upgrade() -> None:
         sa.Column("result", sa.JSON(), nullable=True),
         sa.Column("error", sa.String(), nullable=True),
         sa.Column("release_name", sa.String(), nullable=True),
-        sa.Column("draft_revision", sa.String(), nullable=True),
+        sa.Column("revision_number", sa.String(), nullable=True),
         sa.Column("primary_rel_path", sa.String(), nullable=True),
         sa.CheckConstraint(
             """
@@ -237,46 +252,30 @@ def upgrade() -> None:
                 OR (status = 'FAILED' AND completed IS NOT NULL AND error IS 
NOT NULL)
             )
             """,
-            name="valid_task_status_transitions",
+            name=op.f("ck_task_valid_task_status_transitions"),
         ),
-        sa.ForeignKeyConstraint(
-            ["release_name"],
-            ["release.name"],
-        ),
-        sa.PrimaryKeyConstraint("id"),
+        sa.ForeignKeyConstraint(["release_name"], ["release.name"], 
name=op.f("fk_task_release_name_release")),
+        sa.PrimaryKeyConstraint("id", name=op.f("pk_task")),
     )
     op.create_index(op.f("ix_task_added"), "task", ["added"], unique=False)
-    op.create_index(op.f("ix_task_draft_revision"), "task", 
["draft_revision"], unique=False)
     op.create_index(op.f("ix_task_primary_rel_path"), "task", 
["primary_rel_path"], unique=False)
+    op.create_index(op.f("ix_task_revision_number"), "task", 
["revision_number"], unique=False)
     op.create_index(op.f("ix_task_status"), "task", ["status"], unique=False)
     op.create_index("ix_task_status_added", "task", ["status", "added"], 
unique=False)
-    op.create_table(
-        "checkresulthistorylink",
-        sa.Column("check_result_id", sa.Integer(), nullable=False),
-        sa.Column("draft_revision", sa.String(), nullable=False),
-        sa.ForeignKeyConstraint(
-            ["check_result_id"],
-            ["checkresult.id"],
-        ),
-        sa.PrimaryKeyConstraint("check_result_id", "draft_revision"),
-    )
-    op.create_index(
-        op.f("ix_checkresulthistorylink_draft_revision"), 
"checkresulthistorylink", ["draft_revision"], unique=False
-    )
 
 
 def downgrade() -> None:
-    op.drop_index(op.f("ix_checkresulthistorylink_draft_revision"), 
table_name="checkresulthistorylink")
-    op.drop_table("checkresulthistorylink")
     op.drop_index("ix_task_status_added", table_name="task")
     op.drop_index(op.f("ix_task_status"), table_name="task")
+    op.drop_index(op.f("ix_task_revision_number"), table_name="task")
     op.drop_index(op.f("ix_task_primary_rel_path"), table_name="task")
-    op.drop_index(op.f("ix_task_draft_revision"), table_name="task")
     op.drop_index(op.f("ix_task_added"), table_name="task")
     op.drop_table("task")
+    op.drop_table("revision")
+    op.drop_index(op.f("ix_checkresult_revision_number"), 
table_name="checkresult")
     op.drop_index(op.f("ix_checkresult_primary_rel_path"), 
table_name="checkresult")
+    op.drop_index(op.f("ix_checkresult_member_rel_path"), 
table_name="checkresult")
     op.drop_table("checkresult")
-    op.drop_index(op.f("ix_release_revision"), table_name="release")
     op.drop_table("release")
     op.drop_index(op.f("ix_distributionchannel_name"), 
table_name="distributionchannel")
     op.drop_table("distributionchannel")
diff --git a/migrations/versions/0002_2025.05.08_32fdbfe0.py 
b/migrations/versions/0002_2025.05.08_32fdbfe0.py
deleted file mode 100644
index 63cf6ff..0000000
--- a/migrations/versions/0002_2025.05.08_32fdbfe0.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""Add CheckResult.member_rel_path
-
-Revision ID: 0002_2025.05.08_32fdbfe0
-Revises: 0001_2025.05.06_38b0d2de
-Create Date: 2025-05-08 14:28:07.435446+00:00
-"""
-
-from collections.abc import Sequence
-
-import sqlalchemy as sa
-from alembic import op
-
-# Revision identifiers, used by Alembic
-revision: str = "0002_2025.05.08_32fdbfe0"
-down_revision: str | None = "0001_2025.05.06_38b0d2de"
-branch_labels: str | Sequence[str] | None = None
-depends_on: str | Sequence[str] | None = None
-
-
-def upgrade() -> None:
-    op.add_column("checkresult", sa.Column("member_rel_path", sa.String(), 
nullable=True))
-    op.create_index(op.f("ix_checkresult_member_rel_path"), "checkresult", 
["member_rel_path"], unique=False)
-
-
-def downgrade() -> None:
-    op.drop_index(op.f("ix_checkresult_member_rel_path"), 
table_name="checkresult")
-    op.drop_column("checkresult", "member_rel_path")
diff --git a/migrations/versions/0003_2025.05.09_ee553bee.py 
b/migrations/versions/0003_2025.05.09_ee553bee.py
deleted file mode 100644
index a3ada69..0000000
--- a/migrations/versions/0003_2025.05.09_ee553bee.py
+++ /dev/null
@@ -1,72 +0,0 @@
-"""Add revision to CheckResult and ensure consistent naming
-
-Revision ID: 0003_2025.05.09_ee553bee
-Revises: 0002_2025.05.08_32fdbfe0
-Create Date: 2025-05-09 13:54:38.132731+00:00
-"""
-
-from collections.abc import Sequence
-
-import sqlalchemy as sa
-from alembic import op
-
-# Revision identifiers, used by Alembic
-revision: str = "0003_2025.05.09_ee553bee"
-down_revision: str | None = "0002_2025.05.08_32fdbfe0"
-branch_labels: str | Sequence[str] | None = None
-depends_on: str | Sequence[str] | None = None
-
-
-def upgrade() -> None:
-    op.add_column("checkresult", sa.Column("revision", sa.String(), 
nullable=False, server_default=""))
-    op.create_index(op.f("ix_checkresult_revision"), "checkresult", 
["revision"], unique=False)
-
-    with op.batch_alter_table("task", schema=None) as batch_op:
-        batch_op.alter_column(
-            "draft_revision",
-            new_column_name="revision",
-            type_=sa.String(),
-            existing_type=sa.VARCHAR(),
-            nullable=True,
-            existing_nullable=True,
-        )
-
-    with op.batch_alter_table("task", schema=None) as batch_op:
-        batch_op.drop_index("ix_task_draft_revision")
-        batch_op.create_index(op.f("ix_task_revision"), ["revision"], 
unique=False)
-
-    op.drop_index("ix_checkresulthistorylink_draft_revision", 
table_name="checkresulthistorylink")
-    op.drop_table("checkresulthistorylink")
-
-
-def downgrade() -> None:
-    op.create_table(
-        "checkresulthistorylink",
-        sa.Column("check_result_id", sa.INTEGER(), nullable=False),
-        sa.Column("draft_revision", sa.VARCHAR(), nullable=False),
-        sa.ForeignKeyConstraint(
-            ["check_result_id"],
-            ["checkresult.id"],
-        ),
-        sa.PrimaryKeyConstraint("check_result_id", "draft_revision"),
-    )
-    op.create_index(
-        "ix_checkresulthistorylink_draft_revision", "checkresulthistorylink", 
["draft_revision"], unique=False
-    )
-
-    with op.batch_alter_table("task", schema=None) as batch_op:
-        batch_op.drop_index(batch_op.f("ix_task_revision"))
-        batch_op.create_index("ix_task_draft_revision", ["revision"], 
unique=False)
-
-    with op.batch_alter_table("task", schema=None) as batch_op:
-        batch_op.alter_column(
-            "revision",
-            new_column_name="draft_revision",
-            type_=sa.VARCHAR(),
-            existing_type=sa.String(),
-            nullable=True,
-            existing_nullable=True,
-        )
-
-    op.drop_index(op.f("ix_checkresult_revision"), table_name="checkresult")
-    op.drop_column("checkresult", "revision")
diff --git a/migrations/versions/0004_2025.05.13_657bf05b.py 
b/migrations/versions/0004_2025.05.13_657bf05b.py
deleted file mode 100644
index 6f16dc9..0000000
--- a/migrations/versions/0004_2025.05.13_657bf05b.py
+++ /dev/null
@@ -1,61 +0,0 @@
-"""Remove some check functions from TaskType
-
-Revision ID: 0004_2025.05.13_657bf05b
-Revises: 0003_2025.05.09_ee553bee
-Create Date: 2025-05-13 14:41:31.781711+00:00
-"""
-
-from collections.abc import Sequence
-
-import sqlalchemy as sa
-from alembic import op
-
-# Revision identifiers, used by Alembic
-revision: str = "0004_2025.05.13_657bf05b"
-down_revision: str | None = "0003_2025.05.09_ee553bee"
-branch_labels: str | Sequence[str] | None = None
-depends_on: str | Sequence[str] | None = None
-
-_ENUM_MEMBERS_BEFORE_REMOVAL = (
-    "HASHING_CHECK",
-    "KEYS_IMPORT_FILE",
-    "LICENSE_FILES",
-    "LICENSE_HEADERS",
-    "MESSAGE_SEND",
-    "PATHS_CHECK",
-    "RAT_CHECK",
-    "SBOM_GENERATE_CYCLONEDX",
-    "SIGNATURE_CHECK",
-    "SVN_IMPORT_FILES",
-    "TARGZ_INTEGRITY",
-    "TARGZ_STRUCTURE",
-    "VOTE_INITIATE",
-    "ZIPFORMAT_INTEGRITY",
-    "ZIPFORMAT_LICENSE_FILES",
-    "ZIPFORMAT_LICENSE_HEADERS",
-    "ZIPFORMAT_STRUCTURE",
-)
-
-_ENUM_MEMBERS_AFTER_REMOVAL = tuple(
-    m for m in _ENUM_MEMBERS_BEFORE_REMOVAL if m not in 
{"ZIPFORMAT_LICENSE_FILES", "ZIPFORMAT_LICENSE_HEADERS"}
-)
-
-
-def upgrade() -> None:
-    with op.batch_alter_table("task", schema=None) as batch_op:
-        batch_op.alter_column(
-            "task_type",
-            existing_type=sa.Enum(*_ENUM_MEMBERS_BEFORE_REMOVAL, 
name="tasktype"),
-            type_=sa.Enum(*_ENUM_MEMBERS_AFTER_REMOVAL, name="tasktype"),
-            existing_nullable=False,
-        )
-
-
-def downgrade() -> None:
-    with op.batch_alter_table("task", schema=None) as batch_op:
-        batch_op.alter_column(
-            "task_type",
-            existing_type=sa.Enum(*_ENUM_MEMBERS_AFTER_REMOVAL, 
name="tasktype"),
-            type_=sa.Enum(*_ENUM_MEMBERS_BEFORE_REMOVAL, name="tasktype"),
-            existing_nullable=False,
-        )
diff --git a/migrations/versions/0005_2025.05.13_d94f16f6.py 
b/migrations/versions/0005_2025.05.13_d94f16f6.py
deleted file mode 100644
index aef4857..0000000
--- a/migrations/versions/0005_2025.05.13_d94f16f6.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""Delete tasks with obsolete ZIPFORMAT types
-
-Revision ID: 0005_2025.05.13_d94f16f6
-Revises: 0004_2025.05.13_657bf05b
-Create Date: 2025-05-13 15:42:21.333191+00:00
-"""
-
-from collections.abc import Sequence
-
-import sqlalchemy as sa
-from alembic import op
-
-# Revision identifiers, used by Alembic
-revision: str = "0005_2025.05.13_d94f16f6"
-down_revision: str | None = "0004_2025.05.13_657bf05b"
-branch_labels: str | Sequence[str] | None = None
-depends_on: str | Sequence[str] | None = None
-
-
-def upgrade() -> None:
-    """Delete tasks with obsolete ZIPFORMAT_LICENSE_FILES and 
ZIPFORMAT_LICENSE_HEADERS types."""
-    bind = op.get_bind()
-    result = bind.execute(
-        sa.text(
-            "SELECT 1 FROM task WHERE task_type IN ('ZIPFORMAT_LICENSE_FILES', 
'ZIPFORMAT_LICENSE_HEADERS') LIMIT 1"
-        )
-    ).scalar_one_or_none()
-
-    if result is not None:
-        op.execute("DELETE FROM task WHERE task_type IN 
('ZIPFORMAT_LICENSE_FILES', 'ZIPFORMAT_LICENSE_HEADERS')")
-
-
-def downgrade() -> None:
-    """
-    Downgrade for this migration.
-
-    Since the upgrade deletes data that corresponds to obsolete enum types,
-    a downgrade doesn't have a straightforward way to restore these tasks
-    without knowing their original context or reintroducing obsolete types.
-    Therefore, this downgrade is a noop.
-    """
-    pass
diff --git a/pyproject.toml b/pyproject.toml
index cda44f9..8117ff3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -123,6 +123,7 @@ select = [
 [tool.mypy]
 python_version = "3.13"
 exclude = ["tests"]
+plugins = ["pydantic.mypy"]
 mypy_path = "typestubs"
 check_untyped_defs = false
 disallow_incomplete_defs = true
@@ -136,6 +137,11 @@ warn_return_any = true
 # warn_unreachable = true
 warn_unused_ignores = true
 
+[tool.pydantic-mypy]
+init_forbid_extra = true
+init_typed = true
+warn_required_dynamic_aliases = true
+
 [[tool.mypy.overrides]]
 module = "asfquart.*"
 ignore_errors = true


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to