This is an automated email from the ASF dual-hosted git repository.

arm pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-releases.git

commit 21042a35c5abab5c8344bd68e1c33bdf19ede92d
Author: Alastair McFarlane <[email protected]>
AuthorDate: Fri Feb 20 12:19:53 2026 +0000

    Remove cache ignore logic since we can't have an empty cache key now. Add 
policy dependencies to license and RAT checks. Enable local/global caching 
switch. Fix bug with task list.
---
 atr/admin/__init__.py                           |  5 ++-
 atr/attestable.py                               | 12 ++---
 atr/models/attestable.py                        |  5 +++
 atr/models/sql.py                               |  3 +-
 atr/post/draft.py                               | 56 ++++++++++++-----------
 atr/shared/web.py                               | 20 +++++----
 atr/storage/writers/revision.py                 |  9 +++-
 atr/tasks/checks/__init__.py                    | 60 +++----------------------
 atr/tasks/checks/compare.py                     |  2 -
 atr/tasks/checks/hashing.py                     |  2 -
 atr/tasks/checks/license.py                     |  5 +--
 atr/tasks/checks/paths.py                       | 14 ++----
 atr/tasks/checks/rat.py                         |  4 +-
 atr/tasks/checks/signature.py                   |  2 -
 atr/tasks/checks/targz.py                       |  4 --
 atr/tasks/checks/zipformat.py                   |  4 --
 atr/templates/check-selected.html               | 23 +++++++---
 atr/worker.py                                   |  1 +
 migrations/versions/0052_2026.02.20_96e1972f.py | 33 ++++++++++++++
 tests/unit/recorders.py                         |  6 +--
 tests/unit/test_checks_compare.py               |  6 +--
 tests/unit/test_create_revision.py              |  2 -
 22 files changed, 132 insertions(+), 146 deletions(-)

diff --git a/atr/admin/__init__.py b/atr/admin/__init__.py
index 55c4cc78..21f95172 100644
--- a/atr/admin/__init__.py
+++ b/atr/admin/__init__.py
@@ -786,7 +786,10 @@ async def tasks_recent(session: web.Committer, minutes: 
int) -> str:
     async with db.session() as data:
         statement = (
             sqlmodel.select(sql.Task)
-            .where(via(sql.Task.added) >= cutoff, 
sqlalchemy.not_(via(sql.Task.scheduled) > now))
+            .where(
+                via(sql.Task.added) >= cutoff,
+                sqlalchemy.or_(sqlalchemy.not_(via(sql.Task.scheduled) > now), 
via(sql.Task.scheduled).is_(None)),
+            )
             .order_by(via(sql.Task.added).desc())
         )
         recent_tasks = (await data.execute(statement)).scalars().all()
diff --git a/atr/attestable.py b/atr/attestable.py
index 08404503..5bedab50 100644
--- a/atr/attestable.py
+++ b/atr/attestable.py
@@ -28,7 +28,6 @@ import atr.hashes as hashes
 import atr.log as log
 import atr.models.attestable as models
 import atr.util as util
-from atr.models.attestable import AttestableChecksV1
 
 if TYPE_CHECKING:
     import pathlib
@@ -104,7 +103,10 @@ async def load_checks(
         try:
             async with aiofiles.open(file_path, encoding="utf-8") as f:
                 data = json.loads(await f.read())
-            return models.AttestableChecksV1.model_validate(data).checks
+                if data.get("version") == 1:
+                    log.warning(f"Found old checks file format in {file_path}, 
ignoring old checks")
+                    return {}
+            return models.AttestableChecksV2.model_validate(data).checks
         except (json.JSONDecodeError, pydantic.ValidationError) as e:
             log.warning(f"Could not parse {file_path}: {e}")
     return {}
@@ -175,7 +177,7 @@ async def write_files_data(
     checks_file_path = attestable_checks_path(project_name, version_name, 
revision_number)
     if not checks_file_path.exists():
         async with aiofiles.open(checks_file_path, "w", encoding="utf-8") as f:
-            await 
f.write(models.AttestableChecksV1().model_dump_json(indent=2))
+            await 
f.write(models.AttestableChecksV2().model_dump_json(indent=2))
 
 
 async def write_checks_data(
@@ -189,14 +191,14 @@ async def write_checks_data(
 
     def modify(content: str) -> str:
         try:
-            current = AttestableChecksV1.model_validate_json(content).checks
+            current = 
models.AttestableChecksV2.model_validate_json(content).checks
         except pydantic.ValidationError:
             current = {}
         if rel_path not in current:
             current[rel_path] = checks
         else:
             current[rel_path].update(checks)
-        result = models.AttestableChecksV1(checks=current)
+        result = models.AttestableChecksV2(checks=current)
         return result.model_dump_json(indent=2)
 
     await util.atomic_modify_file(attestable_checks_path(project_name, 
version_name, revision_number), modify)
diff --git a/atr/models/attestable.py b/atr/models/attestable.py
index 4e000984..548a77ea 100644
--- a/atr/models/attestable.py
+++ b/atr/models/attestable.py
@@ -29,6 +29,11 @@ class HashEntry(schema.Strict):
 
 class AttestableChecksV1(schema.Strict):
     version: Literal[1] = 1
+    checks: list[int] = schema.factory(list)
+
+
+class AttestableChecksV2(schema.Strict):
+    version: Literal[2] = 2
     checks: dict[str, dict[str, str]] = schema.factory(dict)
 
 
diff --git a/atr/models/sql.py b/atr/models/sql.py
index 9d2c9099..a0937bf3 100644
--- a/atr/models/sql.py
+++ b/atr/models/sql.py
@@ -803,6 +803,8 @@ class Release(sqlmodel.SQLModel, table=True):
         **example(datetime.datetime(2025, 6, 1, 1, 2, 3, tzinfo=datetime.UTC)),
     )
 
+    check_cache_key: str | None = sqlmodel.Field(default=None, 
**example("ef0ccb0a-3514-4b65-abcd-879850349f74"))
+
     # M-1: Release -> Project
     # 1-M: Project -> [Release]
     project_name: str = sqlmodel.Field(foreign_key="project.name", 
**example("example"))
@@ -1200,7 +1202,6 @@ class Revision(sqlmodel.SQLModel, table=True):
 
     description: str | None = sqlmodel.Field(default=None, **example("This is 
a description"))
     tag: str | None = sqlmodel.Field(default=None, **example("rc1"))
-    use_check_cache: bool = sqlmodel.Field(default=True, **example(True))
 
     def model_post_init(self, _context):
         if isinstance(self.created, str):
diff --git a/atr/post/draft.py b/atr/post/draft.py
index d68826ca..9b57b708 100644
--- a/atr/post/draft.py
+++ b/atr/post/draft.py
@@ -89,29 +89,6 @@ async def delete_file(
     )
 
 
[email protected]("/draft/fresh/<project_name>/<version_name>")
[email protected]()
-async def fresh(session: web.Committer, project_name: str, version_name: str) 
-> web.WerkzeugResponse:
-    """Restart all checks for a whole release candidate draft."""
-    # Admin only button, but it's okay if users find and use this manually
-    await session.check_access(project_name)
-
-    # Restart checks by creating a new identical draft revision
-    # This doesn't make sense unless the checks themselves have been updated
-    # Therefore we only show the button for this to admins
-    description = "Empty revision to restart all checks for the whole release 
candidate draft"
-    async with storage.write(session) as write:
-        wacp = await write.as_project_committee_participant(project_name)
-        await wacp.revision.create_revision(project_name, version_name, 
session.uid, description=description)
-
-    return await session.redirect(
-        get.compose.selected,
-        project_name=project_name,
-        version_name=version_name,
-        success="All checks restarted",
-    )
-
-
 
@post.committer("/draft/hashgen/<project_name>/<version_name>/<path:file_path>")
 @post.empty()
 async def hashgen(session: web.Committer, project_name: str, version_name: 
str, file_path: str) -> web.WerkzeugResponse:
@@ -144,7 +121,34 @@ async def hashgen(session: web.Committer, project_name: 
str, version_name: str,
 @post.committer("/draft/recheck/<project_name>/<version_name>")
 @post.empty()
 async def recheck(session: web.Committer, project_name: str, version_name: 
str) -> web.WerkzeugResponse:
-    """Start a new draft revision to rerun all checks without using caches."""
+    """Start a new draft revision and switch this release to release-local 
caching"""
+    await session.check_access(project_name)
+    if not session.is_admin:
+        raise base.ASFQuartException("Admin access required", errorcode=403)
+
+    description = "Empty revision to restart all checks without cache for the 
whole release candidate draft"
+    async with storage.write(session) as write:
+        wacp = await write.as_project_committee_participant(project_name)
+        await wacp.revision.create_revision(
+            project_name,
+            version_name,
+            session.uid,
+            description=description,
+            set_local_cache=True,
+        )
+
+    return await session.redirect(
+        get.compose.selected,
+        project_name=project_name,
+        version_name=version_name,
+        success="All checks restarted with release-local cache",
+    )
+
+
[email protected]("/draft/reset/<project_name>/<version_name>")
[email protected]()
+async def cache_reset(session: web.Committer, project_name: str, version_name: 
str) -> web.WerkzeugResponse:
+    """Start a new draft revision and switch this release to global caching"""
     await session.check_access(project_name)
     if not session.is_admin:
         raise base.ASFQuartException("Admin access required", errorcode=403)
@@ -157,14 +161,14 @@ async def recheck(session: web.Committer, project_name: 
str, version_name: str)
             version_name,
             session.uid,
             description=description,
-            use_check_cache=False,
+            reset_to_global_cache=True,
         )
 
     return await session.redirect(
         get.compose.selected,
         project_name=project_name,
         version_name=version_name,
-        success="All checks restarted without cache",
+        success="Release set back to global caching",
     )
 
 
diff --git a/atr/shared/web.py b/atr/shared/web.py
index 392f7728..03ab044f 100644
--- a/atr/shared/web.py
+++ b/atr/shared/web.py
@@ -112,17 +112,18 @@ async def check(
             ),
         )
 
-    fresh_form = form.render(
+    recheck_form = form.render(
         model_cls=form.Empty,
-        action=util.as_url(post.draft.fresh, 
project_name=release.project.name, version_name=release.version),
-        submit_label="Restart all checks",
+        action=util.as_url(post.draft.recheck, 
project_name=release.project.name, version_name=release.version),
+        submit_label="Disable global cache",
         submit_classes="btn btn-primary",
+        # confirm="Restart all checks without using cached results? This 
creates a new revision.",
     )
-    recheck_form = form.render(
+    cache_reset_form = form.render(
         model_cls=form.Empty,
-        action=util.as_url(post.draft.recheck, 
project_name=release.project.name, version_name=release.version),
-        submit_label="Recheck all without cache",
-        submit_classes="btn btn-outline-secondary",
+        action=util.as_url(post.draft.cache_reset, 
project_name=release.project.name, version_name=release.version),
+        submit_label="Enable global cache",
+        submit_classes="btn btn-primary",
         # confirm="Restart all checks without using cached results? This 
creates a new revision.",
     )
 
@@ -136,6 +137,8 @@ async def check(
     if revision_number is not None:
         blocker_errors = await interaction.has_blocker_checks(release, 
revision_number)
 
+    is_local_caching = release.check_cache_key is not None
+
     checks_summary_html = _render_checks_summary(info, release.project.name, 
release.version)
 
     return await template.render(
@@ -162,8 +165,9 @@ async def check(
         vote_task=vote_task,
         archive_url=archive_url,
         vote_task_warnings=vote_task_warnings,
-        fresh_form=fresh_form,
         recheck_form=recheck_form,
+        cache_reset_form=cache_reset_form,
+        is_local_caching=is_local_caching,
         csrf_input=str(form.csrf_input()),
         resolve_form=resolve_form,
         has_files=has_files,
diff --git a/atr/storage/writers/revision.py b/atr/storage/writers/revision.py
index 20be7bcd..16d1f00b 100644
--- a/atr/storage/writers/revision.py
+++ b/atr/storage/writers/revision.py
@@ -24,6 +24,7 @@ import datetime
 import pathlib
 import secrets
 import tempfile
+import uuid
 from typing import TYPE_CHECKING
 
 import aiofiles.os
@@ -111,7 +112,8 @@ class CommitteeParticipant(FoundationCommitter):
         version_name: str,
         asf_uid: str,
         description: str | None = None,
-        use_check_cache: bool = True,
+        set_local_cache: bool = False,
+        reset_to_global_cache: bool = False,
         modify: Callable[[pathlib.Path, sql.Revision | None], Awaitable[None]] 
| None = None,
         clone_from: str | None = None,
     ) -> sql.Revision:
@@ -128,6 +130,10 @@ class CommitteeParticipant(FoundationCommitter):
                 )
             else:
                 old_revision = await interaction.latest_revision(release)
+            if set_local_cache:
+                release.check_cache_key = str(uuid.uuid4())
+            if reset_to_global_cache:
+                release.check_cache_key = None
 
         if clone_from is not None:
             old_release_dir = util.release_directory_base(release) / clone_from
@@ -206,7 +212,6 @@ class CommitteeParticipant(FoundationCommitter):
                     created=datetime.datetime.now(datetime.UTC),
                     phase=release.phase,
                     description=description,
-                    use_check_cache=use_check_cache,
                 )
 
                 # Acquire the write lock and add the row
diff --git a/atr/tasks/checks/__init__.py b/atr/tasks/checks/__init__.py
index ffc8251e..45573245 100644
--- a/atr/tasks/checks/__init__.py
+++ b/atr/tasks/checks/__init__.py
@@ -35,7 +35,6 @@ if TYPE_CHECKING:
     import atr.models.schema as schema
 
 import atr.attestable as attestable
-import atr.config as config
 import atr.db as db
 import atr.file_paths as file_paths
 import atr.hashes as hashes
@@ -69,11 +68,11 @@ class Recorder:
     afresh: bool
     __cached: bool
     __input_hash: str | None
-    __use_check_cache: bool | None
 
     def __init__(
         self,
         checker: str | Callable[..., Any],
+        inputs_hash: str | None,
         project_name: str,
         version_name: str,
         revision_number: str,
@@ -90,8 +89,7 @@ class Recorder:
         self.constructed = False
         self.member_problems: dict[sql.CheckResultStatus, int] = {}
         self.__cached = False
-        self.__input_hash = None
-        self.__use_check_cache = None
+        self.__input_hash = inputs_hash
 
         self.project_name = project_name
         self.version_name = version_name
@@ -100,6 +98,7 @@ class Recorder:
     async def create(
         cls,
         checker: str | Callable[..., Any],
+        inputs_hash: str,
         project_name: str,
         version_name: str,
         revision_number: str,
@@ -109,6 +108,7 @@ class Recorder:
     ) -> Recorder:
         recorder = cls(
             checker,
+            inputs_hash,
             project_name,
             version_name,
             revision_number,
@@ -208,42 +208,6 @@ class Recorder:
         abs_path = await self.abs_path()
         return matches(str(abs_path))
 
-    async def cache_key_set(
-        self,
-        policy_keys: list[str],
-        version,
-        input_args: list[str] | None = None,
-        checker: str | None = None,
-    ) -> bool:
-        # TODO: Should this just be in the constructor?
-
-        if config.get().DISABLE_CHECK_CACHE:
-            return False
-
-        if not await self.use_check_cache():
-            return False
-
-        no_cache_file = self.abs_path_base() / ".atr-no-cache"
-        if await aiofiles.os.path.exists(no_cache_file):
-            return False
-
-        async with db.session() as data:
-            release = await data.release(
-                name=self.release_name, _release_policy=True, 
_project_release_policy=True, _project=True
-            ).demand(RuntimeError(f"Release {self.release_name} not found"))
-            args = await resolve_extra_args(input_args or [], release, 
self.primary_rel_path)
-            cache_key = await resolve_cache_key(
-                checker or self.checker,
-                version,
-                policy_keys,
-                release,
-                self.revision_number,
-                args,
-                file=self.primary_rel_path,
-            )
-            self.__input_hash = hashes.compute_dict_hash(cache_key) if 
cache_key else None
-        return True
-
     @property
     def cached(self) -> bool:
         return self.__cached
@@ -328,18 +292,6 @@ class Recorder:
         )
         return result
 
-    async def use_check_cache(self) -> bool:
-        if self.__use_check_cache is not None:
-            return self.__use_check_cache
-
-        async with db.session() as data:
-            revision = await data.revision(release_name=self.release_name, 
number=self.revision_number).get()
-        if revision is None:
-            self.__use_check_cache = True
-            return True
-        self.__use_check_cache = revision.use_check_cache
-        return self.__use_check_cache
-
     async def warning(
         self,
         message: str,
@@ -374,7 +326,7 @@ async def resolve_cache_key(
 ) -> dict[str, Any] | None:
     if not args:
         args = {}
-    cache_key = {"checker": function_key(checker)}
+    cache_key = {"checker": function_key(checker), "version": checker_version}
     file_hash = None
     attestable_data = await attestable.load(release.project_name, 
release.version, revision)
     if attestable_data:
@@ -390,6 +342,8 @@ async def resolve_cache_key(
             file_hash = await hashes.compute_file_hash(path)
     if file_hash:
         cache_key["file_hash"] = file_hash
+    if release.check_cache_key:
+        cache_key["release_cache_key"] = release.check_cache_key
 
     if (len(policy_keys) > 0) and (policy is not None):
         policy_dict = policy.model_dump(exclude_none=True)
diff --git a/atr/tasks/checks/compare.py b/atr/tasks/checks/compare.py
index 914ebbf2..72905a2d 100644
--- a/atr/tasks/checks/compare.py
+++ b/atr/tasks/checks/compare.py
@@ -94,8 +94,6 @@ async def source_trees(args: checks.FunctionArguments) -> 
results.Results | None
         )
         return None
 
-    await recorder.cache_key_set(INPUT_POLICY_KEYS, CHECK_VERSION, 
INPUT_EXTRA_ARGS)
-
     payload = await _load_tp_payload(args.project_name, args.version_name, 
args.revision_number)
     checkout_dir: str | None = None
     archive_dir: str | None = None
diff --git a/atr/tasks/checks/hashing.py b/atr/tasks/checks/hashing.py
index 60ef085f..dd11a427 100644
--- a/atr/tasks/checks/hashing.py
+++ b/atr/tasks/checks/hashing.py
@@ -42,8 +42,6 @@ async def check(args: checks.FunctionArguments) -> 
results.Results | None:
         await recorder.failure("Unsupported hash algorithm", {"algorithm": 
algorithm})
         return None
 
-    await recorder.cache_key_set(INPUT_POLICY_KEYS, CHECK_VERSION, 
INPUT_EXTRA_ARGS)
-
     # Remove the hash file suffix to get the artifact path
     # This replaces the last suffix, which is what we want
     # >>> pathlib.Path("a/b/c.d.e.f.g").with_suffix(".x")
diff --git a/atr/tasks/checks/license.py b/atr/tasks/checks/license.py
index fecb3fa9..449445ac 100644
--- a/atr/tasks/checks/license.py
+++ b/atr/tasks/checks/license.py
@@ -80,7 +80,7 @@ INCLUDED_PATTERNS: Final[list[str]] = [
 ]
 
 # Release policy fields which this check relies on - used for result caching
-INPUT_POLICY_KEYS: Final[list[str]] = [""]
+INPUT_POLICY_KEYS: Final[list[str]] = ["license_check_mode", 
"source_excludes_lightweight"]
 INPUT_EXTRA_ARGS: Final[list[str]] = ["is_podling"]
 CHECK_VERSION: Final[str] = "1"
 
@@ -140,7 +140,6 @@ async def files(args: checks.FunctionArguments) -> 
results.Results | None:
             return None
 
     is_podling = args.extra_args.get("is_podling", False)
-    await recorder.cache_key_set(INPUT_POLICY_KEYS, CHECK_VERSION, 
INPUT_EXTRA_ARGS)
 
     log.info(f"Checking license files for {artifact_abs_path} (rel: 
{args.primary_rel_path})")
 
@@ -173,8 +172,6 @@ async def headers(args: checks.FunctionArguments) -> 
results.Results | None:
         if project.policy_license_check_mode == sql.LicenseCheckMode.RAT:
             return None
 
-    await recorder.cache_key_set(INPUT_POLICY_KEYS, CHECK_VERSION, 
INPUT_EXTRA_ARGS)
-
     # if await recorder.check_cache(artifact_abs_path):
     #     log.info(f"Using cached license headers result for 
{artifact_abs_path} (rel: {args.primary_rel_path})")
     #     return None
diff --git a/atr/tasks/checks/paths.py b/atr/tasks/checks/paths.py
index c28900a8..62678b49 100644
--- a/atr/tasks/checks/paths.py
+++ b/atr/tasks/checks/paths.py
@@ -52,9 +52,11 @@ async def check(args: checks.FunctionArguments) -> 
results.Results | None:
     # https://infra.apache.org/release-distribution.html
     # - Incubation Policy (IP)
     # https://incubator.apache.org/policy/incubation.html
+    base_recorder = await args.recorder()
 
     recorder_errors = await checks.Recorder.create(
         checker=checks.function_key(check) + "_errors",
+        inputs_hash=base_recorder.input_hash or "",
         project_name=args.project_name,
         version_name=args.version_name,
         revision_number=args.revision_number,
@@ -63,6 +65,7 @@ async def check(args: checks.FunctionArguments) -> 
results.Results | None:
     )
     recorder_warnings = await checks.Recorder.create(
         checker=checks.function_key(check) + "_warnings",
+        inputs_hash=base_recorder.input_hash or "",
         project_name=args.project_name,
         version_name=args.version_name,
         revision_number=args.revision_number,
@@ -71,6 +74,7 @@ async def check(args: checks.FunctionArguments) -> 
results.Results | None:
     )
     recorder_success = await checks.Recorder.create(
         checker=checks.function_key(check) + "_success",
+        inputs_hash=base_recorder.input_hash or "",
         project_name=args.project_name,
         version_name=args.version_name,
         revision_number=args.revision_number,
@@ -90,16 +94,6 @@ async def check(args: checks.FunctionArguments) -> 
results.Results | None:
     relative_paths = [p async for p in util.paths_recursive(base_path)]
     relative_paths_set = set(str(p) for p in relative_paths)
 
-    await recorder_errors.cache_key_set(
-        INPUT_POLICY_KEYS, CHECK_VERSION, INPUT_EXTRA_ARGS, 
checker=checks.function_key(check)
-    )
-    await recorder_warnings.cache_key_set(
-        INPUT_POLICY_KEYS, CHECK_VERSION, INPUT_EXTRA_ARGS, 
checker=checks.function_key(check)
-    )
-    await recorder_success.cache_key_set(
-        INPUT_POLICY_KEYS, CHECK_VERSION, INPUT_EXTRA_ARGS, 
checker=checks.function_key(check)
-    )
-
     for relative_path in relative_paths:
         # Delegate processing of each path to the helper function
         await _check_path_process_single(
diff --git a/atr/tasks/checks/rat.py b/atr/tasks/checks/rat.py
index 486ad18c..d2e685b3 100644
--- a/atr/tasks/checks/rat.py
+++ b/atr/tasks/checks/rat.py
@@ -66,7 +66,7 @@ _STD_EXCLUSIONS_EXTENDED: Final[list[str]] = [
     "STANDARD_SCMS",
 ]
 # Release policy fields which this check relies on - used for result caching
-INPUT_POLICY_KEYS: Final[list[str]] = []
+INPUT_POLICY_KEYS: Final[list[str]] = ["license_check_mode", 
"source_excludes_rat"]
 INPUT_EXTRA_ARGS: Final[list[str]] = []
 CHECK_VERSION: Final[str] = "1"
 
@@ -89,8 +89,6 @@ async def check(args: checks.FunctionArguments) -> 
results.Results | None:
         log.info(f"Skipping RAT check for {artifact_abs_path} (mode is 
LIGHTWEIGHT)")
         return None
 
-    await recorder.cache_key_set(INPUT_POLICY_KEYS, CHECK_VERSION, 
INPUT_EXTRA_ARGS)
-
     log.info(f"Checking RAT licenses for {artifact_abs_path} (rel: 
{args.primary_rel_path})")
 
     is_source = await recorder.primary_path_is_source()
diff --git a/atr/tasks/checks/signature.py b/atr/tasks/checks/signature.py
index d6c88f2e..6c4bbd47 100644
--- a/atr/tasks/checks/signature.py
+++ b/atr/tasks/checks/signature.py
@@ -55,8 +55,6 @@ async def check(args: checks.FunctionArguments) -> 
results.Results | None:
         await recorder.exception("Committee name is required", 
{"committee_name": committee_name})
         return None
 
-    await recorder.cache_key_set(INPUT_POLICY_KEYS, CHECK_VERSION, 
INPUT_EXTRA_ARGS)
-
     log.info(
         f"Checking signature {primary_abs_path} for {artifact_abs_path}"
         f" using {committee_name} keys (rel: {primary_rel_path})"
diff --git a/atr/tasks/checks/targz.py b/atr/tasks/checks/targz.py
index 2415135d..19f753c8 100644
--- a/atr/tasks/checks/targz.py
+++ b/atr/tasks/checks/targz.py
@@ -43,8 +43,6 @@ async def integrity(args: checks.FunctionArguments) -> 
results.Results | None:
     if not (artifact_abs_path := await recorder.abs_path()):
         return None
 
-    await recorder.cache_key_set(INPUT_POLICY_KEYS, CHECK_VERSION, 
INPUT_EXTRA_ARGS)
-
     log.info(f"Checking integrity for {artifact_abs_path} (rel: 
{args.primary_rel_path})")
 
     chunk_size = 4096
@@ -102,8 +100,6 @@ async def structure(args: checks.FunctionArguments) -> 
results.Results | None:
     if await recorder.primary_path_is_binary():
         return None
 
-    await recorder.cache_key_set(INPUT_POLICY_KEYS, CHECK_VERSION, 
INPUT_EXTRA_ARGS)
-
     filename = artifact_abs_path.name
     basename_from_filename: Final[str] = (
         filename.removesuffix(".tar.gz") if filename.endswith(".tar.gz") else 
filename.removesuffix(".tgz")
diff --git a/atr/tasks/checks/zipformat.py b/atr/tasks/checks/zipformat.py
index 516f662f..74c13dee 100644
--- a/atr/tasks/checks/zipformat.py
+++ b/atr/tasks/checks/zipformat.py
@@ -38,8 +38,6 @@ async def integrity(args: checks.FunctionArguments) -> 
results.Results | None:
     if not (artifact_abs_path := await recorder.abs_path()):
         return None
 
-    await recorder.cache_key_set(INPUT_POLICY_KEYS, CHECK_VERSION, 
INPUT_EXTRA_ARGS)
-
     log.info(f"Checking zip integrity for {artifact_abs_path} (rel: 
{args.primary_rel_path})")
 
     try:
@@ -64,8 +62,6 @@ async def structure(args: checks.FunctionArguments) -> 
results.Results | None:
     if await recorder.primary_path_is_binary():
         return None
 
-    await recorder.cache_key_set(INPUT_POLICY_KEYS, CHECK_VERSION, 
INPUT_EXTRA_ARGS)
-
     log.info(f"Checking zip structure for {artifact_abs_path} (rel: 
{args.primary_rel_path})")
 
     try:
diff --git a/atr/templates/check-selected.html 
b/atr/templates/check-selected.html
index 3328b666..7e027018 100644
--- a/atr/templates/check-selected.html
+++ b/atr/templates/check-selected.html
@@ -189,14 +189,23 @@
       You can also <a href="{{ as_url(get.ignores.ignores, 
project_name=release.project_name) }}">manage which check results are 
ignored</a>.
     </p>
 
-    <h3 id="debugging" class="mt-4">Debugging</h3>
-    <div class="mb-2">
-      <p>The following form is for debugging purposes only. It will create a 
new revision.</p>
-    </div>
-    <div class="mb-2">{{ fresh_form|safe }}</div>
     {% if is_viewing_as_admin_fn(current_user.uid) %}
-      <div class="mb-2">{{ recheck_form|safe }}</div>
-      <p class="text-muted small mb-3">Rechecks without using cached 
results.</p>
+      <h3 id="cache-control" class="mt-4">Check cache control</h3>
+
+      {% if not is_local_caching %}
+        <div class="mb-2">
+          <p>Click the button below to opt this release out of the global check cache.</p>
+          <p>This will create a new revision and restart all checks.</p>
+        </div>
+        <div class="mb-2">{{ recheck_form|safe }}</div>
+      {% else %}
+        <div class="mb-2">
+          <p>This release is using a local check cache.</p>
+          <p>Click the button below to reset back to using the default global cache.</p>
+          <p>This will create a new revision and update check results from the cache or new checks as appropriate.</p>
+        </div>
+        <div class="mb-2">{{ cache_reset_form|safe }}</div>
+      {% endif %}
     {% endif %}
 
     <h3 id="delete-draft" class="mt-4">Delete this draft</h3>
diff --git a/atr/worker.py b/atr/worker.py
index 8a23e699..ac19a940 100644
--- a/atr/worker.py
+++ b/atr/worker.py
@@ -121,6 +121,7 @@ async def _execute_check_task(
     async def recorder_factory() -> checks.Recorder:
         return await checks.Recorder.create(
             checker=handler,
+            inputs_hash=task_obj.inputs_hash or "",
             project_name=task_obj.project_name or "",
             version_name=task_obj.version_name or "",
             revision_number=task_obj.revision_number or "",
diff --git a/migrations/versions/0052_2026.02.20_96e1972f.py 
b/migrations/versions/0052_2026.02.20_96e1972f.py
new file mode 100644
index 00000000..721913b1
--- /dev/null
+++ b/migrations/versions/0052_2026.02.20_96e1972f.py
@@ -0,0 +1,33 @@
+"""Remove revision-based cache option and move to release-based cache busting
+
+Revision ID: 0052_2026.02.20_96e1972f
+Revises: 0051_2026.02.17_12ac0c6b
+Create Date: 2026-02-20 16:12:21.755711+00:00
+"""
+
+from collections.abc import Sequence
+
+import sqlalchemy as sa
+from alembic import op
+
+# Revision identifiers, used by Alembic
+revision: str = "0052_2026.02.20_96e1972f"
+down_revision: str | None = "0051_2026.02.17_12ac0c6b"
+branch_labels: str | Sequence[str] | None = None
+depends_on: str | Sequence[str] | None = None
+
+
+def upgrade() -> None:
+    with op.batch_alter_table("release", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("check_cache_key", sa.String(), 
nullable=True))
+
+    with op.batch_alter_table("revision", schema=None) as batch_op:
+        batch_op.drop_column("use_check_cache")
+
+
+def downgrade() -> None:
+    with op.batch_alter_table("revision", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("use_check_cache", sa.BOOLEAN(), 
server_default=sa.text("1"), nullable=False))
+
+    with op.batch_alter_table("release", schema=None) as batch_op:
+        batch_op.drop_column("check_cache_key")
diff --git a/tests/unit/recorders.py b/tests/unit/recorders.py
index 5d5ce29f..790cabd9 100644
--- a/tests/unit/recorders.py
+++ b/tests/unit/recorders.py
@@ -28,6 +28,7 @@ class RecorderStub(checks.Recorder):
     def __init__(self, path: pathlib.Path, checker: str) -> None:
         super().__init__(
             checker=checker,
+            inputs_hash=None,
             project_name="test",
             version_name="test",
             revision_number="00001",
@@ -41,11 +42,6 @@ class RecorderStub(checks.Recorder):
     async def abs_path(self, rel_path: str | None = None) -> pathlib.Path | 
None:
         return self._path if (rel_path is None) else self._path / rel_path
 
-    async def cache_key_set(
-        self, policy_keys: list[str], version: str, input_args: list[str] | 
None = None, checker: str | None = None
-    ) -> bool:
-        return False
-
     async def primary_path_is_binary(self) -> bool:
         return False
 
diff --git a/tests/unit/test_checks_compare.py 
b/tests/unit/test_checks_compare.py
index aff3a23a..db9a5f48 100644
--- a/tests/unit/test_checks_compare.py
+++ b/tests/unit/test_checks_compare.py
@@ -231,6 +231,7 @@ class RecorderStub(atr.tasks.checks.Recorder):
     def __init__(self, is_source: bool) -> None:
         super().__init__(
             checker="compare.source_trees",
+            inputs_hash=None,
             project_name="project",
             version_name="version",
             revision_number="00001",
@@ -242,11 +243,6 @@ class RecorderStub(atr.tasks.checks.Recorder):
         self.success_calls: list[tuple[str, object]] = []
         self._is_source = is_source
 
-    async def cache_key_set(
-        self, policy_keys: list[str], version: str, input_args: list[str] | 
None = None, checker: str | None = None
-    ) -> bool:
-        return False
-
     async def primary_path_is_source(self) -> bool:
         return self._is_source
 
diff --git a/tests/unit/test_create_revision.py 
b/tests/unit/test_create_revision.py
index 7d836f06..630919be 100644
--- a/tests/unit/test_create_revision.py
+++ b/tests/unit/test_create_revision.py
@@ -43,7 +43,6 @@ class FakeRevision:
         created: object,
         phase: sql.ReleasePhase,
         description: str | None,
-        use_check_cache: bool,
     ):
         self.asfuid = asfuid
         self.created = created
@@ -54,7 +53,6 @@ class FakeRevision:
         self.phase = phase
         self.release = release
         self.release_name = release_name
-        self.use_check_cache = use_check_cache
 
 
 class MockSafeData:


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to