This is an automated email from the ASF dual-hosted git repository.
dpgaspar pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/superset.git
The following commit(s) were added to refs/heads/master by this push:
new 28788fd1fa fix: centralize cache timeout -1 logic to prevent caching (#34654)
28788fd1fa is described below
commit 28788fd1fa9b434fb980bc8ee64d667086fc404b
Author: Daniel Vaz Gaspar <[email protected]>
AuthorDate: Mon Aug 18 08:45:20 2025 +0100
fix: centralize cache timeout -1 logic to prevent caching (#34654)
---
superset/common/query_context_processor.py | 4 ++--
superset/constants.py | 4 ++++
superset/utils/cache.py | 10 +++++++++-
superset/viz.py | 3 ++-
tests/integration_tests/charts/data/api_tests.py | 3 ++-
5 files changed, 19 insertions(+), 5 deletions(-)
diff --git a/superset/common/query_context_processor.py b/superset/common/query_context_processor.py
index a0228cedee..d806a4be9f 100644
--- a/superset/common/query_context_processor.py
+++ b/superset/common/query_context_processor.py
@@ -38,7 +38,7 @@ from superset.common.utils.time_range_utils import (
get_since_until_from_time_range,
)
from superset.connectors.sqla.models import BaseDatasource
-from superset.constants import CacheRegion, TimeGrain
+from superset.constants import CACHE_DISABLED_TIMEOUT, CacheRegion, TimeGrain
from superset.daos.annotation_layer import AnnotationLayerDAO
from superset.daos.chart import ChartDAO
from superset.exceptions import (
@@ -131,7 +131,7 @@ class QueryContextProcessor:
"""Handles caching around the df payload retrieval"""
cache_key = self.query_cache_key(query_obj)
timeout = self.get_cache_timeout()
- force_query = self._query_context.force or timeout == -1
+ force_query = self._query_context.force or timeout == CACHE_DISABLED_TIMEOUT
cache = QueryCacheManager.get(
key=cache_key,
region=CacheRegion.DATA,
diff --git a/superset/constants.py b/superset/constants.py
index f60b79a961..d285fb1f90 100644
--- a/superset/constants.py
+++ b/superset/constants.py
@@ -243,3 +243,7 @@ class CacheRegion(StrEnum):
DEFAULT = "default"
DATA = "data"
THUMBNAIL = "thumbnail"
+
+
+# Cache timeout constants
+CACHE_DISABLED_TIMEOUT = -1 # Special value indicating no caching should occur
diff --git a/superset/utils/cache.py b/superset/utils/cache.py
index f217b62a63..881b4abccb 100644
--- a/superset/utils/cache.py
+++ b/superset/utils/cache.py
@@ -28,6 +28,7 @@ from flask_caching.backends import NullCache
from werkzeug.wrappers import Response
from superset import db
+from superset.constants import CACHE_DISABLED_TIMEOUT
from superset.extensions import cache_manager
from superset.models.cache import CacheKey
from superset.utils.hashing import md5_sha_from_dict
@@ -56,6 +57,10 @@ def set_and_log_cache(
if cache_timeout is not None
else app.config["CACHE_DEFAULT_TIMEOUT"]
)
+
+ # Skip caching if timeout is CACHE_DISABLED_TIMEOUT (no caching requested)
+ if timeout == CACHE_DISABLED_TIMEOUT:
+ return
try:
dttm = datetime.utcnow().isoformat().split(".")[0]
value = {**cache_value, "dttm": dttm}
@@ -134,7 +139,10 @@ def memoized_func(key: str, cache: Cache = cache_manager.cache) -> Callable[...,
if not force and obj is not None:
return obj
obj = f(*args, **kwargs)
- cache.set(cache_key, obj, timeout=cache_timeout)
+
+ # Skip caching if timeout is CACHE_DISABLED_TIMEOUT (no caching requested)
+ if cache_timeout != CACHE_DISABLED_TIMEOUT:
+ cache.set(cache_key, obj, timeout=cache_timeout)
return obj
return wrapped_f
diff --git a/superset/viz.py b/superset/viz.py
index 25915a7e64..b821033c24 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -44,6 +44,7 @@ from geopy.point import Point
from pandas.tseries.frequencies import to_offset
from superset.common.db_query_status import QueryStatus
+from superset.constants import CACHE_DISABLED_TIMEOUT
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import (
CacheLoadError,
@@ -527,7 +528,7 @@ class BaseViz: # pylint: disable=too-many-public-methods
stacktrace = None
df = None
cache_timeout = self.cache_timeout
- force = self.force or cache_timeout == -1
+ force = self.force or cache_timeout == CACHE_DISABLED_TIMEOUT
if cache_key and cache_manager.data_cache and not force:
cache_value = cache_manager.data_cache.get(cache_key)
if cache_value:
diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py
index bccd093c6e..7e76a93cec 100644
--- a/tests/integration_tests/charts/data/api_tests.py
+++ b/tests/integration_tests/charts/data/api_tests.py
@@ -33,6 +33,7 @@ from superset.charts.data.api import ChartDataRestApi
from superset.commands.chart.data.get_data_command import ChartDataCommand
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from superset.connectors.sqla.models import SqlaTable, TableColumn
+from superset.constants import CACHE_DISABLED_TIMEOUT
from superset.errors import SupersetErrorType
from superset.extensions import async_query_manager_factory, db
from superset.models.annotations import AnnotationLayer
@@ -1488,7 +1489,7 @@ def test_time_filter_with_grain(test_client, login_as_admin, physical_query_cont
def test_force_cache_timeout(test_client, login_as_admin, physical_query_context):
- physical_query_context["custom_cache_timeout"] = -1
+ physical_query_context["custom_cache_timeout"] = CACHE_DISABLED_TIMEOUT
test_client.post(CHART_DATA_URI, json=physical_query_context)
rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
assert rv.json["result"][0]["cached_dttm"] is None