This is an automated email from the ASF dual-hosted git repository. akitouni pushed a commit to branch abderrahim/minimal-cache-query in repository https://gitbox.apache.org/repos/asf/buildstream.git
commit 9e4b088a9c911fbb38292534b2ed1ff39185687d Author: Abderrahim Kitouni <[email protected]> AuthorDate: Thu Oct 31 21:40:55 2024 +0100 _stream: Only query cache for the requested elements query_cache() used to query the cache for the elements passed and all their (build and runtime) dependencies. This is wasteful since, most of the time, we don't need all of these cache queries. And for the cases where we do, it is the job of the query_cache() caller to include the dependencies they need. --- src/buildstream/_stream.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/buildstream/_stream.py b/src/buildstream/_stream.py index 2266b27f0..ac85db1dd 100644 --- a/src/buildstream/_stream.py +++ b/src/buildstream/_stream.py @@ -195,8 +195,6 @@ class Stream: assert not sources_of_cached_elements or not only_sources with self._context.messenger.simple_task("Query cache", silent_nested=True) as task: - # Enqueue complete build plan as this is required to determine `buildable` status. - plan = list(_pipeline.dependencies(elements, _Scope.ALL)) if self._context.remote_cache_spec: # Parallelize cache queries if a remote cache is configured @@ -207,11 +205,11 @@ class Stream: ), track=True, ) - self._enqueue_plan(plan) + self._enqueue_plan(elements) self._run() else: - task.set_maximum_progress(len(plan)) - for element in plan: + task.set_maximum_progress(len(elements)) + for element in elements: if element._can_query_cache(): # Cache status already available. # This is the case for artifact elements, which load the
