This is an automated email from the ASF dual-hosted git repository.

ab pushed a commit to branch branch_9x
in repository https://gitbox.apache.org/repos/asf/solr.git


The following commit(s) were added to refs/heads/branch_9x by this push:
     new 276b6109766 SOLR-17831: Fix the bug in QueryLimits initialization in SolrIndexSearcher. (#3448)
276b6109766 is described below

commit 276b6109766ff83ee774eb5c282de811a4ca8772
Author: Andrzej Białecki <[email protected]>
AuthorDate: Fri Aug 1 16:14:04 2025 +0200

    SOLR-17831: Fix the bug in QueryLimits initialization in SolrIndexSearcher. (#3448)
    
    Cherry-pick from PR #3446.
---
 solr/CHANGES.txt                                   |   9 +
 .../java/org/apache/solr/search/QueryLimits.java   |   4 +-
 .../org/apache/solr/search/SolrIndexSearcher.java  |  32 ++-
 .../solr/core/ExitableDirectoryReaderTest.java     | 225 +++++----------------
 ...rectoryReaderTest.java => TimeAllowedTest.java} |   6 +-
 .../solr/search/CallerSpecificQueryLimit.java      | 109 ----------
 .../org/apache/solr/search/TestQueryLimits.java    |  16 +-
 .../solr/search/CallerSpecificQueryLimit.java      | 213 +++++++++++++++++++
 .../solr/search/CallerSpecificQueryLimitTest.java  | 151 ++++++++++++++
 9 files changed, 465 insertions(+), 300 deletions(-)

diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index f0f9b65f403..f65dab8dd9a 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -34,6 +34,15 @@ Other Changes
 ---------------------
 (No changes)
 
+==================  9.9.1 ==================
+Bug Fixes
+---------------------
+* SOLR-17831: ExitableDirectoryReader always initialized with QueryLimits.NONE (Andrzej Białecki)
+
+Dependency Upgrades
+---------------------
+(No changes)
+
 ==================  9.9.0 ==================
 New Features
 ---------------------
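
For context on the entry above: the reader was previously wrapped with whatever QueryLimits happened to be current when the searcher was created, which in practice is QueryLimits.NONE, so per-request limits never reached the ExitableDirectoryReader. The commit instead hands Lucene a QueryTimeout that resolves the current limits on every check. A minimal illustrative sketch of that delegation pattern (the class name here is made up; the commit's own version is the QueryLimitsTimeout inner class shown further below):

    import org.apache.lucene.index.QueryTimeout;
    import org.apache.solr.search.QueryLimits;

    // Sketch only: delegate each check to the limits attached to the current
    // request instead of freezing a QueryLimits instance at wrap time.
    final class CurrentLimitsTimeout implements QueryTimeout {
      @Override
      public boolean shouldExit() {
        // Looked up at call time, so each request's timeAllowed/cpu/memory limits apply.
        return QueryLimits.getCurrentLimits().shouldExit();
      }
    }
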
diff --git a/solr/core/src/java/org/apache/solr/search/QueryLimits.java b/solr/core/src/java/org/apache/solr/search/QueryLimits.java
index e6e0db5eed9..60d27f06128 100644
--- a/solr/core/src/java/org/apache/solr/search/QueryLimits.java
+++ b/solr/core/src/java/org/apache/solr/search/QueryLimits.java
@@ -37,12 +37,12 @@ import org.apache.solr.util.TestInjection;
  * or other resource limits. Exceeding any specified limit will cause {@link #shouldExit()} to
  * return true the next time it is checked (it may be checked in either Lucene code or Solr code)
  */
-public class QueryLimits implements QueryTimeout {
+public final class QueryLimits implements QueryTimeout {
   public static final String UNLIMITED = "This request is unlimited.";
   private final List<QueryLimit> limits =
       new ArrayList<>(3); // timeAllowed, cpu, and memory anticipated
 
-  public static QueryLimits NONE = new QueryLimits();
+  public static final QueryLimits NONE = new QueryLimits();
 
   private final SolrQueryResponse rsp;
   private final boolean allowPartialResults;
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index f4d0304ec35..c82588fc147 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -51,6 +51,7 @@ import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.MultiPostingsEnum;
 import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.index.QueryTimeout;
 import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.Terms;
@@ -137,16 +138,15 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
 
   public static final String STATS_SOURCE = "org.apache.solr.stats_source";
   public static final String STATISTICS_KEY = "searcher";
+
+  public static final String EXITABLE_READER_PROPERTY = "solr.useExitableDirectoryReader";
+
   // These should *only* be used for debugging or monitoring purposes
   public static final AtomicLong numOpens = new AtomicLong();
   public static final AtomicLong numCloses = new AtomicLong();
  private static final Map<String, SolrCache<?, ?>> NO_GENERIC_CACHES = Collections.emptyMap();
   private static final SolrCache<?, ?>[] NO_CACHES = new SolrCache<?, ?>[0];
 
-  // If you find this useful, let us know in [email protected].  Likely to be removed eventually.
-  private static final boolean useExitableDirectoryReader =
-      Boolean.getBoolean("solr.useExitableDirectoryReader");
-
  public static final int EXECUTOR_MAX_CPU_THREADS = Runtime.getRuntime().availableProcessors();
 
   private final SolrCore core;
@@ -207,14 +207,31 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
     }
   }
 
+  private static final class QueryLimitsTimeout implements QueryTimeout {
+    private static final QueryLimitsTimeout INSTANCE = new QueryLimitsTimeout();
+
+    private QueryLimitsTimeout() {}
+
+    @Override
+    public boolean shouldExit() {
+      return QueryLimits.getCurrentLimits().shouldExit();
+    }
+
+    @Override
+    public String toString() {
+      return QueryLimits.getCurrentLimits().limitStatusMessage();
+    }
+  }
+
   // TODO: wrap elsewhere and return a "map" from the schema that overrides get() ?
   // this reader supports reopen
  private static DirectoryReader wrapReader(SolrCore core, DirectoryReader reader)
       throws IOException {
     assert reader != null;
    reader = UninvertingReader.wrap(reader, core.getLatestSchema().getUninversionMapper());
-    if (useExitableDirectoryReader) { // SOLR-16693 legacy; may be removed.  Probably inefficient.
-      reader = ExitableDirectoryReader.wrap(reader, QueryLimits.getCurrentLimits());
+    // see SOLR-16693 and SOLR-17831 for more details
+    if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY, Boolean.FALSE)) {
+      reader = ExitableDirectoryReader.wrap(reader, QueryLimitsTimeout.INSTANCE);
     }
     return reader;
   }
@@ -775,7 +792,8 @@ public class SolrIndexSearcher extends IndexSearcher implements Closeable, SolrI
  protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector)
       throws IOException {
     QueryLimits queryLimits = QueryLimits.getCurrentLimits();
-    if (useExitableDirectoryReader || !queryLimits.isLimitsEnabled()) {
+    if (EnvUtils.getPropertyAsBool(EXITABLE_READER_PROPERTY, Boolean.FALSE)
+        || !queryLimits.isLimitsEnabled()) {
       // no timeout.  Pass through to super class
       super.search(leaves, weight, collector);
     } else {
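
With this change the legacy wrapping is controlled by reading the solr.useExitableDirectoryReader property through EnvUtils each time a reader is wrapped, instead of a static field captured once at class load, so tests can toggle it per core. A small illustrative sketch of enabling it in a test, mirroring how the updated ExitableDirectoryReaderTest below does it (the example class name is hypothetical):

    import org.apache.solr.SolrTestCaseJ4;
    import org.apache.solr.search.SolrIndexSearcher;
    import org.junit.After;
    import org.junit.Test;

    public class ExitableReaderTogglingExample extends SolrTestCaseJ4 {
      @Test
      public void testWithWrappedReader() throws Exception {
        // Must be set before the core (and its first searcher) is created.
        System.setProperty(SolrIndexSearcher.EXITABLE_READER_PROPERTY, "true");
        initCore("solrconfig-delaying-component.xml", "schema_latest.xml");
        assertJQ(req("q", "*:*"), "/response/numFound==0");
      }

      @After
      public void cleanup() {
        deleteCore();
        System.clearProperty(SolrIndexSearcher.EXITABLE_READER_PROPERTY);
      }
    }
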
diff --git a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
index f2e4aaa3c8f..86e4dcc51a0 100644
--- a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
+++ b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
@@ -16,33 +16,18 @@
  */
 package org.apache.solr.core;
 
-import static org.apache.solr.common.util.Utils.fromJSONString;
-
 import java.util.Map;
 import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.metrics.MetricsMap;
-import org.apache.solr.metrics.SolrMetricManager;
-import org.apache.solr.response.SolrQueryResponse;
-import org.junit.BeforeClass;
+import org.apache.solr.search.CallerSpecificQueryLimit;
+import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.util.TestInjection;
+import org.junit.After;
 import org.junit.Test;
 
-/**
- * Test that checks that long-running queries are exited by Solr using the SolrQueryTimeoutImpl
- * implementation.
- */
 public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 {
-
   static final int NUM_DOCS = 100;
   static final String assertionString = "/response/numFound==" + NUM_DOCS;
  static final String failureAssertionString = "/responseHeader/partialResults==true]";
-  static final String longTimeout = "10000";
-  static final String sleep = "2";
-
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    initCore("solrconfig-delaying-component.xml", "schema_latest.xml");
-    createIndex();
-  }
 
   public static void createIndex() {
     for (int i = 0; i < NUM_DOCS; i++) {
@@ -59,167 +44,65 @@ public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 {
     assertU(commit());
   }
 
-  @Test
-  public void testPrefixQuery() throws Exception {
-    String q = "name:a*";
-    assertJQ(req("q", q, "timeAllowed", "1", "sleep", sleep), 
failureAssertionString);
-
-    // do the same query and test for both success, and that the number of documents matched (i.e.
-    // make sure no partial results were cached)
-    assertJQ(req("q", q, "timeAllowed", longTimeout), assertionString);
-
-    // this time we should get a query cache hit and hopefully no exception?  this may change in the
-    // future if time checks are put into other places.
-
-    // 2024-4-15: it did change..., and now this fails with 1 or 2 ms and passes with 3ms... I see
-    // no way this won't be terribly brittle. Maybe TestInjection of some sort to bring this back?
-
-    // assertJQ(req("q", q, "timeAllowed", "2", "sleep", sleep), 
assertionString);
-
-    // The idea that the request won't time out due to caching is a flawed test methodology,
-    // It relies on the test running quickly and not stalling. The above test should possibly
-    // be doing something along the lines of this (but we lack api for it)
-    //
-    //    SolrCores solrCores = ExitableDirectoryReaderTest.h.getCoreContainer().solrCores;
-    //    List<SolrCore> cores = solrCores.getCores();
-    //    for (SolrCore core : cores) {
-    //      if (<<< find the right core >>> ) {
-    //        ((SolrCache)core.getSearcher().get().<<<check cache for a key like name:a* >>>
-    //      }
-    //    }
-
-    // now do the same for the filter cache
-    // 2024-4-15: this still passes probably because *:* is so fast, but it still worries me
-    assertJQ(req("q", "*:*", "fq", q, "timeAllowed", "1", "sleep", sleep), 
failureAssertionString);
-
-    // make sure that the result succeeds this time, and that a bad filter wasn't cached
-    assertJQ(req("q", "*:*", "fq", q, "timeAllowed", longTimeout), 
assertionString);
-
-    // test that Long.MAX_VALUE works
-    assertJQ(req("q", "name:b*", "timeAllowed", 
Long.toString(Long.MAX_VALUE)), assertionString);
-
-    // negative timeAllowed should disable timeouts.
-    assertJQ(req("q", "name:c*", "timeAllowed", "-7"), assertionString);
+  @After
+  public void tearDownCore() {
+    deleteCore();
   }
 
-  // There are lots of assumptions about how/when cache entries should be changed in this method.
-  // The simple case above shows the root problem without the confusion. testFilterSimpleCase should
-  // be removed once it is running and this test should be un-ignored and the assumptions verified.
-  // With all the weirdness, I'm not going to vouch for this test. Feel free to change it.
   @Test
-  public void testCacheAssumptions() throws Exception {
-    String fq = "name:d*";
-    SolrCore core = h.getCore();
-    MetricsMap filterCacheStats =
-        (MetricsMap)
-            ((SolrMetricManager.GaugeWrapper<?>)
-                    core.getCoreMetricManager()
-                        .getRegistry()
-                        .getMetrics()
-                        .get("CACHE.searcher.filterCache"))
-                .getGauge();
-    long fqInserts = (long) filterCacheStats.getValue().get("inserts");
-
-    MetricsMap queryCacheStats =
-        (MetricsMap)
-            ((SolrMetricManager.GaugeWrapper<?>)
-                    core.getCoreMetricManager()
-                        .getRegistry()
-                        .getMetrics()
-                        .get("CACHE.searcher.queryResultCache"))
-                .getGauge();
-    long qrInserts = (long) queryCacheStats.getValue().get("inserts");
-
-    // This gets 0 docs back. Use 10000 instead of 1 for timeAllowed, and it gets 100 back and the
-    // for loop below succeeds.
-    String response =
-        JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", "1", 
"sleep", sleep));
-    Map<?, ?> res = (Map<?, ?>) fromJSONString(response);
-    Map<?, ?> body = (Map<?, ?>) (res.get("response"));
-    assertTrue("Should have fewer docs than " + NUM_DOCS, (long) 
(body.get("numFound")) < NUM_DOCS);
-
-    Map<?, ?> header = (Map<?, ?>) (res.get("responseHeader"));
-    assertTrue(
-        "Should have partial results",
-        (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)));
-
-    assertEquals(
-        "Should NOT have inserted partial results in the cache!",
-        (long) queryCacheStats.getValue().get("inserts"),
-        qrInserts);
-
-    assertEquals(
-        "Should NOT have another insert",
-        fqInserts,
-        (long) filterCacheStats.getValue().get("inserts"));
-
-    // At the end of all this, we should have no hits in the queryResultCache.
-    response = JQ(req("q", "*:*", "fq", fq, "indent", "true", "timeAllowed", longTimeout));
-
-    // Check that we did insert this one.
-    assertEquals("Hits should still be 0", (long) 
filterCacheStats.getValue().get("hits"), 0L);
-    assertEquals(
-        "Inserts should be bumped",
-        (long) filterCacheStats.getValue().get("inserts"),
-        fqInserts + 1);
-
-    res = (Map<?, ?>) fromJSONString(response);
-    body = (Map<?, ?>) (res.get("response"));
-
-    assertEquals("Should have exactly " + NUM_DOCS, (long) 
(body.get("numFound")), NUM_DOCS);
-    header = (Map<?, ?>) (res.get("responseHeader"));
-    assertNull(
-        "Should NOT have partial results",
-        header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY));
+  public void testWithExitableDirectoryReader() throws Exception {
+    doTestWithExitableDirectoryReader(true);
   }
 
-  // When looking at a problem raised on the user's list I ran across this anomaly with timeAllowed
-  // This tests for the second query NOT returning partial results, along with some other
   @Test
-  public void testQueryResults() throws Exception {
-    String q = "name:e*";
-    SolrCore core = h.getCore();
-    MetricsMap queryCacheStats =
-        (MetricsMap)
-            ((SolrMetricManager.GaugeWrapper<?>)
-                    core.getCoreMetricManager()
-                        .getRegistry()
-                        .getMetrics()
-                        .get("CACHE.searcher.queryResultCache"))
-                .getGauge();
-    Map<String, Object> nl = queryCacheStats.getValue();
-    long inserts = (long) nl.get("inserts");
-
-    String response = JQ(req("q", q, "indent", "true", "timeAllowed", "1", "sleep", sleep));
-
-    // The queryResultCache should NOT get an entry here.
-    nl = queryCacheStats.getValue();
-    assertEquals("Should NOT have inserted partial results!", inserts, (long) 
nl.get("inserts"));
-
-    Map<?, ?> res = (Map<?, ?>) fromJSONString(response);
-    Map<?, ?> body = (Map<?, ?>) (res.get("response"));
-    Map<?, ?> header = (Map<?, ?>) (res.get("responseHeader"));
-
-    assertTrue("Should have fewer docs than " + NUM_DOCS, (long) 
(body.get("numFound")) < NUM_DOCS);
-    assertTrue(
-        "Should have partial results",
-        (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY)));
-
-    response = JQ(req("q", q, "indent", "true", "timeAllowed", longTimeout));
+  public void testWithoutExitableDirectoryReader() throws Exception {
+    doTestWithExitableDirectoryReader(false);
+  }
 
-    // Check that we did insert this one.
-    Map<String, Object> nl2 = queryCacheStats.getValue();
-    assertEquals("Hits should still be 0", (long) nl.get("hits"), (long) 
nl2.get("hits"));
-    assertTrue("Inserts should be bumped", inserts < (long) 
nl2.get("inserts"));
+  private void doTestWithExitableDirectoryReader(boolean withExitableDirectoryReader)
+      throws Exception {
+    System.setProperty(
+        SolrIndexSearcher.EXITABLE_READER_PROPERTY, String.valueOf(withExitableDirectoryReader));
+    initCore("solrconfig-delaying-component.xml", "schema_latest.xml");
+    createIndex();
 
-    res = (Map<?, ?>) fromJSONString(response);
-    body = (Map<?, ?>) (res.get("response"));
-    header = (Map<?, ?>) (res.get("responseHeader"));
+    // create a limit that will not trip but will report calls to shouldExit()
+    // NOTE: we need to use the inner class name to capture the calls
+    String callerExpr = "ExitableTermsEnum:-1";
+    CallerSpecificQueryLimit queryLimit = new CallerSpecificQueryLimit(callerExpr);
+    TestInjection.queryTimeout = queryLimit;
+    String q = "name:a*";
+    assertJQ(req("q", q), assertionString);
+    Map<String, Integer> callCounts = queryLimit.getCallCounts();
+    if (withExitableDirectoryReader) {
+      assertTrue(
+          "there should be some calls from ExitableTermsEnum: " + callCounts,
+          callCounts.get(callerExpr) > 0);
+    } else {
+      assertEquals(
+          "there should be no calls from ExitableTermsEnum", 0, (int) 
callCounts.get(callerExpr));
+    }
 
-    assertEquals("Should have exactly " + NUM_DOCS, NUM_DOCS, (long) 
(body.get("numFound")));
-    Boolean test = (Boolean) (header.get(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY));
-    if (test != null) {
-      assertFalse("Should NOT have partial results", test);
+    // check that the limits are tripped in ExitableDirectoryReader if it's in use
+    int maxCount = random().nextInt(10) + 1;
+    callerExpr = "ExitableTermsEnum:" + maxCount;
+    queryLimit = new CallerSpecificQueryLimit(callerExpr);
+    TestInjection.queryTimeout = queryLimit;
+    // avoid using the cache
+    q = "name:b*";
+    if (withExitableDirectoryReader) {
+      assertJQ(req("q", q), failureAssertionString);
+    } else {
+      assertJQ(req("q", q), assertionString);
+    }
+    callCounts = queryLimit.getCallCounts();
+    if (withExitableDirectoryReader) {
+      assertTrue(
+          "there should be at least " + maxCount + " calls from 
ExitableTermsEnum: " + callCounts,
+          callCounts.get(callerExpr) >= maxCount);
+    } else {
+      assertEquals(
+          "there should be no calls from ExitableTermsEnum", 0, (int) 
callCounts.get(callerExpr));
     }
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/core/TimeAllowedTest.java
similarity index 98%
copy from solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
copy to solr/core/src/test/org/apache/solr/core/TimeAllowedTest.java
index f2e4aaa3c8f..32521802d13 100644
--- a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
+++ b/solr/core/src/test/org/apache/solr/core/TimeAllowedTest.java
@@ -27,10 +27,10 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 /**
- * Test that checks that long-running queries are exited by Solr using the SolrQueryTimeoutImpl
- * implementation.
+ * Test that checks that long-running queries are exited by Solr using the {@link
+ * org.apache.solr.search.QueryLimits} implementation.
  */
-public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 {
+public class TimeAllowedTest extends SolrTestCaseJ4 {
 
   static final int NUM_DOCS = 100;
   static final String assertionString = "/response/numFound==" + NUM_DOCS;
diff --git a/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java b/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java
deleted file mode 100644
index 96a6bd5ee93..00000000000
--- a/solr/core/src/test/org/apache/solr/search/CallerSpecificQueryLimit.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.search;
-
-import java.lang.invoke.MethodHandles;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Helper class to simulate query timeouts at specific points in various components that call {@link
- * QueryLimits#shouldExit()}. These calling points are identified by the calling class' simple name
- * and optionally a method name, e.g. <code>MoreLikeThisComponent</code> or <code>
- * ClusteringComponent.finishStage</code>.
- */
-public class CallerSpecificQueryLimit implements QueryLimit {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-  final StackWalker stackWalker =
-      StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE);
-  final Map<String, Set<String>> interestingCallers = new HashMap<>();
-  public String trippedBy;
-
-  /**
-   * Signal a timeout in places that match the calling classes (and methods).
-   *
-   * @param callerExprs list of expressions in the format of <code>simpleClassName[.methodName]
-   *     </code>. If the list is empty or null then the first call to {@link #shouldExit()} from any
-   *     caller will match.
-   */
-  public CallerSpecificQueryLimit(String... callerExprs) {
-    if (callerExprs != null && callerExprs.length > 0) {
-      for (String callerExpr : callerExprs) {
-        String[] clazzMethod = callerExpr.split("\\.");
-        if (clazzMethod.length > 2) {
-          throw new RuntimeException("Invalid callerExpr: " + callerExpr);
-        }
-        Set<String> methods =
-            interestingCallers.computeIfAbsent(clazzMethod[0], c -> new HashSet<>());
-        if (clazzMethod.length > 1) {
-          methods.add(clazzMethod[1]);
-        }
-      }
-    }
-  }
-
-  @Override
-  public boolean shouldExit() {
-    Optional<String> matchingExpr =
-        stackWalker.walk(
-            s ->
-                s.filter(
-                        frame -> {
-                          Class<?> declaring = frame.getDeclaringClass();
-                          // skip bottom-most frames: myself and QueryLimits
-                          if (declaring == this.getClass() || declaring == QueryLimits.class) {
-                            return false;
-                          }
-                          String method = frame.getMethodName();
-                          if (interestingCallers.isEmpty()) {
-                            // any caller is an interesting caller
-                            return true;
-                          }
-                          Set<String> methods = interestingCallers.get(declaring.getSimpleName());
-                          if (methods == null) {
-                            return false;
-                          }
-                          return methods.isEmpty() || methods.contains(method);
-                        })
-                    .map(
-                        frame ->
-                            (frame.getDeclaringClass().getSimpleName().isBlank()
-                                    ? frame.getClassName()
-                                    : frame.getDeclaringClass().getSimpleName())
-                                + "."
-                                + frame.getMethodName())
-                    .findFirst());
-    if (matchingExpr.isPresent()) {
-      if (log.isInfoEnabled()) {
-        log.info("++++ Limit tripped by caller: {} ++++", matchingExpr.get());
-      }
-      trippedBy = matchingExpr.get();
-    }
-    return matchingExpr.isPresent();
-  }
-
-  @Override
-  public Object currentValue() {
-    return "This class just for testing, not a real limit";
-  }
-}
diff --git a/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java b/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java
index 7b984bbf05b..01d7447c181 100644
--- a/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java
+++ b/solr/core/src/test/org/apache/solr/search/TestQueryLimits.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.search;
 
+import java.util.Map;
 import org.apache.lucene.tests.util.TestUtil;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
@@ -66,7 +67,7 @@ public class TestQueryLimits extends SolrCloudTestCase {
           "SearchHandler.handleRequestBody",
           "QueryComponent",
           "QueryComponent.process",
-          "FacetComponent.process"
+          "FacetComponent.process:2"
         };
     for (String matchingExpr : matchingExprTests) {
      CallerSpecificQueryLimit limit = new CallerSpecificQueryLimit(matchingExpr);
@@ -88,13 +89,12 @@ public class TestQueryLimits extends SolrCloudTestCase {
       assertNotNull(
           "should have partial results for expr " + matchingExpr,
           rsp.getHeader().get("partialResults"));
-      if (matchingExpr.contains(".")) {
-        assertEquals(matchingExpr, limit.trippedBy);
-      } else {
-        assertTrue(
-            "expected result to start with " + matchingExpr + " but was " + 
limit.trippedBy,
-            limit.trippedBy.startsWith(matchingExpr));
-      }
+      assertFalse("should have trippedBy info", 
limit.getTrippedBy().isEmpty());
+      assertTrue(
+          "expected result to start with " + matchingExpr + " but was " + 
limit.getTrippedBy(),
+          limit.getTrippedBy().iterator().next().startsWith(matchingExpr));
+      Map<String, Integer> callCounts = limit.getCallCounts();
+      assertTrue("call count should be > 0", callCounts.get(matchingExpr) > 0);
     }
   }
 }
diff --git a/solr/test-framework/src/java/org/apache/solr/search/CallerSpecificQueryLimit.java b/solr/test-framework/src/java/org/apache/solr/search/CallerSpecificQueryLimit.java
new file mode 100644
index 00000000000..a979e1eff7d
--- /dev/null
+++ b/solr/test-framework/src/java/org/apache/solr/search/CallerSpecificQueryLimit.java
@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import java.lang.invoke.MethodHandles;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Collectors;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Helper class to simulate query timeouts at specific points in various components that call {@link
+ * QueryLimits#shouldExit()}. These calling points are identified by the calling class' simple name
+ * and optionally a method name and the optional maximum count, e.g. <code>MoreLikeThisComponent
+ * </code> or <code>
+ * ClusteringComponent.finishStage</code>, <code>ClusteringComponent.finishStage:100</code>.
+ *
+ * <p>NOTE: implementation details cause the expression <code>simpleName</code> to be disabled when
+ * also any <code>simpleName.anyMethod[:NNN]</code> expression is used for the same class name.
+ *
+ * <p>NOTE 2: when maximum count is a negative number e.g. <code>simpleName.someMethod:-1</code>
+ * then only the number of calls to {@link QueryLimits#shouldExit()} for that expression will be
+ * reported but no limit will be enforced.
+ */
+public class CallerSpecificQueryLimit implements QueryLimit {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private final StackWalker stackWalker =
+      StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE);
+  // className -> set of method names
+  private final Map<String, Set<String>> interestingCallers = new HashMap<>();
+  // expr -> initial count
+  private final Map<String, Integer> maxCounts = new HashMap<>();
+  // expr -> current count
+  private final Map<String, AtomicInteger> callCounts = new HashMap<>();
+  private Set<String> trippedBy = new LinkedHashSet<>();
+
+  /**
+   * Signal a timeout in places that match the calling classes (and methods).
+   *
+   * @param callerExprs list of expressions in the format of <code>
+   *     simpleClassName[.methodName][:NNN]</code>. If the list is empty or null then the first call
+   *     to {@link #shouldExit()} from any caller will match.
+   */
+  public CallerSpecificQueryLimit(String... callerExprs) {
+    this(callerExprs != null ? Arrays.asList(callerExprs) : List.of());
+  }
+
+  public CallerSpecificQueryLimit(Collection<String> callerExprs) {
+    for (String callerExpr : callerExprs) {
+      String[] exprCount = callerExpr.split(":");
+      if (exprCount.length > 2) {
+        throw new RuntimeException("Invalid count in callerExpr: " + 
callerExpr);
+      }
+      String[] clazzMethod = exprCount[0].split("\\.");
+      if (clazzMethod.length > 2) {
+        throw new RuntimeException("Invalid method in callerExpr: " + 
callerExpr);
+      }
+      Set<String> methods =
+          interestingCallers.computeIfAbsent(clazzMethod[0], c -> new HashSet<>());
+      if (clazzMethod.length > 1) {
+        methods.add(clazzMethod[1]);
+      }
+      if (exprCount.length > 1) {
+        try {
+          int count = Integer.parseInt(exprCount[1]);
+          maxCounts.put(exprCount[0], count);
+          callCounts.put(exprCount[0], new AtomicInteger(0));
+        } catch (NumberFormatException e) {
+          throw new RuntimeException("Invalid count in callerExpr: " + 
callerExpr, e);
+        }
+      }
+    }
+  }
+
+  /** Returns the set of caller expressions that were tripped. */
+  public Set<String> getTrippedBy() {
+    return trippedBy;
+  }
+
+  /** Returns a map of tripped caller expressions to their current call counts. */
+  public Map<String, Integer> getCallCounts() {
+    return callCounts.entrySet().stream()
+        .collect(
+            Collectors.toMap(
+                e ->
+                    e.getKey()
+                        + (maxCounts.containsKey(e.getKey())
+                            ? ":" + maxCounts.get(e.getKey())
+                            : ""),
+                e -> e.getValue().get()));
+  }
+
+  @Override
+  public boolean shouldExit() {
+    Optional<String> matchingExpr =
+        stackWalker.walk(
+            s ->
+                s.filter(
+                        frame -> {
+                          Class<?> declaring = frame.getDeclaringClass();
+                          // skip bottom-most frames: myself and QueryLimits
+                          if (declaring == this.getClass() || declaring == QueryLimits.class) {
+                            return false;
+                          }
+                          String method = frame.getMethodName();
+                          if (interestingCallers.isEmpty()) {
+                            // any caller is an offending caller
+                            String expr = declaring.getSimpleName() + "." + method;
+                            if (log.isInfoEnabled()) {
+                              log.info("++++ Limit tripped by any first 
caller: {} ++++", expr);
+                            }
+                            trippedBy.add(expr);
+                            callCounts
+                                .computeIfAbsent(expr, k -> new AtomicInteger())
+                                .incrementAndGet();
+                            return true;
+                          }
+                          Set<String> methods = interestingCallers.get(declaring.getSimpleName());
+                          if (methods == null) {
+                            // no class and no methods specified for this class, so skip
+                            return false;
+                          }
+                          // MATCH. Class name was specified, possibly with methods.
+                          // If methods is empty then all methods match, otherwise only the
+                          // specified methods match.
+                          if (methods.isEmpty() || methods.contains(method)) {
+                            String expr = declaring.getSimpleName();
+                            if (methods.contains(method)) {
+                              expr = expr + "." + method;
+                            } else {
+                              // even though we don't match/enforce at the method level, still
+                              // record the method counts to give better insight into the callers
+                              callCounts
+                                  .computeIfAbsent(
+                                      declaring.getSimpleName() + "." + method,
+                                      k -> new AtomicInteger(0))
+                                  .incrementAndGet();
+                            }
+                            int currentCount =
+                                callCounts
+                                    .computeIfAbsent(expr, k -> new AtomicInteger(0))
+                                    .incrementAndGet();
+                            // check if we have a max count for this expression
+                            if (maxCounts.containsKey(expr)) {
+                              int maxCount = maxCounts.getOrDefault(expr, 0);
+                              // if max count is negative then just report the call count
+                              if (maxCount < 0) {
+                                maxCount = Integer.MAX_VALUE;
+                              }
+                              if (currentCount > maxCount) {
+                                if (log.isInfoEnabled()) {
+                                  log.info(
+                                      "++++ Limit tripped by caller: {}, 
current count: {}, max: {} ++++",
+                                      expr,
+                                      currentCount,
+                                      maxCounts.get(expr));
+                                }
+                                trippedBy.add(expr + ":" + 
maxCounts.get(expr));
+                                return true;
+                              } else {
+                                return false; // max count not reached, not tripped yet
+                              }
+                            } else {
+                              trippedBy.add(expr);
+                              if (log.isInfoEnabled()) {
+                                log.info("++++ Limit tripped by caller: {} 
++++", expr);
+                              }
+                              return true; // no max count, so tripped on first call
+                            }
+                          } else {
+                            return false;
+                          }
+                        })
+                    .map(
+                        frame ->
+                            (frame.getDeclaringClass().getSimpleName().isBlank()
+                                    ? frame.getClassName()
+                                    : frame.getDeclaringClass().getSimpleName())
+                                + "."
+                                + frame.getMethodName())
+                    .findFirst());
+    return matchingExpr.isPresent();
+  }
+
+  @Override
+  public Object currentValue() {
+    return "This class just for testing, not a real limit";
+  }
+}
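
A brief usage sketch for the expression syntax documented in the javadoc above, following the pattern the updated ExitableDirectoryReaderTest uses (the helper class and method names here are illustrative): an expression like "ExitableTermsEnum:3" lets three matching calls through and trips once that count is exceeded, while a negative count such as "ExitableTermsEnum:-1" only records call counts without ever tripping.

    import java.util.Map;
    import java.util.Set;
    import org.apache.solr.search.CallerSpecificQueryLimit;
    import org.apache.solr.util.TestInjection;

    class CallerLimitSketch {
      static void installReportOnlyLimit() {
        // Count shouldExit() calls made from ExitableTermsEnum but never trip the limit.
        CallerSpecificQueryLimit limit = new CallerSpecificQueryLimit("ExitableTermsEnum:-1");
        TestInjection.queryTimeout = limit; // picked up by QueryLimits during test requests
        // ... run a request against a test core, then inspect what was observed ...
        Map<String, Integer> counts = limit.getCallCounts(); // expr (incl. ":NNN" suffix) -> call count
        Set<String> tripped = limit.getTrippedBy();          // empty here, since -1 never trips
        System.out.println(counts + " tripped=" + tripped);
      }
    }
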
diff --git a/solr/test-framework/src/test/org/apache/solr/search/CallerSpecificQueryLimitTest.java b/solr/test-framework/src/test/org/apache/solr/search/CallerSpecificQueryLimitTest.java
new file mode 100644
index 00000000000..da8f58c73cf
--- /dev/null
+++ b/solr/test-framework/src/test/org/apache/solr/search/CallerSpecificQueryLimitTest.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.apache.solr.SolrTestCaseJ4;
+import org.junit.Test;
+
+public class CallerSpecificQueryLimitTest extends SolrTestCaseJ4 {
+
+  private static class LimitedWorker {
+    private final CallerSpecificQueryLimit limit;
+
+    static class NestedLimitedWorker {
+      private final CallerSpecificQueryLimit limit;
+
+      NestedLimitedWorker(CallerSpecificQueryLimit limit) {
+        this.limit = limit;
+      }
+
+      public void doWork() {
+        limit.shouldExit();
+      }
+    }
+
+    private NestedLimitedWorker nestedLimitedWorker;
+
+    LimitedWorker(CallerSpecificQueryLimit limit) {
+      this.limit = limit;
+      nestedLimitedWorker = new NestedLimitedWorker(limit);
+    }
+
+    public void doWork() {
+      nestedLimitedWorker.doWork();
+      limit.shouldExit();
+    }
+  }
+
+  private static class LimitedWorker2 {
+    private final CallerSpecificQueryLimit limit;
+
+    LimitedWorker2(CallerSpecificQueryLimit limit) {
+      this.limit = limit;
+    }
+
+    public void doWork() {
+      limit.shouldExit();
+    }
+  }
+
+  private static final boolean[] values = {true, false};
+
+  @Test
+  public void testLimits() {
+    for (boolean withNested : values) {
+      for (boolean withMethod : values) {
+        for (boolean withCount : values) {
+          for (boolean shouldTrip : values) {
+            doTestWithLimit(withNested, withMethod, withCount, shouldTrip);
+          }
+        }
+      }
+    }
+  }
+
+  private void doTestWithLimit(
+      boolean withNested, boolean withMethod, boolean withCount, boolean shouldTrip) {
+    List<String> nonMatchingCallerExprs = new ArrayList<>();
+    List<String> matchingCallCounts = new ArrayList<>();
+    String matchingClassName =
+        withNested
+            ? LimitedWorker.NestedLimitedWorker.class.getSimpleName()
+            : LimitedWorker.class.getSimpleName();
+    String callerExpr = matchingClassName;
+    if (withMethod) {
+      callerExpr += ".doWork";
+    }
+    int count = 1;
+    if (withCount) {
+      count = random().nextInt(9) + 1;
+      if (shouldTrip) {
+        callerExpr += ":" + count;
+      } else {
+        callerExpr += ":-" + count;
+      }
+    } else if (!shouldTrip) {
+      callerExpr += ":-1"; // no limit, should not trip
+    }
+    nonMatchingCallerExprs.add(LimitedWorker2.class.getSimpleName());
+    nonMatchingCallerExprs.add(LimitedWorker2.class.getSimpleName() + ".doWork");
+    if (withNested) {
+      nonMatchingCallerExprs.add(LimitedWorker.class.getSimpleName());
+    } else {
+      nonMatchingCallerExprs.add(LimitedWorker.NestedLimitedWorker.class.getSimpleName());
+    }
+    matchingCallCounts.add(callerExpr);
+    if (!withMethod) {
+      matchingCallCounts.add(matchingClassName + ".doWork");
+    }
+
+    CallerSpecificQueryLimit limit = new CallerSpecificQueryLimit(callerExpr);
+    LimitedWorker limitedWorker = new LimitedWorker(limit);
+    LimitedWorker2 limitedWorker2 = new LimitedWorker2(limit);
+    for (int i = 0; i < count * 2; i++) {
+      limitedWorker2.doWork();
+      limitedWorker.doWork();
+    }
+    Set<String> trippedBy = limit.getTrippedBy();
+    if (shouldTrip) {
+      assertFalse("Limit should have been tripped, callerExpr: " + callerExpr, 
trippedBy.isEmpty());
+      for (String nonMatchingCallerExpr : nonMatchingCallerExprs) {
+        assertFalse(
+            "Limit should not have been tripped by "
+                + nonMatchingCallerExpr
+                + " but was: "
+                + trippedBy,
+            trippedBy.contains(nonMatchingCallerExpr));
+      }
+    } else {
+      assertTrue(
+          "Limit should not have been tripped, callerExpr: "
+              + callerExpr
+              + ", trippedBy: "
+              + trippedBy,
+          trippedBy.isEmpty());
+    }
+    Map<String, Integer> callCounts = limit.getCallCounts();
+    for (String matchingCallCount : matchingCallCounts) {
+      assertTrue(
+          "Call count for " + matchingCallCount + " should be > 0, callCounts: 
" + callCounts,
+          callCounts.getOrDefault(matchingCallCount, 0) > 0);
+    }
+  }
+}

