jenkins-bot has submitted this change and it was merged. ( 
https://gerrit.wikimedia.org/r/376721 )

Change subject: Adding the fb-contrib extension to spotbugs
......................................................................


Adding the fb-contrib extension to spotbugs

While this has quite a few false positives, it seems to have a few rules which
make sense. Let me know if you think this is useful or should be dropped.

Change-Id: I41093398b1894083f1ca7237f14ecbf192241eda
---
M pom.xml
A src/dev-tools/spotbugs-excludes.xml
M src/main/java/org/wikimedia/search/extra/fuzzylike/FuzzyLikeThisQuery.java
M src/main/java/org/wikimedia/search/extra/latency/LatencyStatsAction.java
M src/main/java/org/wikimedia/search/extra/latency/SearchLatencyListener.java
M 
src/main/java/org/wikimedia/search/extra/levenshtein/LevenshteinDistanceScore.java
M src/main/java/org/wikimedia/search/extra/lombok.config
M src/main/java/org/wikimedia/search/extra/regex/SourceRegexQueryBuilder.java
M 
src/main/java/org/wikimedia/search/extra/regex/UnacceleratedSourceRegexQuery.java
M 
src/main/java/org/wikimedia/search/extra/regex/expression/AbstractCompositeExpression.java
M src/main/java/org/wikimedia/search/extra/regex/ngram/NGramAutomaton.java
M 
src/main/java/org/wikimedia/search/extra/router/AbstractRouterQueryBuilder.java
M 
src/main/java/org/wikimedia/search/extra/router/DegradedRouterQueryBuilder.java
13 files changed, 111 insertions(+), 34 deletions(-)

Approvals:
  jenkins-bot: Verified
  DCausse: Looks good to me, approved



diff --git a/pom.xml b/pom.xml
index b1e55d3..64c3bb0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -78,6 +78,14 @@
         <configuration>
           <effort>high</effort>
           <threshold>low</threshold>
+          
<excludeFilterFile>${project.basedir}/src/dev-tools/spotbugs-excludes.xml</excludeFilterFile>
+          <plugins>
+            <plugin>
+              <groupId>com.mebigfatguy.fb-contrib</groupId>
+              <artifactId>fb-contrib</artifactId>
+              <version>7.0.4.sb</version>
+            </plugin>
+          </plugins>
         </configuration>
         <executions>
           <execution>
diff --git a/src/dev-tools/spotbugs-excludes.xml 
b/src/dev-tools/spotbugs-excludes.xml
new file mode 100644
index 0000000..3039b90
--- /dev/null
+++ b/src/dev-tools/spotbugs-excludes.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0"?>
+<FindBugsFilter>
+    <!-- the following bugs are always ignored -->
+    <Match>
+        <!--
+            Manually sizing collections is error prone and in most cases tend
+            to be a micro optimization.
+        -->
+        <Bug pattern="PSC_PRESIZE_COLLECTIONS"/>
+    </Match>
+
+    <Match>
+        <!-- Softening exception is an acceptable pattern in this project. -->
+        <Or>
+            <Bug pattern="EXS_EXCEPTION_SOFTENING_HAS_CHECKED"/>
+            <Bug pattern="EXS_EXCEPTION_SOFTENING_NO_CONSTRAINTS"/>
+        </Or>
+    </Match>
+
+    <Match>
+        <!--
+            Inverting comparison order when String literals are involved leads
+            to unnatural code. Proper null checking should always be done
+            anyway.
+        -->
+        <Bug pattern="LSC_LITERAL_STRING_COMPARISON"/>
+    </Match>
+
+    <!-- specific exceptions -->
+    <Match 
classregex="org\.wikimedia\.search\.extra\.regex\.SourceRegexQueryBuilder.*">
+        <!-- In this case, an array is accessed with known locations in array 
-->
+        <Bug pattern="CLI_CONSTANT_LIST_INDEX"/>
+    </Match>
+
+</FindBugsFilter>
\ No newline at end of file
diff --git 
a/src/main/java/org/wikimedia/search/extra/fuzzylike/FuzzyLikeThisQuery.java 
b/src/main/java/org/wikimedia/search/extra/fuzzylike/FuzzyLikeThisQuery.java
index 2ed2746..e876512 100644
--- a/src/main/java/org/wikimedia/search/extra/fuzzylike/FuzzyLikeThisQuery.java
+++ b/src/main/java/org/wikimedia/search/extra/fuzzylike/FuzzyLikeThisQuery.java
@@ -17,6 +17,7 @@
  * limitations under the License.
  */
 
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import lombok.EqualsAndHashCode;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
@@ -186,7 +187,10 @@
     }
   }
 
-  @Override
+    @Override
+    @SuppressFBWarnings(
+          value = "PCAIL_POSSIBLE_CONSTANT_ALLOCATION_IN_LOOP",
+          justification = "builder should not be reused")
     public Query rewrite(IndexReader reader) throws IOException
     {
         //load up the list of possible terms
diff --git 
a/src/main/java/org/wikimedia/search/extra/latency/LatencyStatsAction.java 
b/src/main/java/org/wikimedia/search/extra/latency/LatencyStatsAction.java
index 2b67355..1232f9e 100644
--- a/src/main/java/org/wikimedia/search/extra/latency/LatencyStatsAction.java
+++ b/src/main/java/org/wikimedia/search/extra/latency/LatencyStatsAction.java
@@ -2,6 +2,7 @@
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Sets;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import lombok.AccessLevel;
 import lombok.Getter;
 import org.elasticsearch.action.Action;
@@ -127,6 +128,9 @@
             super(node);
             empty();
         }
+        @SuppressFBWarnings(
+                value = "PCOA_PARTIALLY_CONSTRUCTED_OBJECT_ACCESS",
+                justification = "readFrom has a well understood contract")
         LatencyStatsNodeResponse(StreamInput in) throws IOException {
             readFrom(in);
         }
@@ -143,7 +147,7 @@
             statDetails.readFrom(in);
         }
 
-        void empty() {
+        final void empty() {
             statDetails = new StatDetails();
         }
 
@@ -192,6 +196,9 @@
                     .collect(toList());
         }
 
+        @SuppressFBWarnings(
+                value = "PCOA_PARTIALLY_CONSTRUCTED_OBJECT_ACCESS",
+                justification = "readFrom has a well understood contract")
         StatDetails(StreamInput in) throws IOException {
             readFrom(in);
         }
@@ -206,6 +213,7 @@
         }
 
         @Override
+        @SuppressFBWarnings("PRMC_POSSIBLY_REDUNDANT_METHOD_CALLS")
         public XContentBuilder toXContent(XContentBuilder builder, Params 
params) throws IOException {
             Map<String, List<LatencyStat>> byBucket = latencies.stream()
                     .collect(groupingBy(LatencyStat::getBucket));
diff --git 
a/src/main/java/org/wikimedia/search/extra/latency/SearchLatencyListener.java 
b/src/main/java/org/wikimedia/search/extra/latency/SearchLatencyListener.java
index 0e9792e..0e2bedd 100644
--- 
a/src/main/java/org/wikimedia/search/extra/latency/SearchLatencyListener.java
+++ 
b/src/main/java/org/wikimedia/search/extra/latency/SearchLatencyListener.java
@@ -1,6 +1,7 @@
 package org.wikimedia.search.extra.latency;
 
 import com.google.common.annotations.VisibleForTesting;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import org.HdrHistogram.Histogram;
 import org.HdrHistogram.Recorder;
 import org.elasticsearch.common.component.AbstractLifecycleComponent;
@@ -45,7 +46,7 @@
 
     @Override
     protected void doStart() {
-        if (cancelRotation != null) {
+        if (cancelRotation == null) {
             cancelRotation = 
threadPoolSupplier.get().scheduleWithFixedDelay(this::rotate, ROTATION_DELAY, 
ThreadPool.Names.GENERIC);
         }
     }
@@ -84,6 +85,7 @@
                 .collect(toList());
     }
 
+    @SuppressFBWarnings("PRMC_POSSIBLY_REDUNDANT_METHOD_CALLS")
     public void onQueryPhase(SearchContext searchContext, long tookInNanos) {
         if (searchContext.groupStats() == null) {
             return;
diff --git 
a/src/main/java/org/wikimedia/search/extra/levenshtein/LevenshteinDistanceScore.java
 
b/src/main/java/org/wikimedia/search/extra/levenshtein/LevenshteinDistanceScore.java
index 6bfd9a1..e6af566 100644
--- 
a/src/main/java/org/wikimedia/search/extra/levenshtein/LevenshteinDistanceScore.java
+++ 
b/src/main/java/org/wikimedia/search/extra/levenshtein/LevenshteinDistanceScore.java
@@ -81,7 +81,7 @@
             }
 
             @Override
-            public Explanation explainScore(int docId, Explanation 
subQueryScore) throws IOException {
+            public Explanation explainScore(int docId, Explanation 
subQueryScore) {
                 double score = score(docId, subQueryScore.getValue());
                 String explanation = "LevenshteinDistanceScore";
                 explanation += " with parameters:\n text:" + value;
diff --git a/src/main/java/org/wikimedia/search/extra/lombok.config 
b/src/main/java/org/wikimedia/search/extra/lombok.config
index e572d96..7cc30ff 100644
--- a/src/main/java/org/wikimedia/search/extra/lombok.config
+++ b/src/main/java/org/wikimedia/search/extra/lombok.config
@@ -1 +1,2 @@
-lombok.equalsAndHashCode.callSuper = call
\ No newline at end of file
+lombok.equalsAndHashCode.callSuper = call
+lombok.extern.findbugs.addSuppressFBWarnings = true
diff --git 
a/src/main/java/org/wikimedia/search/extra/regex/SourceRegexQueryBuilder.java 
b/src/main/java/org/wikimedia/search/extra/regex/SourceRegexQueryBuilder.java
index 50023d5..2f8724a 100644
--- 
a/src/main/java/org/wikimedia/search/extra/regex/SourceRegexQueryBuilder.java
+++ 
b/src/main/java/org/wikimedia/search/extra/regex/SourceRegexQueryBuilder.java
@@ -1,5 +1,6 @@
 package org.wikimedia.search.extra.regex;
 
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import lombok.AccessLevel;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
@@ -36,6 +37,7 @@
 @Accessors(chain = true, fluent = true)
 @Getter
 @Setter
+@SuppressFBWarnings("CLI_CONSTANT_LIST_INDEX")
 public class SourceRegexQueryBuilder extends 
AbstractQueryBuilder<SourceRegexQueryBuilder> {
     public static final ParseField NAME = new ParseField("source_regex", 
"sourceRegex", "source-regex");
 
@@ -48,27 +50,29 @@
     public static final boolean DEFAULT_LOAD_FROM_SOURCE = true;
     public static final int DEFAULT_GRAM_SIZE = 3;
 
-    private static final ConstructingObjectParser<SourceRegexQueryBuilder, 
QueryParseContext> PARSER;
+    private static final ConstructingObjectParser<SourceRegexQueryBuilder, 
QueryParseContext> PARSER = constructParser();
 
-    static {
-        PARSER = new ConstructingObjectParser<SourceRegexQueryBuilder, 
QueryParseContext>(NAME.getPreferredName(),
+    private static ConstructingObjectParser<SourceRegexQueryBuilder, 
QueryParseContext> constructParser() {
+        ConstructingObjectParser<SourceRegexQueryBuilder, QueryParseContext> 
parser =
+                new ConstructingObjectParser<>(NAME.getPreferredName(),
                 (o) -> new SourceRegexQueryBuilder((String) o[0], (String) 
o[1]));
-        PARSER.declareString(constructorArg(), FIELD);
-        PARSER.declareString(constructorArg(), REGEX);
-        PARSER.declareBoolean(SourceRegexQueryBuilder::loadFromSource, 
LOAD_FROM_SOURCE);
-        PARSER.declareString(SourceRegexQueryBuilder::ngramField, NGRAM_FIELD);
-        PARSER.declareInt(SourceRegexQueryBuilder::gramSize, GRAM_SIZE);
-        PARSER.declareInt((x,i) -> x.settings().maxExpand(i), 
Settings.MAX_EXPAND);
-        PARSER.declareInt((x,i) -> x.settings().maxStatesTraced(i), 
Settings.MAX_STATES_TRACED);
-        PARSER.declareInt((x,i) -> x.settings().maxDeterminizedStates(i), 
Settings.MAX_DETERMINIZED_STATES);
-        PARSER.declareInt((x,i) -> x.settings().maxNgramsExtracted(i), 
Settings.MAX_NGRAMS_EXTRACTED);
-        PARSER.declareInt((x,i) -> x.settings().maxInspect(i), 
Settings.MAX_INSPECT);
-        PARSER.declareBoolean((x,b) -> x.settings().caseSensitive(b), 
Settings.CASE_SENSITIVE);
-        PARSER.declareString((x,s) -> 
x.settings().locale(LocaleUtils.parse(s)), Settings.LOCALE);
-        PARSER.declareBoolean((x,b) -> x.settings().rejectUnaccelerated(b), 
Settings.REJECT_UNACCELERATED);
-        PARSER.declareInt((x,i) -> x.settings().maxNgramClauses(i), 
Settings.MAX_NGRAM_CLAUSES);
-        PARSER.declareString((x,s) -> x.settings().timeout(s), 
Settings.TIMEOUT);
-        declareStandardFields(PARSER);
+        parser.declareString(constructorArg(), FIELD);
+        parser.declareString(constructorArg(), REGEX);
+        parser.declareBoolean(SourceRegexQueryBuilder::loadFromSource, 
LOAD_FROM_SOURCE);
+        parser.declareString(SourceRegexQueryBuilder::ngramField, NGRAM_FIELD);
+        parser.declareInt(SourceRegexQueryBuilder::gramSize, GRAM_SIZE);
+        parser.declareInt((x,i) -> x.settings().maxExpand(i), 
Settings.MAX_EXPAND);
+        parser.declareInt((x,i) -> x.settings().maxStatesTraced(i), 
Settings.MAX_STATES_TRACED);
+        parser.declareInt((x,i) -> x.settings().maxDeterminizedStates(i), 
Settings.MAX_DETERMINIZED_STATES);
+        parser.declareInt((x,i) -> x.settings().maxNgramsExtracted(i), 
Settings.MAX_NGRAMS_EXTRACTED);
+        parser.declareInt((x,i) -> x.settings().maxInspect(i), 
Settings.MAX_INSPECT);
+        parser.declareBoolean((x,b) -> x.settings().caseSensitive(b), 
Settings.CASE_SENSITIVE);
+        parser.declareString((x,s) -> 
x.settings().locale(LocaleUtils.parse(s)), Settings.LOCALE);
+        parser.declareBoolean((x,b) -> x.settings().rejectUnaccelerated(b), 
Settings.REJECT_UNACCELERATED);
+        parser.declareInt((x,i) -> x.settings().maxNgramClauses(i), 
Settings.MAX_NGRAM_CLAUSES);
+        parser.declareString((x,s) -> x.settings().timeout(s), 
Settings.TIMEOUT);
+        declareStandardFields(parser);
+        return parser;
     }
 
     private final String field;
diff --git 
a/src/main/java/org/wikimedia/search/extra/regex/UnacceleratedSourceRegexQuery.java
 
b/src/main/java/org/wikimedia/search/extra/regex/UnacceleratedSourceRegexQuery.java
index 3fe3052..976390e 100644
--- 
a/src/main/java/org/wikimedia/search/extra/regex/UnacceleratedSourceRegexQuery.java
+++ 
b/src/main/java/org/wikimedia/search/extra/regex/UnacceleratedSourceRegexQuery.java
@@ -152,7 +152,7 @@
 
         private static final Collector NULL_COLLECTOR = new SimpleCollector() {
             @Override
-            public void collect(int doc) throws IOException {}
+            public void collect(int doc) {}
             @Override
             public boolean needsScores() {
                 return true;
diff --git 
a/src/main/java/org/wikimedia/search/extra/regex/expression/AbstractCompositeExpression.java
 
b/src/main/java/org/wikimedia/search/extra/regex/expression/AbstractCompositeExpression.java
index 6d4dfc7..bd6ed0e 100644
--- 
a/src/main/java/org/wikimedia/search/extra/regex/expression/AbstractCompositeExpression.java
+++ 
b/src/main/java/org/wikimedia/search/extra/regex/expression/AbstractCompositeExpression.java
@@ -2,6 +2,7 @@
 
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Sets;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import lombok.EqualsAndHashCode;
 
 import java.util.ArrayList;
@@ -244,6 +245,10 @@
     }
 
     @Override
+    @SuppressFBWarnings(
+            value = "STT_TOSTRING_STORED_IN_FIELD",
+            justification = "Here we do want to cache the value of " +
+                    "toString() as its computation can be expensive.")
     public String toString() {
         if (toString != null) {
             return toString;
@@ -261,7 +266,7 @@
             } else {
                 b.append(toStringJoiner());
             }
-            b.append(component.toString());
+            b.append(component);
         }
         b.append(')');
         if (b.length() > MAX_COMPONENT_STRING_LENGTH) {
diff --git 
a/src/main/java/org/wikimedia/search/extra/regex/ngram/NGramAutomaton.java 
b/src/main/java/org/wikimedia/search/extra/regex/ngram/NGramAutomaton.java
index ce9967c..de73ff0 100644
--- a/src/main/java/org/wikimedia/search/extra/regex/ngram/NGramAutomaton.java
+++ b/src/main/java/org/wikimedia/search/extra/regex/ngram/NGramAutomaton.java
@@ -1,6 +1,7 @@
 package org.wikimedia.search.extra.regex.ngram;
 
 import com.google.common.collect.ImmutableSet;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import lombok.EqualsAndHashCode;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
@@ -27,6 +28,9 @@
  * A finite automaton who's transitions are ngrams that must be in the string 
or
  * ngrams we can't check for. Not thread safe one bit.
  */
+@SuppressFBWarnings(value = "DLC_DUBIOUS_LIST_COLLECTION", justification = 
"Need more time to investigate")
+// TODO: It might be possible to convert acceptStates to a Set, which would be
+// more efficient and better represent the intent if this is actually the case.
 public class NGramAutomaton {
     private final Automaton source;
     private final int gramSize;
@@ -82,10 +86,10 @@
                 b.append(" 
[shape=circle,label=\"").append(state).append("\"];\n");
             }
             if (state.initial) {
-                b.append("  initial -> ").append(state.dotName()).append("\n");
+                b.append("  initial -> ").append(state.dotName()).append('\n');
             }
             for (NGramTransition transition : state.outgoingTransitions) {
-                b.append("  ").append(transition).append("\n");
+                b.append("  ").append(transition).append('\n');
             }
         }
         return b.append("}\n").toString();
@@ -222,7 +226,7 @@
                     throw new IllegalArgumentException("Analyzer provided 
generate more than one tokens, " +
                             "if using 3grams make sure to use a 3grams 
analyzer, " +
                             "for input [" + ngram + "] first is [" + ngram + 
"] " +
-                            "but [" + cattr.toString() + "] was generated.");
+                            "but [" + cattr + "] was generated.");
                 }
             }
         } catch (IOException ioe) {
@@ -356,7 +360,7 @@
             StringBuilder b = new StringBuilder();
             b.append(from.dotName()).append(" -> ").append(to.dotName());
             if (ngram != null) {
-                b.append(" [label=\"").append(ngram.replace(" ", 
"_")).append("\"]");
+                b.append(" [label=\"").append(ngram.replace(' ', 
'_')).append("\"]");
             }
             return b.toString();
         }
diff --git 
a/src/main/java/org/wikimedia/search/extra/router/AbstractRouterQueryBuilder.java
 
b/src/main/java/org/wikimedia/search/extra/router/AbstractRouterQueryBuilder.java
index 180616f..5a151b8 100644
--- 
a/src/main/java/org/wikimedia/search/extra/router/AbstractRouterQueryBuilder.java
+++ 
b/src/main/java/org/wikimedia/search/extra/router/AbstractRouterQueryBuilder.java
@@ -1,6 +1,7 @@
 package org.wikimedia.search.extra.router;
 
 import com.google.common.annotations.VisibleForTesting;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
 import lombok.AccessLevel;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
@@ -90,14 +91,17 @@
     }
 
     @Override
+    @SuppressFBWarnings("ACEM_ABSTRACT_CLASS_EMPTY_METHODS")
     protected Query doToQuery(QueryShardContext queryShardContext) throws 
IOException {
         throw new UnsupportedOperationException("This query must be 
rewritten.");
     }
 
+    @SuppressFBWarnings("ACEM_ABSTRACT_CLASS_EMPTY_METHODS")
     protected void addXContent(XContentBuilder builder, Params params) throws 
IOException {
     }
 
     @Override
+    @SuppressFBWarnings("PRMC_POSSIBLY_REDUNDANT_METHOD_CALLS")
     protected void doXContent(XContentBuilder builder, Params params) throws 
IOException {
         builder.startObject(getWriteableName());
         if (fallback() != null) {
@@ -116,6 +120,7 @@
         builder.endObject();
     }
 
+    @SuppressFBWarnings(value = "OCP_OVERLY_CONCRETE_PARAMETER", justification 
= "No need to be generic in this case")
     static <C extends Condition, CPS extends AbstractConditionParserState<C>> 
C parseCondition(
             ObjectParser<CPS, QueryParseContext> condParser, XContentParser 
parser, QueryParseContext parseContext
     ) throws IOException {
@@ -125,6 +130,7 @@
     }
 
 
+    @SuppressFBWarnings(value = "LEST_LOST_EXCEPTION_STACK_TRACE", 
justification = "The new exception contains all needed context")
     static <QB extends AbstractRouterQueryBuilder<?, QB>> Optional<QB> 
fromXContent(
             ObjectParser<QB, QueryParseContext> objectParser, 
QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
@@ -234,7 +240,7 @@
 
         abstract C condition();
 
-        void checkValid() throws IllegalArgumentException {
+        void checkValid() {
             if (query == null) {
                 throw new IllegalArgumentException("Missing field [query] in 
condition");
             }
diff --git 
a/src/main/java/org/wikimedia/search/extra/router/DegradedRouterQueryBuilder.java
 
b/src/main/java/org/wikimedia/search/extra/router/DegradedRouterQueryBuilder.java
index 88a98b9..86921ad 100644
--- 
a/src/main/java/org/wikimedia/search/extra/router/DegradedRouterQueryBuilder.java
+++ 
b/src/main/java/org/wikimedia/search/extra/router/DegradedRouterQueryBuilder.java
@@ -142,7 +142,7 @@
         load((bucket, percentile, stats) -> stats.get1MinuteLoadAverage()),
         latency((bucket, percentile, stats) -> stats.getLatency(bucket, 
percentile)) {
             @Override
-            public void checkValid(String bucket, Double percentile) throws 
IllegalArgumentException {
+            public void checkValid(String bucket, Double percentile) {
                 if (bucket == null) {
                     throw new IllegalArgumentException("Missing field [bucket] 
in condition");
                 }
@@ -175,7 +175,7 @@
             return values()[ord];
         }
 
-        void checkValid(String bucket, Double percentile) throws 
IllegalArgumentException {
+        void checkValid(String bucket, Double percentile) {
             if (bucket != null) {
                 throw new IllegalArgumentException("Extra field [bucket] in 
condition");
             }
@@ -197,7 +197,7 @@
         }
 
         @Override
-        void checkValid() throws IllegalArgumentException {
+        void checkValid() {
             super.checkValid();
             type.checkValid(bucket, percentile);
         }

-- 
To view, visit https://gerrit.wikimedia.org/r/376721
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings

Gerrit-MessageType: merged
Gerrit-Change-Id: I41093398b1894083f1ca7237f14ecbf192241eda
Gerrit-PatchSet: 5
Gerrit-Project: search/extra
Gerrit-Branch: master
Gerrit-Owner: Gehel <guillaume.leder...@wikimedia.org>
Gerrit-Reviewer: DCausse <dcau...@wikimedia.org>
Gerrit-Reviewer: EBernhardson <ebernhard...@wikimedia.org>
Gerrit-Reviewer: Gehel <guillaume.leder...@wikimedia.org>
Gerrit-Reviewer: jenkins-bot <>

_______________________________________________
MediaWiki-commits mailing list
MediaWiki-commits@lists.wikimedia.org
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits

Reply via email to