gianm closed pull request #6490: Fix various bugs; Enable more IntelliJ inspections and update error-prone
URL: https://github.com/apache/incubator-druid/pull/6490

This is a PR merged from a forked repository. As GitHub hides the original diff on merge, it is displayed below for the sake of provenance:


diff --git a/.idea/inspectionProfiles/Druid.xml b/.idea/inspectionProfiles/Druid.xml
index b1748e7c921..4ada21411dc 100644
--- a/.idea/inspectionProfiles/Druid.xml
+++ b/.idea/inspectionProfiles/Druid.xml
@@ -11,28 +11,35 @@
     <inspection_tool class="ArrayObjectsEquals" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ArraysAsListWithZeroOrOneArgument" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="AssertWithSideEffects" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="CapturingCleaner" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="CastConflictsWithInstanceof" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="CastToIncompatibleInterface" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="CatchMayIgnoreException" enabled="true" level="WARNING" enabled_by_default="true">
       <option name="m_ignoreCatchBlocksWithComments" value="false" />
     </inspection_tool>
     <inspection_tool class="CheckValidXmlInScriptTagBody" enabled="true" level="WARNING" enabled_by_default="true" />
+    <inspection_tool class="ClassGetClass" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ClassNewInstance" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="CollectionAddedToSelf" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ComparableImplementedButEqualsNotOverridden" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="ComparatorMethodParameterNotUsed" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="ComparatorResultComparison" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="CompareToUsesNonFinalVariable" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ConstantAssertCondition" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="Contract" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="CopyConstructorMissesField" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="CovariantEquals" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="EmptyInitializer" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="EmptyStatementBody" enabled="true" level="WARNING" enabled_by_default="true">
       <option name="m_reportEmptyBlocks" value="true" />
       <option name="commentsAreContent" value="true" />
     </inspection_tool>
+    <inspection_tool class="EndlessStream" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="EqualsAndHashcode" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="EqualsBetweenInconvertibleTypes" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="EqualsOnSuspiciousObject" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="EqualsUsesNonFinalVariable" enabled="true" level="WARNING" enabled_by_default="true" />
+    <inspection_tool class="EqualsWhichDoesntCheckParameterClass" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="EqualsWithItself" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="FieldCanBeLocal" enabled="true" level="WARNING" enabled_by_default="true">
       <option name="EXCLUDE_ANNOS">
@@ -72,6 +79,17 @@
     <inspection_tool class="MathRandomCastToInt" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="MavenModelInspection" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="MismatchedArrayReadWrite" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="MismatchedCollectionQueryUpdate" enabled="true" level="ERROR" enabled_by_default="true">
+      <option name="queryNames">
+        <value />
+      </option>
+      <option name="updateNames">
+        <value />
+      </option>
+      <option name="ignoredClasses">
+        <value />
+      </option>
+    </inspection_tool>
     <inspection_tool class="MismatchedStringBuilderQueryUpdate" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="MissingOverrideAnnotation" enabled="true" level="WARNING" enabled_by_default="true">
       <scope name="NonGeneratedFiles" level="ERROR" enabled="true">
@@ -93,6 +111,7 @@
     </inspection_tool>
     <inspection_tool class="ObjectEqualsNull" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="ObjectToString" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="OverwrittenKey" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="PrimitiveArrayArgumentToVariableArgMethod" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="RedundantThrows" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="RedundantTypeArguments" enabled="true" level="ERROR" enabled_by_default="true" />
@@ -182,22 +201,38 @@
         <constraint name="E" regexp="java\.lang\.UnsupportedOperationException" within="" contains="" />
         <constraint name="x" minCount="0" maxCount="2147483647" within="" contains="" />
       </searchConfiguration>
+      <searchConfiguration name="Use TypeReference&lt;List&lt;...&gt;&gt; instead" created="1539884261626" text="TypeReference&lt;ArrayList&lt;$E$&gt;&gt;" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use TypeReference&lt;Map&lt;...&gt;&gt; instead" created="1539884261626" text="TypeReference&lt;HashMap&lt;$K$, $V$&gt;&gt;" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="K" within="" contains="" />
+        <constraint name="V" within="" contains="" />
+      </searchConfiguration>
+      <searchConfiguration name="Use TypeReference&lt;Set&lt;...&gt;&gt; instead" created="1539884261626" text="TypeReference&lt;HashSet&lt;$E$&gt;&gt;" recursive="false" caseInsensitive="true" type="JAVA">
+        <constraint name="__context__" target="true" within="" contains="" />
+        <constraint name="E" within="" contains="" />
+      </searchConfiguration>
     </inspection_tool>
     <inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
       <option name="processCode" value="true" />
       <option name="processLiterals" value="true" />
       <option name="processComments" value="true" />
     </inspection_tool>
-    <inspection_tool class="StaticCallOnSubclass" enabled="true" level="WARNING" enabled_by_default="true" />
-    <inspection_tool class="StaticFieldReferenceOnSubclass" enabled="true" level="WARNING" enabled_by_default="true" />
+    <inspection_tool class="StaticCallOnSubclass" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="StaticFieldReferenceOnSubclass" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StringConcatenationInFormatCall" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StringConcatenationInMessageFormatCall" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StringConcatenationMissingWhitespace" enabled="true" level="WARNING" enabled_by_default="true" />
     <inspection_tool class="StringEquality" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="StringEqualsCharSequence" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="StringTokenizerDelimiter" enabled="true" level="ERROR" enabled_by_default="true" />
-    <inspection_tool class="SubtractionInCompareTo" enabled="true" level="WARNING" enabled_by_default="true" />
+    <inspection_tool class="SubtractionInCompareTo" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="SuspiciousArrayCast" enabled="true" level="WARNING" enabled_by_default="true" />
+    <inspection_tool class="SuspiciousArrayMethodCall" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="SuspiciousIndentAfterControlStatement" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="SuspiciousListRemoveInLoop" enabled="true" level="ERROR" enabled_by_default="true" />
     <inspection_tool class="SuspiciousMethodCalls" enabled="true" level="ERROR" enabled_by_default="true">
       <option name="REPORT_CONVERTIBLE_METHOD_CALLS" value="true" />
     </inspection_tool>
@@ -226,6 +261,11 @@
       <option name="ignoreInModuleStatements" value="true" />
     </inspection_tool>
     <inspection_tool class="UnnecessaryInterfaceModifier" enabled="true" level="ERROR" enabled_by_default="true" />
+    <inspection_tool class="UnusedAssignment" enabled="true" level="ERROR" enabled_by_default="true">
+      <option name="REPORT_PREFIX_EXPRESSIONS" value="true" />
+      <option name="REPORT_POSTFIX_EXPRESSIONS" value="true" />
+      <option name="REPORT_REDUNDANT_INITIALIZER" value="true" />
+    </inspection_tool>
     <inspection_tool class="UnusedCatchParameter" enabled="true" level="WARNING" enabled_by_default="true">
       <option name="m_ignoreCatchBlocksWithComments" value="false" />
       <option name="m_ignoreTestCases" value="false" />
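The three `searchConfiguration` patterns added above flag `TypeReference` instances parameterized with concrete collection classes. A minimal sketch of the preferred shape, assuming Jackson's `TypeReference`/`ObjectMapper` (the class and method names in the sketch are illustrative, not from this PR):

```java
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;
import java.util.Map;

public class TypeReferenceStyle
{
  // Flagged by the inspection: TypeReference<HashMap<String, Object>>
  // needlessly pins deserialization to one concrete Map implementation.

  // Preferred: the interface type leaves Jackson free to pick the
  // implementation and promises callers nothing more than Map.
  private static final TypeReference<Map<String, Object>> MAP_TYPE =
      new TypeReference<Map<String, Object>>() {};

  public static Map<String, Object> parseMap(ObjectMapper mapper, String json) throws IOException
  {
    return mapper.readValue(json, MAP_TYPE);
  }
}
```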
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
index 63533a995a9..d719fe25ff8 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
@@ -95,7 +95,6 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
@@ -167,15 +166,13 @@ public int columnCacheSizeBytes()
     INDEX_MERGER_V9 = new IndexMergerV9(JSON_MAPPER, INDEX_IO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
   }

-  private static final Map<String, Map<String, Object>> SCHEMA_QUERY_MAP = new LinkedHashMap<>();
-
   private void setupQueries()
   {
     // queries for the basic schema
-    Map<String, Object> basicQueries = new LinkedHashMap<>();
     BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic");

-    QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
+    QuerySegmentSpec intervalSpec =
+        new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));

     long startMillis = basicSchema.getDataInterval().getStartMillis();
     long endMillis = basicSchema.getDataInterval().getEndMillis();
@@ -204,9 +201,7 @@ private void setupQueries()
       );
       queryAggs.add(
           new FilteredAggregatorFactory(
-              new LongSumAggregatorFactory(
-                  "_cmp_sumLongSequential", "sumLongSequential"
-              ),
+              new LongSumAggregatorFactory("_cmp_sumLongSequential", "sumLongSequential"),
               new IntervalDimFilter(
                   ColumnHolder.TIME_COLUMN_NAME,
                   Collections.singletonList(previous),
@@ -235,8 +230,6 @@ private void setupQueries()
           new TopNQueryQueryToolChest(new TopNQueryConfig(), QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()),
           QueryBenchmarkUtil.NOOP_QUERYWATCHER
       );
-
-      basicQueries.put("topNTimeCompare", queryBuilderA);
     }
     { // basic.timeseriesTimeCompare
       List<AggregatorFactory> queryAggs = new ArrayList<>();
@@ -265,24 +258,21 @@ private void setupQueries()
           )
       );

-      Druids.TimeseriesQueryBuilder timeseriesQueryBuilder = Druids.newTimeseriesQueryBuilder()
-                                                                   .dataSource("blah")
-                                                                   .granularity(Granularities.ALL)
-                                                                   .intervals(intervalSpec)
-                                                                   .aggregators(queryAggs)
-                                                                   .descending(false);
+      Druids.TimeseriesQueryBuilder timeseriesQueryBuilder = Druids
+          .newTimeseriesQueryBuilder()
+          .dataSource("blah")
+          .granularity(Granularities.ALL)
+          .intervals(intervalSpec)
+          .aggregators(queryAggs)
+          .descending(false);

       timeseriesQuery = timeseriesQueryBuilder.build();
       timeseriesFactory = new TimeseriesQueryRunnerFactory(
-          new TimeseriesQueryQueryToolChest(
-              QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()
-          ),
+          new TimeseriesQueryQueryToolChest(QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()),
           new TimeseriesQueryEngine(),
           QueryBenchmarkUtil.NOOP_QUERYWATCHER
       );
     }
-
-    SCHEMA_QUERY_MAP.put("basic", basicQueries);
   }
 
 
diff --git a/core/src/main/java/org/apache/druid/java/util/common/guava/Comparators.java b/core/src/main/java/org/apache/druid/java/util/common/guava/Comparators.java
index 500d5046bf1..fec9879826a 100644
--- a/core/src/main/java/org/apache/druid/java/util/common/guava/Comparators.java
+++ b/core/src/main/java/org/apache/druid/java/util/common/guava/Comparators.java
@@ -59,26 +59,6 @@ public int compare(@Nullable Object left, @Nullable Object right)
     return NATURAL_NULLS_FIRST;
   }

-  /**
-   * This is a "reverse" comparator.  Positive becomes negative, negative becomes positive and 0 (equal) stays the same.
-   * This was poorly named as "inverse" as it's not really inverting a true/false relationship
-   *
-   * @param baseComp
-   * @param <T>
-   * @return
-   */
-  public static <T> Comparator<T> inverse(final Comparator<T> baseComp)
-  {
-    return new Comparator<T>()
-    {
-      @Override
-      public int compare(T t, T t1)
-      {
-        return baseComp.compare(t1, t);
-      }
-    };
-  }
-
   /**
    * Use Guava Ordering.natural() instead
   *
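The removed `Comparators.inverse` has a direct JDK replacement, `Comparator.reversed()`, which the MaterializedViewSupervisor hunk further down switches to. A minimal sketch of the replacement in isolation:

```java
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class ReversedComparatorSketch
{
  public static void main(String[] args)
  {
    // Comparator.reversed() (since Java 8) does what the removed
    // inverse() wrapper did: it swaps the arguments of every comparison.
    Comparator<Integer> reversed = Comparator.<Integer>naturalOrder().reversed();

    List<Integer> values = Arrays.asList(1, 3, 2);
    values.sort(reversed);
    System.out.println(values); // [3, 2, 1]
  }
}
```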
diff --git a/core/src/main/java/org/apache/druid/java/util/emitter/core/Emitters.java b/core/src/main/java/org/apache/druid/java/util/emitter/core/Emitters.java
index b7b4423f186..00d424b88fc 100644
--- a/core/src/main/java/org/apache/druid/java/util/emitter/core/Emitters.java
+++ b/core/src/main/java/org/apache/druid/java/util/emitter/core/Emitters.java
@@ -50,7 +50,7 @@ public static Emitter create(
       Lifecycle lifecycle
   )
   {
-    Map<String, Object> jsonified = new HashMap<>();
+    Map<String, Object> jsonified;
     if (props.getProperty(LOG_EMITTER_PROP) != null) {
       jsonified = makeLoggingMap(props);
       jsonified.put("type", "logging");
diff --git a/core/src/main/java/org/apache/druid/java/util/http/client/pool/ChannelResourceFactory.java b/core/src/main/java/org/apache/druid/java/util/http/client/pool/ChannelResourceFactory.java
index b52b55d682e..9c7f4c7d0ae 100644
--- a/core/src/main/java/org/apache/druid/java/util/http/client/pool/ChannelResourceFactory.java
+++ b/core/src/main/java/org/apache/druid/java/util/http/client/pool/ChannelResourceFactory.java
@@ -77,7 +77,7 @@ public ChannelResourceFactory(
   public ChannelFuture generate(final String hostname)
   {
     log.debug("Generating: %s", hostname);
-    URL url = null;
+    URL url;
     try {
       url = new URL(hostname);
     }
diff --git a/core/src/main/java/org/apache/druid/math/expr/ExprEval.java b/core/src/main/java/org/apache/druid/math/expr/ExprEval.java
index 20982e6b0f9..60009122de1 100644
--- a/core/src/main/java/org/apache/druid/math/expr/ExprEval.java
+++ b/core/src/main/java/org/apache/druid/math/expr/ExprEval.java
@@ -52,6 +52,9 @@ public static ExprEval of(double doubleValue)

   public static ExprEval of(@Nullable String stringValue)
   {
+    if (stringValue == null) {
+      return StringExprEval.OF_NULL;
+    }
     return new StringExprEval(stringValue);
   }

@@ -180,7 +183,11 @@ public final ExprEval castTo(ExprType castTo)
         case DOUBLE:
           return this;
         case LONG:
-          return ExprEval.of(value == null ? null : asLong());
+          if (value == null) {
+            return ExprEval.of(null);
+          } else {
+            return ExprEval.of(asLong());
+          }
         case STRING:
           return ExprEval.of(asString());
       }
@@ -218,7 +225,11 @@ public final ExprEval castTo(ExprType castTo)
     {
       switch (castTo) {
         case DOUBLE:
-          return ExprEval.of(value == null ? null : asDouble());
+          if (value == null) {
+            return ExprEval.of(null);
+          } else {
+            return ExprEval.of(asDouble());
+          }
         case LONG:
           return this;
         case STRING:
@@ -236,6 +247,8 @@ public Expr toExpr()

   private static class StringExprEval extends ExprEval<String>
   {
+    private static final StringExprEval OF_NULL = new StringExprEval(null);
+
     private Number numericVal;

     private StringExprEval(@Nullable String value)
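The ternary-to-if rewrites above are a correctness fix, not just style: in `ExprEval.of(value == null ? null : asLong())` the conditional takes the boxed type `Long`, so overload resolution binds to a primitive overload and unboxing the null branch throws a `NullPointerException`. A standalone sketch of the pitfall, with a hypothetical overload set standing in for `ExprEval.of`:

```java
public class BoxedTernaryOverload
{
  static String of(long value)
  {
    return "long: " + value;
  }

  static String of(String value)
  {
    return "string: " + value;
  }

  public static void main(String[] args)
  {
    Long value = null;

    // The conditional expression types as Long, so of(long) is selected
    // and unboxing the null branch throws NullPointerException:
    // of(value == null ? null : 42L);

    // Splitting the branches lets each call bind to the right overload.
    if (value == null) {
      System.out.println(of((String) null)); // string: null
    } else {
      System.out.println(of(value.longValue()));
    }
  }
}
```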
diff --git a/core/src/test/java/org/apache/druid/collections/OrderedMergeIteratorTest.java b/core/src/test/java/org/apache/druid/collections/OrderedMergeIteratorTest.java
index c636e9d7d96..e58b83b4e1a 100644
--- a/core/src/test/java/org/apache/druid/collections/OrderedMergeIteratorTest.java
+++ b/core/src/test/java/org/apache/druid/collections/OrderedMergeIteratorTest.java
@@ -26,6 +26,7 @@

 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.NoSuchElementException;

@@ -170,10 +171,9 @@ public Integer next()
   @Test(expected = NoSuchElementException.class)
   public void testNoElementInNext()
   {
-    final ArrayList<Iterator<Integer>> iterators = new ArrayList<>();
-    OrderedMergeIterator<Integer> iter = new OrderedMergeIterator<Integer>(
+    OrderedMergeIterator<Integer> iter = new OrderedMergeIterator<>(
         Ordering.natural(),
-        iterators.iterator()
+        Collections.emptyIterator()
     );
     iter.next();
   }
@@ -181,10 +181,9 @@ public void testNoElementInNext()
   @Test(expected = UnsupportedOperationException.class)
   public void testRemove()
   {
-    final ArrayList<Iterator<Integer>> iterators = new ArrayList<>();
-    OrderedMergeIterator<Integer> iter = new OrderedMergeIterator<Integer>(
+    OrderedMergeIterator<Integer> iter = new OrderedMergeIterator<>(
         Ordering.natural(),
-        iterators.iterator()
+        Collections.emptyIterator()
     );
     iter.remove();
   }
diff --git a/core/src/test/java/org/apache/druid/java/util/common/guava/ComparatorsTest.java b/core/src/test/java/org/apache/druid/java/util/common/guava/ComparatorsTest.java
index 61ab1e4577d..816e2d5a32a 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/guava/ComparatorsTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/guava/ComparatorsTest.java
@@ -31,34 +31,6 @@
  */
 public class ComparatorsTest
 {
-  @Test
-  public void testInverse()
-  {
-    Comparator<Integer> normal = Comparators.comparable();
-    Comparator<Integer> inverted = Comparators.inverse(normal);
-
-    Assert.assertEquals(-1, normal.compare(0, 1));
-    Assert.assertEquals(1, normal.compare(1, 0));
-    Assert.assertEquals(0, normal.compare(1, 1));
-    Assert.assertEquals(1, inverted.compare(0, 1));
-    Assert.assertEquals(-1, inverted.compare(1, 0));
-    Assert.assertEquals(0, inverted.compare(1, 1));
-  }
-
-  @Test
-  public void testInverseOverflow()
-  {
-    Comparator<Integer> invertedSimpleIntegerComparator = Comparators.inverse(new Comparator<Integer>()
-    {
-      @Override
-      public int compare(Integer o1, Integer o2)
-      {
-        return o1 - o2;
-      }
-    });
-    Assert.assertTrue(invertedSimpleIntegerComparator.compare(0, Integer.MIN_VALUE) < 0);
-  }
-
   @Test
   public void testIntervalsByStartThenEnd()
   {
diff --git a/core/src/test/java/org/apache/druid/java/util/common/guava/ConcatSequenceTest.java b/core/src/test/java/org/apache/druid/java/util/common/guava/ConcatSequenceTest.java
index 481d1d11c32..cf5cabe99a8 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/guava/ConcatSequenceTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/guava/ConcatSequenceTest.java
@@ -138,7 +138,7 @@ public void cleanup(Iterator<Sequence<Integer>> iterFromMake)
               public Integer accumulate(Integer accumulated, Integer in)
               {
                 Assert.assertEquals(accumulated, in);
-                return ++accumulated;
+                return accumulated + 1;
               }
             }
         ).intValue()
@@ -154,7 +154,7 @@ public Integer accumulate(Integer accumulated, Integer in)
           public Integer accumulate(Integer accumulated, Integer in)
           {
             Assert.assertEquals(accumulated, in);
-            return ++accumulated;
+            return accumulated + 1;
           }
         }
     );
diff --git a/core/src/test/java/org/apache/druid/java/util/common/guava/SequenceTestHelper.java b/core/src/test/java/org/apache/druid/java/util/common/guava/SequenceTestHelper.java
index e4450210ae2..52082e3c339 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/guava/SequenceTestHelper.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/guava/SequenceTestHelper.java
@@ -134,7 +134,7 @@ public static void testClosed(AtomicInteger closedCounter, Sequence<Integer> seq
             @Override
             public Integer accumulate(Integer accumulated, Integer in)
             {
-              return ++accumulated;
+              return accumulated + 1;
             }
           }
       );
@@ -156,7 +156,7 @@ public Integer accumulate(Integer accumulated, Integer in)
             @Override
             public Integer accumulate(Integer accumulated, Integer in)
             {
-              return ++accumulated;
+              return accumulated + 1;
             }
           }
       );
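Together with the identical ConcatSequenceTest hunks, this applies the UnusedAssignment settings enabled in the profile above (REPORT_PREFIX_EXPRESSIONS): `return ++accumulated;` stores into a parameter that is never read again, and on a boxed `Integer` it also forces an unbox/rebox. A minimal sketch of the pattern:

```java
public class DeadPrefixIncrement
{
  // Flagged: ++accumulated writes back to the parameter, but nothing
  // reads the parameter after the return, so the store is dead.
  static Integer accumulateOld(Integer accumulated)
  {
    return ++accumulated;
  }

  // Preferred: same value, no dead assignment.
  static Integer accumulateNew(Integer accumulated)
  {
    return accumulated + 1;
  }

  public static void main(String[] args)
  {
    System.out.println(accumulateOld(1)); // 2
    System.out.println(accumulateNew(1)); // 2
  }
}
```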
diff --git a/extendedset/src/main/java/org/apache/druid/extendedset/intset/BitIterator.java b/extendedset/src/main/java/org/apache/druid/extendedset/intset/BitIterator.java
index fb96e433031..e6d3c6ea6de 100644
--- a/extendedset/src/main/java/org/apache/druid/extendedset/intset/BitIterator.java
+++ b/extendedset/src/main/java/org/apache/druid/extendedset/intset/BitIterator.java
@@ -29,7 +29,7 @@
   private boolean literalAndZeroFill;
   private int nextIndex = 0;
   private int nextOffset = 0;
-  private int next = -1;
+  private int next;

   BitIterator(ImmutableConciseSet immutableConciseSet)
   {
diff --git a/extendedset/src/main/java/org/apache/druid/extendedset/intset/ConciseSet.java b/extendedset/src/main/java/org/apache/druid/extendedset/intset/ConciseSet.java
index 782f5fcff9b..c944195de34 100755
--- a/extendedset/src/main/java/org/apache/druid/extendedset/intset/ConciseSet.java
+++ b/extendedset/src/main/java/org/apache/druid/extendedset/intset/ConciseSet.java
@@ -1272,9 +1272,9 @@ public int compareTo(IntSet o)
     final ConciseSet other = convert(o);

     // the word at the end must be the same
-    int res = this.last - other.last;
+    int res = Integer.compare(this.last, other.last);
     if (res != 0) {
-      return res < 0 ? -1 : 1;
+      return res;
     }

     // scan words from MSB to LSB
@@ -1295,9 +1295,9 @@ public int compareTo(IntSet o)
               return -1;
             }
             // compare two sequences of zeros
-            res = getSequenceCount(otherWord) - getSequenceCount(thisWord);
+            res = Integer.compare(getSequenceCount(otherWord), getSequenceCount(thisWord));
             if (res != 0) {
-              return res < 0 ? -1 : 1;
+              return res;
             }
           } else {
             if (isZeroSequence(otherWord)) {
@@ -1305,9 +1305,9 @@ public int compareTo(IntSet o)
               return 1;
             }
             // compare two sequences of ones
-            res = getSequenceCount(thisWord) - getSequenceCount(otherWord);
+            res = Integer.compare(getSequenceCount(thisWord), getSequenceCount(otherWord));
             if (res != 0) {
-              return res < 0 ? -1 : 1;
+              return res;
             }
           }
           // if the sequences are the same (both zeros or both ones)
@@ -1363,9 +1363,10 @@ public int compareTo(IntSet o)
           otherWord--;
         }
       } else {
-        res = thisWord - otherWord; // equals getLiteralBits(thisWord) - getLiteralBits(otherWord)
+        // equals compare(getLiteralBits(thisWord), getLiteralBits(otherWord))
+        res = Integer.compare(thisWord, otherWord);
         if (res != 0) {
-          return res < 0 ? -1 : 1;
+          return res;
         }
         if (--thisIndex >= 0) {
           thisWord = this.words[thisIndex];
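These hunks apply the SubtractionInCompareTo rule (promoted to ERROR in the profile above) to ConciseSet: `a - b` silently overflows when the true difference falls outside the int range, flipping the comparator's sign, whereas `Integer.compare` cannot overflow. A minimal demonstration:

```java
public class SubtractionCompareToOverflow
{
  public static void main(String[] args)
  {
    int a = 0;
    int b = Integer.MIN_VALUE;

    // 0 - Integer.MIN_VALUE overflows back to Integer.MIN_VALUE,
    // so the subtraction claims a < b even though a > b.
    System.out.println(a - b);                 // -2147483648 (wrong sign)
    System.out.println(Integer.compare(a, b)); // 1 (correct: a > b)
  }
}
```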
diff --git a/extendedset/src/main/java/org/apache/druid/extendedset/intset/ImmutableConciseSet.java b/extendedset/src/main/java/org/apache/druid/extendedset/intset/ImmutableConciseSet.java
index 06f53c8774e..43591e2b59d 100755
--- a/extendedset/src/main/java/org/apache/druid/extendedset/intset/ImmutableConciseSet.java
+++ b/extendedset/src/main/java/org/apache/druid/extendedset/intset/ImmutableConciseSet.java
@@ -216,7 +216,7 @@ public static ImmutableConciseSet complement(ImmutableConciseSet set, int length
     int endIndex = length - 1;

     int wordsWalked = 0;
-    int last = 0;
+    int last;

     WordIterator iter = set.newWordIterator();

diff --git a/extensions-contrib/azure-extensions/src/main/java/org/apache/druid/firehose/azure/AzureBlob.java b/extensions-contrib/azure-extensions/src/main/java/org/apache/druid/firehose/azure/AzureBlob.java
index 32fc16ff516..eb30a16d21d 100644
--- a/extensions-contrib/azure-extensions/src/main/java/org/apache/druid/firehose/azure/AzureBlob.java
+++ b/extensions-contrib/azure-extensions/src/main/java/org/apache/druid/firehose/azure/AzureBlob.java
@@ -30,11 +30,11 @@
 {
   @JsonProperty
   @NotNull
-  private String container = null;
+  private String container;

   @JsonProperty
   @NotNull
-  private String path = null;
+  private String path;

   @JsonCreator
   public AzureBlob(@JsonProperty("container") String container, @JsonProperty("path") String path)
diff --git a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java b/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java
index dbb5771db8f..a94b6bfd9fd 100644
--- a/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java
+++ b/extensions-contrib/cassandra-storage/src/main/java/org/apache/druid/storage/cassandra/CassandraDataSegmentPuller.java
@@ -62,9 +62,8 @@ public CassandraDataSegmentPuller(CassandraDataSegmentConfig config)
     final File tmpFile = new File(outDir, "index.zip");
     log.info("Pulling to temporary local cache [%s]", tmpFile.getAbsolutePath());

-    final FileUtils.FileCopyResult localResult;
     try {
-      localResult = RetryUtils.retry(
+      RetryUtils.retry(
           () -> {
             try (OutputStream os = new FileOutputStream(tmpFile)) {
               ChunkedStorage
diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesBlob.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesBlob.java
index 45fbdadd456..edc0bbb65c0 100644
--- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesBlob.java
+++ b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/firehose/cloudfiles/CloudFilesBlob.java
@@ -29,15 +29,15 @@
 {
   @JsonProperty
   @NotNull
-  private String container = null;
+  private String container;

   @JsonProperty
   @NotNull
-  private String path = null;
+  private String path;

   @JsonProperty
   @NotNull
-  private String region = null;
+  private String region;

   @JsonCreator
   public CloudFilesBlob(
diff --git a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java
index 9628b411c44..c54342fd32d 100644
--- a/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java
+++ b/extensions-contrib/cloudfiles-extensions/src/main/java/org/apache/druid/storage/cloudfiles/CloudFilesStorageDruidModule.java
@@ -96,7 +96,7 @@ public CloudFilesApi getCloudFilesApi(final CloudFilesAccountConfig config)
   {
     log.info("Building Cloud Files Api...");

-    Iterable<com.google.inject.Module> modules = null;
+    Iterable<com.google.inject.Module> modules;
     if (config.getUseServiceNet()) {
       log.info("Configuring Cloud Files Api to use the internal service network...");
       modules = ImmutableSet.of(new SLF4JLoggingModule(), new InternalUrlModule());
diff --git a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/org/apache/druid/firehose/kafka/KafkaSimpleConsumer.java b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/org/apache/druid/firehose/kafka/KafkaSimpleConsumer.java
index ec204f9df4d..038fb2db90f 100644
--- a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/org/apache/druid/firehose/kafka/KafkaSimpleConsumer.java
+++ b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/org/apache/druid/firehose/kafka/KafkaSimpleConsumer.java
@@ -197,7 +197,7 @@ private long getOffset(boolean earliest) throws InterruptedException
         )
     );
     OffsetRequest request = new OffsetRequest(requestInfo, kafka.api.OffsetRequest.CurrentVersion(), clientId);
-    OffsetResponse response = null;
+    OffsetResponse response;
     try {
       response = consumer.getOffsetsBefore(request);
     }
@@ -219,7 +219,7 @@ private long getOffset(boolean earliest) throws InterruptedException

   public Iterable<BytesMessageWithOffset> fetch(long offset, int timeoutMs) throws InterruptedException
   {
-    FetchResponse response = null;
+    FetchResponse response;
     Broker previousLeader = leaderBroker;
     while (true) {
       ensureConsumer(previousLeader);
diff --git a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java
index 41e58e9554a..2aa05e39ee4 100644
--- a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java
+++ b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java
@@ -327,7 +327,8 @@ void checkSegmentsAndSubmitTasks()
     // use max created_date of base segments as the version of derivative segments
     Map<Interval, String> maxCreatedDate = baseSegmentsSnapshot.lhs;
     Map<Interval, String> derivativeVersion = derivativeSegmentsSnapshot.lhs;
-    SortedMap<Interval, String> sortedToBuildInterval = new TreeMap<>(Comparators.inverse(Comparators.intervalsByStartThenEnd()));
+    SortedMap<Interval, String> sortedToBuildInterval =
+        new TreeMap<>(Comparators.intervalsByStartThenEnd().reversed());
     // find the intervals to drop and to build
     MapDifference<Interval, String> difference = Maps.difference(maxCreatedDate, derivativeVersion);
     Map<Interval, String> toBuildInterval = new HashMap<>(difference.entriesOnlyOnLeft());
diff --git a/extensions-contrib/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcIndexGeneratorJobTest.java b/extensions-contrib/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcIndexGeneratorJobTest.java
index 9f07a235e87..6aa4715920a 100644
--- a/extensions-contrib/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcIndexGeneratorJobTest.java
+++ b/extensions-contrib/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcIndexGeneratorJobTest.java
@@ -54,7 +54,7 @@
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.orc.CompressionKind;
 import org.apache.orc.OrcFile;
 import org.apache.orc.TypeDescription;
@@ -225,7 +225,7 @@ public void setUp() throws Exception
                 false,
                 false,
                 false,
-                ImmutableMap.of(JobContext.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
+                ImmutableMap.of(MRJobConfig.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
                 false,
                 true,
                 null,
diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchBuildAggregator.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchBuildAggregator.java
index 6d121b5a159..bd46fc5166f 100644
--- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchBuildAggregator.java
+++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchBuildAggregator.java
@@ -19,6 +19,7 @@

 package org.apache.druid.query.aggregation.datasketches.quantiles;

+import com.yahoo.sketches.quantiles.DoublesSketch;
 import com.yahoo.sketches.quantiles.UpdateDoublesSketch;
 import org.apache.druid.query.aggregation.Aggregator;
 import org.apache.druid.segment.ColumnValueSelector;
@@ -35,7 +36,7 @@ public DoublesSketchBuildAggregator(final ColumnValueSelector<Double> valueSelec
   {
     this.valueSelector = valueSelector;
     this.size = size;
-    sketch = UpdateDoublesSketch.builder().setK(size).build();
+    sketch = DoublesSketch.builder().setK(size).build();
   }

   @Override
diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchBuildBufferAggregator.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchBuildBufferAggregator.java
index a1e52162800..ead9a6aa280 100644
--- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchBuildBufferAggregator.java
+++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchBuildBufferAggregator.java
@@ -20,6 +20,7 @@
 package org.apache.druid.query.aggregation.datasketches.quantiles;

 import com.yahoo.memory.WritableMemory;
+import com.yahoo.sketches.quantiles.DoublesSketch;
 import com.yahoo.sketches.quantiles.UpdateDoublesSketch;
 import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
 import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
@@ -54,7 +55,7 @@ public synchronized void init(final ByteBuffer buffer, final int position)
   {
     final WritableMemory mem = getMemory(buffer);
     final WritableMemory region = mem.writableRegion(position, maxIntermediateSize);
-    final UpdateDoublesSketch sketch = UpdateDoublesSketch.builder().setK(size).build(region);
+    final UpdateDoublesSketch sketch = DoublesSketch.builder().setK(size).build(region);
     putSketch(buffer, position, sketch);
   }
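This hunk and its siblings in the datasketches extension follow the StaticCallOnSubclass rule raised to ERROR above: `builder()` is a static method declared on `DoublesSketch`, and invoking it through the `UpdateDoublesSketch` subclass obscures where it actually lives. A generic sketch of the pattern with hypothetical classes (not the datasketches API):

```java
class Base
{
  static String builder()
  {
    return "builder from Base";
  }
}

class Sub extends Base
{
}

public class StaticCallOnSubclassSketch
{
  public static void main(String[] args)
  {
    // Flagged: the method is inherited, not declared on Sub. Statics are
    // never overridden, so Sub.builder() always means Base.builder(),
    // and the subclass name only misleads the reader.
    System.out.println(Sub.builder());

    // Preferred: call through the class that declares the method.
    System.out.println(Base.builder());
  }
}
```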
 
diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchHolder.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchHolder.java
index 1b5b2bb3a00..1e70a7197d7 100644
--- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchHolder.java
+++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchHolder.java
@@ -90,6 +90,7 @@ public int compare(Sketch o1, Sketch o2)

   private static final Comparator<Memory> MEMORY_COMPARATOR = new Comparator<Memory>()
   {
+    @SuppressWarnings("SubtractionInCompareTo")
     @Override
     public int compare(Memory o1, Memory o2)
     {
diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchToQuantilesSketchPostAggregator.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchToQuantilesSketchPostAggregator.java
index 1f4575bc287..220c1cd6ac3 100644
--- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchToQuantilesSketchPostAggregator.java
+++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchToQuantilesSketchPostAggregator.java
@@ -74,7 +74,7 @@ public ArrayOfDoublesSketchToQuantilesSketchPostAggregator(
   public DoublesSketch compute(final Map<String, Object> combinedAggregators)
   {
     final ArrayOfDoublesSketch sketch = (ArrayOfDoublesSketch) getField().compute(combinedAggregators);
-    final UpdateDoublesSketch qs = UpdateDoublesSketch.builder().setK(k).build();
+    final UpdateDoublesSketch qs = DoublesSketch.builder().setK(k).build();
     final ArrayOfDoublesSketchIterator it = sketch.iterator();
     while (it.next()) {
       qs.update(it.getValues()[column - 1]); // convert 1-based column number to zero-based index
diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/GenerateTestData.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/GenerateTestData.java
index 48d8205f56a..ea9d32851b7 100644
--- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/GenerateTestData.java
+++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/GenerateTestData.java
@@ -19,6 +19,7 @@

 package org.apache.druid.query.aggregation.datasketches.quantiles;

+import com.yahoo.sketches.quantiles.DoublesSketch;
 import com.yahoo.sketches.quantiles.UpdateDoublesSketch;
 import org.apache.commons.codec.binary.Base64;

@@ -44,7 +45,7 @@ public static void main(String[] args) throws Exception
     int sequenceNumber = 0;
     for (int i = 0; i < 20; i++) {
       int product = rand.nextInt(10);
-      UpdateDoublesSketch sketch = UpdateDoublesSketch.builder().build();
+      UpdateDoublesSketch sketch = DoublesSketch.builder().build();
       for (int j = 0; j < 20; j++) {
         double value = rand.nextDouble();
         buildData.write("2016010101");
diff --git a/extensions-core/druid-kerberos/src/main/java/org/apache/druid/security/kerberos/DruidKerberosAuthenticationHandler.java b/extensions-core/druid-kerberos/src/main/java/org/apache/druid/security/kerberos/DruidKerberosAuthenticationHandler.java
index b6f91b5369d..12ebb4be4e1 100644
--- a/extensions-core/druid-kerberos/src/main/java/org/apache/druid/security/kerberos/DruidKerberosAuthenticationHandler.java
+++ b/extensions-core/druid-kerberos/src/main/java/org/apache/druid/security/kerberos/DruidKerberosAuthenticationHandler.java
@@ -143,15 +143,17 @@ public GSSManager run()
   public AuthenticationToken authenticate(HttpServletRequest request, final HttpServletResponse response)
       throws IOException, AuthenticationException
   {
-    AuthenticationToken token = null;
-    String authorization = request.getHeader(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.AUTHORIZATION);
+    AuthenticationToken token;
+    String authorization = request
+        .getHeader(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.AUTHORIZATION);

-    if (authorization == null
-        || !authorization.startsWith(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE)) {
+    if (authorization == null ||
+        !authorization.startsWith(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE)) {
       return null;
     } else {
-      authorization = authorization.substring(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE
-                                                  .length()).trim();
+      authorization = authorization
+          .substring(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE.length())
+          .trim();
       final Base64 base64 = new Base64(0);
       final byte[] clientToken = base64.decode(authorization);
       final String serverName = request.getServerName();
diff --git a/extensions-core/druid-kerberos/src/main/java/org/apache/druid/security/kerberos/KerberosAuthenticator.java b/extensions-core/druid-kerberos/src/main/java/org/apache/druid/security/kerberos/KerberosAuthenticator.java
index 394e82625ef..07b27a69930 100644
--- a/extensions-core/druid-kerberos/src/main/java/org/apache/druid/security/kerberos/KerberosAuthenticator.java
+++ b/extensions-core/druid-kerberos/src/main/java/org/apache/druid/security/kerberos/KerberosAuthenticator.java
@@ -248,7 +248,7 @@ public void doFilter(ServletRequest request, ServletResponse response, FilterCha
         if (isExcluded(path)) {
           filterChain.doFilter(request, response);
         } else {
-          String clientPrincipal = null;
+          String clientPrincipal;
           try {
             Cookie[] cookies = httpReq.getCookies();
             if (cookies == null) {
diff --git a/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java b/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java
index 630aa2447fd..03f45080865 100644
--- a/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java
+++ b/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java
@@ -73,6 +73,15 @@
  */
 public class HdfsDataSegmentPusherTest
 {
+  static TestObjectMapper objectMapper;
+
+  static {
+    objectMapper = new TestObjectMapper();
+    InjectableValues.Std injectableValues = new InjectableValues.Std();
+    injectableValues.addValue(ObjectMapper.class, objectMapper);
+    injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
+    objectMapper.setInjectableValues(injectableValues);
+  }

   @Rule
   public final TemporaryFolder tempFolder = new TemporaryFolder();
@@ -80,23 +89,15 @@
   @Rule
   public final ExpectedException expectedException = ExpectedException.none();

-  static TestObjectMapper objectMapper = new TestObjectMapper();
-
   private HdfsDataSegmentPusher hdfsDataSegmentPusher;
+
   @Before
-  public void setUp() throws IOException
+  public void setUp()
   {
     HdfsDataSegmentPusherConfig hdfsDataSegmentPusherConf = new HdfsDataSegmentPusherConfig();
     hdfsDataSegmentPusherConf.setStorageDirectory("path/to/");
     hdfsDataSegmentPusher = new HdfsDataSegmentPusher(hdfsDataSegmentPusherConf, new Configuration(true), objectMapper);
   }
-  static {
-    objectMapper = new TestObjectMapper();
-    InjectableValues.Std injectableValues = new InjectableValues.Std();
-    injectableValues.addValue(ObjectMapper.class, objectMapper);
-    injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
-    objectMapper.setInjectableValues(injectableValues);
-  }

   @Test
   public void testPushWithScheme() throws Exception
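The reshuffle above matters because static initializers run in textual order: the old layout initialized `objectMapper` at its declaration and then re-created it in a static block further down, so the first instance was a dead store, which is exactly what UnusedAssignment reports. A minimal sketch of the ordering rule, using a hypothetical `Config` stand-in:

```java
public class StaticInitOrder
{
  static class Config
  {
    Config()
    {
      System.out.println("Config constructed");
    }
  }

  // Old shape (flagged): the field initializer ran first, then a later
  // static block overwrote it, constructing Config twice:
  //   static Config config = new Config();
  //   static { config = new Config(); }

  // Fixed shape: declare the field bare and assign it exactly once.
  static Config config;

  static {
    config = new Config();
  }

  public static void main(String[] args)
  {
    System.out.println(config != null); // true; "Config constructed" prints once
  }
}
```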
diff --git a/extensions-core/histogram/src/main/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogram.java b/extensions-core/histogram/src/main/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogram.java
index 9c5c8e4e156..0135c8de18d 100644
--- a/extensions-core/histogram/src/main/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogram.java
+++ b/extensions-core/histogram/src/main/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogram.java
@@ -363,7 +363,6 @@ public void offer(float value)
       mergeValue = true;
     }
     if (deltaLeft < minDelta) {
-      minDelta = deltaLeft;
       minPos = insertAt - 1;
       mergeValue = true;
     }
@@ -1563,7 +1562,7 @@ public double sum(final float b)
       int i = 0;
       int sum = 0;
       int k = 1;
-      long count = 0;
+      long count;
       while (k <= this.binCount()) {
         count = bins[k - 1];
         if (sum + count > s) {
@@ -1583,7 +1582,7 @@ public double sum(final float b)
         final double c = -2 * d;
         final long a = bins[i] - bins[i - 1];
         final long b = 2 * bins[i - 1];
-        double z = 0;
+        double z;
         if (a == 0) {
           z = -c / b;
         } else {
diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramTest.java
index ddb71ed8327..c63ead315bb 100644
--- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramTest.java
+++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramTest.java
@@ -22,6 +22,7 @@
 import com.google.common.collect.Iterators;
 import org.apache.druid.java.util.common.StringUtils;
 import org.junit.Assert;
+import org.junit.Ignore;
 import org.junit.Test;

 import java.nio.ByteBuffer;
@@ -221,6 +222,7 @@ public void testFoldNothing2()
   }

   //@Test
+  @Ignore
   @SuppressWarnings("unused") //TODO rewrite using JMH and move to the benchmarks module
   public void testFoldSpeed()
   {
diff --git a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisor.java b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisor.java
index 8a199d4a3de..6609b2412c0 100644
--- a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisor.java
+++ b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisor.java
@@ -34,7 +34,6 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
 import com.google.common.primitives.Longs;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
@@ -43,6 +42,7 @@
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.druid.indexer.TaskLocation;
 import org.apache.druid.indexer.TaskStatus;
+import org.apache.druid.indexing.common.IndexTaskClient;
 import org.apache.druid.indexing.common.TaskInfoProvider;
 import org.apache.druid.indexing.common.stats.RowIngestionMetersFactory;
 import org.apache.druid.indexing.common.task.RealtimeIndexTask;
@@ -129,7 +129,6 @@
   private static final long INITIAL_GET_OFFSET_DELAY_MILLIS = 15000;
   private static final long INITIAL_EMIT_LAG_METRIC_DELAY_MILLIS = 25000;
   private static final int MAX_INITIALIZATION_RETRIES = 20;
-  private static final CopyOnWriteArrayList EMPTY_LIST = Lists.newCopyOnWriteArrayList();

   public static final String IS_INCREMENTAL_HANDOFF_SUPPORTED = "IS_INCREMENTAL_HANDOFF_SUPPORTED";

@@ -337,7 +336,7 @@ public TaskLocation getTaskLocation(final String id)
     this.futureTimeoutInSeconds = Math.max(
         MINIMUM_FUTURE_TIMEOUT_IN_SECONDS,
         tuningConfig.getChatRetries() * (tuningConfig.getHttpTimeout().getStandardSeconds()
-                                         + KafkaIndexTaskClient.MAX_RETRY_WAIT_SECONDS)
+                                         + IndexTaskClient.MAX_RETRY_WAIT_SECONDS)
     );

     int chatThreads = (this.tuningConfig.getChatThreads() != null
@@ -468,6 +467,12 @@ public void stop(boolean stopGracefully)
     }
   }

+  private boolean someTaskGroupsPendingCompletion(Integer groupId)
+  {
+    CopyOnWriteArrayList<TaskGroup> taskGroups = pendingCompletionTaskGroups.get(groupId);
+    return taskGroups != null && taskGroups.size() > 0;
+  }
+
   @Override
   public SupervisorReport getStatus()
   {
@@ -1341,7 +1346,7 @@ private void verifyAndMergeCheckpoints(final TaskGroup taskGroup)
                     partitionOffset.getValue() :
                     latestOffsetsFromDb.getOrDefault(partitionOffset.getKey(), partitionOffset.getValue())
                 ) == 0) && earliestConsistentSequenceId.compareAndSet(-1, sequenceCheckpoint.getKey())) || (
-                pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() > 0
+                someTaskGroupsPendingCompletion(groupId)
                 && earliestConsistentSequenceId.compareAndSet(-1, taskCheckpoints.firstKey()))) {
           final SortedMap<Integer, Map<Integer, Long>> latestCheckpoints = new TreeMap<>(
               taskCheckpoints.tailMap(earliestConsistentSequenceId.get())
@@ -1378,7 +1383,7 @@ private void verifyAndMergeCheckpoints(final TaskGroup taskGroup)
     }

     if ((tasksToKill.size() > 0 && tasksToKill.size() == taskGroup.tasks.size()) ||
-        (taskGroup.tasks.size() == 0 && pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() == 0)) {
+        (taskGroup.tasks.size() == 0 && !someTaskGroupsPendingCompletion(groupId))) {
       // killing all tasks or no task left in the group ?
       // clear state about the taskgroup so that get latest offset information is fetched from metadata store
       log.warn("Clearing task group [%d] information as no valid tasks left the group", groupId);
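The new `someTaskGroupsPendingCompletion` helper replaces `pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST)`, where `EMPTY_LIST` was a shared raw-typed `CopyOnWriteArrayList` constant; a plain null check expresses the same condition without the unchecked raw type. A self-contained sketch of the same move (`String` stands in for the supervisor's TaskGroup type):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;

public class PendingGroupsCheck
{
  private final Map<Integer, CopyOnWriteArrayList<String>> pendingCompletionTaskGroups =
      new ConcurrentHashMap<>();

  // Equivalent to getOrDefault(groupId, EMPTY_LIST).size() > 0, minus
  // the shared raw-typed empty-list constant.
  private boolean someTaskGroupsPendingCompletion(Integer groupId)
  {
    CopyOnWriteArrayList<String> taskGroups = pendingCompletionTaskGroups.get(groupId);
    return taskGroups != null && taskGroups.size() > 0;
  }

  public static void main(String[] args)
  {
    PendingGroupsCheck check = new PendingGroupsCheck();
    check.pendingCompletionTaskGroups.put(1, new CopyOnWriteArrayList<>(new String[]{"task"}));
    System.out.println(check.someTaskGroupsPendingCompletion(1)); // true
    System.out.println(check.someTaskGroupsPendingCompletion(2)); // false
  }
}
```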
diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/test/TestBroker.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/test/TestBroker.java
index 4b8e02c2d73..a57d22b1446 100644
--- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/test/TestBroker.java
+++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/test/TestBroker.java
@@ -29,7 +29,7 @@
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.common.serialization.ByteArrayDeserializer;
 import org.apache.kafka.common.serialization.ByteArraySerializer;
-import org.apache.kafka.common.utils.SystemTime;
+import org.apache.kafka.common.utils.Time;
 import scala.Some;
 import scala.collection.immutable.List$;

@@ -81,7 +81,7 @@ public void start()

     final KafkaConfig config = new KafkaConfig(props);

-    server = new KafkaServer(config, SystemTime.SYSTEM, Some.apply(StringUtils.format("TestingBroker[%d]-", id)), List$.MODULE$.empty());
+    server = new KafkaServer(config, Time.SYSTEM, Some.apply(StringUtils.format("TestingBroker[%d]-", id)), List$.MODULE$.empty());
     server.startup();
   }

diff --git a/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/CacheSchedulerTest.java b/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/CacheSchedulerTest.java
index d22ce75f09d..41a861621dc 100644
--- a/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/CacheSchedulerTest.java
+++ b/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/CacheSchedulerTest.java
@@ -311,7 +311,7 @@ public void testSimpleDelete() throws InterruptedException
     testDelete();
   }

-  public void testDelete() throws InterruptedException
+  private void testDelete() throws InterruptedException
   {
     final long period = 1_000L; // Give it some time between attempts to update
     final UriExtractionNamespace namespace = getUriExtractionNamespace(period);
diff --git a/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java b/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java
index 139c8a017a8..fd89b7bac68 100644
--- a/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java
+++ b/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java
@@ -519,7 +519,7 @@ public void testSerde() throws IOException
   private void waitForUpdates(long timeout, long numUpdates) throws InterruptedException
   {
     long startTime = System.currentTimeMillis();
-    long pre = 0L;
+    long pre;
     updateLock.lockInterruptibly();
     try {
       pre = updates.get();
@@ -527,7 +527,7 @@ private void waitForUpdates(long timeout, long numUpdates) throws InterruptedExc
     finally {
       updateLock.unlock();
     }
-    long post = 0L;
+    long post;
     do {
       // Sleep to spare a few cpu cycles
       Thread.sleep(5);
diff --git a/extensions-core/protobuf-extensions/src/main/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParser.java b/extensions-core/protobuf-extensions/src/main/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParser.java
index 25eedc4b174..e47adeeeaa8 100644
--- a/extensions-core/protobuf-extensions/src/main/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParser.java
+++ b/extensions-core/protobuf-extensions/src/main/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParser.java
@@ -111,7 +111,7 @@ private Descriptor getDescriptor(String descriptorFilePath)

     fin = this.getClass().getClassLoader().getResourceAsStream(descriptorFilePath);
     if (fin == null) {
-      URL url = null;
+      URL url;
       try {
         url = new URL(descriptorFilePath);
       }
@@ -126,7 +126,7 @@ private Descriptor getDescriptor(String descriptorFilePath)
       }
     }

-    DynamicSchema dynamicSchema = null;
+    DynamicSchema dynamicSchema;
     try {
       dynamicSchema = DynamicSchema.parseFrom(fin);
     }
diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceGroupByQueryTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceGroupByQueryTest.java
index 4f327e75793..90f61128da6 100644
--- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceGroupByQueryTest.java
+++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceGroupByQueryTest.java
@@ -125,12 +125,12 @@ public void testGroupBy()
         .setDataSource(QueryRunnerTestHelper.dataSource)
         .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
         .setDimensions(new DefaultDimensionSpec("quality", "alias"))
-        .setAggregatorSpecs(VarianceTestHelper.rowsCount,
-                            VarianceTestHelper.indexVarianceAggr,
-                            new LongSumAggregatorFactory("idx", "index"))
-        .setPostAggregatorSpecs(
-            Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr)
+        .setAggregatorSpecs(
+            QueryRunnerTestHelper.rowsCount,
+            VarianceTestHelper.indexVarianceAggr,
+            new LongSumAggregatorFactory("idx", "index")
         )
+        .setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr))
         .setGranularity(QueryRunnerTestHelper.dayGran)
         .build();
 
@@ -178,12 +178,14 @@ public void testPostAggHavingSpec()
 
     GroupByQuery query = GroupByQuery
         .builder()
-        .setDataSource(VarianceTestHelper.dataSource)
+        .setDataSource(QueryRunnerTestHelper.dataSource)
         .setInterval("2011-04-02/2011-04-04")
         .setDimensions(new DefaultDimensionSpec("quality", "alias"))
-        .setAggregatorSpecs(VarianceTestHelper.rowsCount,
-                            VarianceTestHelper.indexLongSum,
-                            VarianceTestHelper.indexVarianceAggr)
+        .setAggregatorSpecs(
+            QueryRunnerTestHelper.rowsCount,
+            QueryRunnerTestHelper.indexLongSum,
+            VarianceTestHelper.indexVarianceAggr
+        )
        .setPostAggregatorSpecs(ImmutableList.of(VarianceTestHelper.stddevOfIndexPostAggr))
         .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
         .setHavingSpec(
diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java
index c1a98d0984c..c5dcecea9bf 100644
--- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java
+++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java
@@ -23,6 +23,7 @@
 import org.apache.druid.query.Druids;
 import org.apache.druid.query.QueryPlus;
 import org.apache.druid.query.QueryRunner;
+import org.apache.druid.query.QueryRunnerTestHelper;
 import org.apache.druid.query.Result;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.timeseries.TimeseriesQuery;
@@ -59,13 +60,13 @@ public VarianceTimeseriesQueryTest(QueryRunner runner, boolean descending, List<
   public void testTimeseriesWithNullFilterOnNonExistentDimension()
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
-                                  .dataSource(VarianceTestHelper.dataSource)
-                                  .granularity(VarianceTestHelper.dayGran)
+                                  .dataSource(QueryRunnerTestHelper.dataSource)
+                                  .granularity(QueryRunnerTestHelper.dayGran)
                                   .filters("bobby", null)
-                                  .intervals(VarianceTestHelper.firstToThird)
+                                  .intervals(QueryRunnerTestHelper.firstToThird)
                                  .aggregators(VarianceTestHelper.commonPlusVarAggregators)
                                   .postAggregators(
-                                      VarianceTestHelper.addRowsIndexConstant,
+                                      QueryRunnerTestHelper.addRowsIndexConstant,
                                       VarianceTestHelper.stddevOfIndexPostAggr
                                   )
                                   .descending(descending)
@@ -75,11 +76,11 @@ public void testTimeseriesWithNullFilterOnNonExistentDimension()
         new Result<>(
             DateTimes.of("2011-04-01"),
             new TimeseriesResultValue(
-                VarianceTestHelper.of(
+                QueryRunnerTestHelper.of(
                     "rows", 13L,
                     "index", 6626.151596069336,
                     "addRowsIndexConstant", 6640.151596069336,
-                    "uniques", VarianceTestHelper.UNIQUES_9,
+                    "uniques", QueryRunnerTestHelper.UNIQUES_9,
                     "index_var", descending ? 368885.6897238851 : 
368885.689155086,
                     "index_stddev", descending ? 607.3596049490657 : 
607.35960448081
                 )
@@ -88,11 +89,11 @@ public void testTimeseriesWithNullFilterOnNonExistentDimension()
         new Result<>(
             DateTimes.of("2011-04-02"),
             new TimeseriesResultValue(
-                VarianceTestHelper.of(
+                QueryRunnerTestHelper.of(
                     "rows", 13L,
                     "index", 5833.2095947265625,
                     "addRowsIndexConstant", 5847.2095947265625,
-                    "uniques", VarianceTestHelper.UNIQUES_9,
+                    "uniques", QueryRunnerTestHelper.UNIQUES_9,
                     "index_var", descending ? 259061.6037088883 : 
259061.60216419376,
                     "index_stddev", descending ? 508.9809463122252 : 
508.98094479478675
                 )
diff --git a/hll/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java b/hll/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java
index bc8dbbbebe6..5fd7df778de 100644
--- a/hll/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java
+++ b/hll/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java
@@ -358,7 +358,7 @@ public void add(short bucket, byte positionOf1)
    short numNonZeroRegisters = addNibbleRegister(bucket, (byte) ((0xff & positionOf1) - registerOffset));
     setNumNonZeroRegisters(numNonZeroRegisters);
     if (numNonZeroRegisters == NUM_BUCKETS) {
-      setRegisterOffset(++registerOffset);
+      setRegisterOffset((byte) (registerOffset + 1));
       setNumNonZeroRegisters(decrementBuckets());
     }
   }
@@ -421,7 +421,7 @@ public HyperLogLogCollector fold(@Nullable HyperLogLogCollector other)
         }
         if (numNonZero == NUM_BUCKETS) {
           numNonZero = decrementBuckets();
-          setRegisterOffset(++myOffset);
+          setRegisterOffset((byte) (myOffset + 1));
           setNumNonZeroRegisters(numNonZero);
         }
       } else { // dense
@@ -437,7 +437,7 @@ public HyperLogLogCollector fold(@Nullable HyperLogLogCollector other)
         }
         if (numNonZero == NUM_BUCKETS) {
           numNonZero = decrementBuckets();
-          setRegisterOffset(++myOffset);
+          setRegisterOffset((byte) (myOffset + 1));
           setNumNonZeroRegisters(numNonZero);
         }
       }
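
registerOffset and myOffset are bytes here; ++offset already performs an implicit narrowing conversion back to byte, so (byte) (offset + 1) is behaviorally identical and simply makes the narrowing, and the possible wrap-around, explicit. A quick standalone check (illustrating Java arithmetic semantics only, not HLL internals):

    class ByteIncrementSketch
    {
      public static void main(String[] args)
      {
        byte viaIncrement = Byte.MAX_VALUE;
        ++viaIncrement; // implicit narrowing, wraps to -128
        byte viaCast = (byte) (Byte.MAX_VALUE + 1); // explicit narrowing, same result
        System.out.println(viaIncrement + " " + viaCast); // prints: -128 -128
      }
    }
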
diff --git a/hll/src/test/java/org/apache/druid/hll/HyperLogLogCollectorBenchmark.java b/hll/src/test/java/org/apache/druid/hll/HyperLogLogCollectorBenchmark.java
index aea0e4265e3..938bd9c7345 100644
--- a/hll/src/test/java/org/apache/druid/hll/HyperLogLogCollectorBenchmark.java
+++ b/hll/src/test/java/org/apache/druid/hll/HyperLogLogCollectorBenchmark.java
@@ -41,6 +41,7 @@
 {
   private final HashFunction fn = Hashing.murmur3_128();
 
+  @SuppressWarnings("MismatchedQueryAndUpdateOfCollection") // TODO understand if this field should be used or not
   private final List<HyperLogLogCollector> collectors = new ArrayList<>();
 
   @Param({"true"}) boolean targetIsDirect;
diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java
index d19bcf55fcd..8e191c7fba9 100644
--- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java
+++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java
@@ -54,7 +54,7 @@
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.SequenceFile.Writer;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeComparator;
@@ -524,7 +524,7 @@ public void setUp() throws Exception
                 false,
                 false,
                 false,
-                ImmutableMap.of(JobContext.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
+                ImmutableMap.of(MRJobConfig.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
                 false,
                 useCombiner,
                 null,
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java
index 40cd7d519e9..104123fc100 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java
@@ -280,7 +280,7 @@ private TaskStatus runInternal(TaskToolbox toolbox) throws Exception
         toolbox.getSegmentPusher().getPathForHadoop()
     };
 
-    HadoopIngestionSpec indexerSchema = null;
+    HadoopIngestionSpec indexerSchema;
    final ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
    Class<?> determinePartitionsRunnerClass = determinePartitionsInnerProcessingRunner.getClass();
    Method determinePartitionsInnerProcessingRunTask = determinePartitionsRunnerClass.getMethod(
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java
index 899be15789a..3d36000c9af 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/hrtr/HttpRemoteTaskRunner.java
@@ -963,7 +963,7 @@ private void addPendingTaskToExecutor(final String taskId)
     pendingTasksExec.execute(
         () -> {
          while (!Thread.interrupted() && lifecycleLock.awaitStarted(1, TimeUnit.MILLISECONDS)) {
-            ImmutableWorkerInfo immutableWorker = null;
+            ImmutableWorkerInfo immutableWorker;
             HttpRemoteTaskRunnerWorkItem taskItem = null;
             try {
               synchronized (statusLock) {
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/supervisor/SupervisorManager.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/supervisor/SupervisorManager.java
index f2c78a66b8a..4a8ae2f96cd 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/supervisor/SupervisorManager.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/supervisor/SupervisorManager.java
@@ -289,7 +289,7 @@ private boolean createAndStartSupervisorInternal(SupervisorSpec spec, boolean pe
       metadataSupervisorManager.insert(id, spec);
     }
 
-    Supervisor supervisor = null;
+    Supervisor supervisor;
     try {
       supervisor = spec.createSupervisor();
       supervisor.start();
diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerTaskManager.java b/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerTaskManager.java
index a858f88fd7e..561a6e1befa 100644
--- a/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerTaskManager.java
+++ b/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerTaskManager.java
@@ -576,7 +576,7 @@ public String getTaskId()
     @Override
     public void handle()
     {
-      TaskAnnouncement announcement = null;
+      TaskAnnouncement announcement;
       synchronized (lock) {
        if (runningTasks.containsKey(task.getId()) || completedTasks.containsKey(task.getId())) {
           log.warn(
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java
index 7fc311a3f6d..9db06583a80 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java
@@ -174,7 +174,6 @@
    final IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(null, null, null)
     {
       private final Set<DataSegment> published = new HashSet<>();
-      private final Set<DataSegment> nuked = new HashSet<>();
 
       @Override
      public List<DataSegment> getUsedSegmentsForInterval(String dataSource, Interval interval)
@@ -210,7 +209,7 @@
       @Override
       public void deleteSegments(Set<DataSegment> segments)
       {
-        nuked.addAll(segments);
+        // do nothing
       }
     };
     final LocalTaskActionClientFactory tac = new LocalTaskActionClientFactory(
@@ -596,7 +595,6 @@ public void testGetUniqueDimensionsAndMetrics()
     final int numSegmentsPerPartitionChunk = 5;
     final int numPartitionChunksPerTimelineObject = 10;
    final int numSegments = numSegmentsPerPartitionChunk * numPartitionChunksPerTimelineObject;
-    final List<DataSegment> segments = new ArrayList<>(numSegments);
     final Interval interval = Intervals.of("2017-01-01/2017-01-02");
     final String version = "1";
 
@@ -621,7 +619,6 @@ public void testGetUniqueDimensionsAndMetrics()
             1,
             1
         );
-        segments.add(segment);
 
        final PartitionChunk<DataSegment> partitionChunk = new NumberedPartitionChunk<>(
             i,
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/test/TestDataSegmentKiller.java b/indexing-service/src/test/java/org/apache/druid/indexing/test/TestDataSegmentKiller.java
index 515db19dff7..33421eb1a5c 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/test/TestDataSegmentKiller.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/test/TestDataSegmentKiller.java
@@ -19,20 +19,15 @@
 
 package org.apache.druid.indexing.test;
 
-import com.google.common.collect.Sets;
 import org.apache.druid.segment.loading.DataSegmentKiller;
 import org.apache.druid.timeline.DataSegment;
 
-import java.util.Set;
-
 public class TestDataSegmentKiller implements DataSegmentKiller
 {
-  private final Set<DataSegment> killedSegments = Sets.newConcurrentHashSet();
-
   @Override
   public void kill(DataSegment segment)
   {
-    killedSegments.add(segment);
+    // do nothing
   }
 
   @Override
diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/test/TestDataSegmentPusher.java b/indexing-service/src/test/java/org/apache/druid/indexing/test/TestDataSegmentPusher.java
index 8a47742cdb9..543fa76d6d7 100644
--- a/indexing-service/src/test/java/org/apache/druid/indexing/test/TestDataSegmentPusher.java
+++ b/indexing-service/src/test/java/org/apache/druid/indexing/test/TestDataSegmentPusher.java
@@ -19,19 +19,15 @@
 
 package org.apache.druid.indexing.test;
 
-import com.google.common.collect.Sets;
 import org.apache.druid.segment.loading.DataSegmentPusher;
 import org.apache.druid.timeline.DataSegment;
 
 import java.io.File;
 import java.net.URI;
 import java.util.Map;
-import java.util.Set;
 
 public class TestDataSegmentPusher implements DataSegmentPusher
 {
-  private final Set<DataSegment> pushedSegments = Sets.newConcurrentHashSet();
-
   @Deprecated
   @Override
   public String getPathForHadoop(String dataSource)
@@ -48,7 +44,6 @@ public String getPathForHadoop()
   @Override
  public DataSegment push(File file, DataSegment segment, boolean useUniquePath)
   {
-    pushedSegments.add(segment);
     return segment;
   }
 
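
Both test doubles above kept a collection that was written but never read; the MismatchedQueryAndUpdateOfCollection inspection this PR leans on flags exactly that shape, and deleting the dead state simplifies the stubs. A trivial standalone illustration of what the inspection catches (hypothetical code, not from the PR):

    import java.util.ArrayList;
    import java.util.List;

    class WriteOnlyCollectionSketch
    {
      public static void main(String[] args)
      {
        // Flagged: 'recorded' is updated but never queried, so it is dead state.
        List<String> recorded = new ArrayList<>();
        recorded.add("segment-1");
        System.out.println("done");
      }
    }
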
diff --git a/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java b/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java
index eff8989b23a..a3a6683395b 100644
--- a/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java
+++ b/integration-tests/src/main/java/org/apache/druid/testing/clients/CoordinatorResourceTestClient.java
@@ -88,12 +88,12 @@ private String getLoadStatusURL()
   // return a list of the segment dates for the specified datasource
   public List<String> getMetadataSegments(final String dataSource)
   {
-    ArrayList<String> segments = null;
+    ArrayList<String> segments;
     try {
      StatusResponseHolder response = makeRequest(HttpMethod.GET, getMetadataSegmentsURL(dataSource));
 
       segments = jsonMapper.readValue(
-          response.getContent(), new TypeReference<ArrayList<String>>()
+          response.getContent(), new TypeReference<List<String>>()
           {
           }
       );
@@ -107,12 +107,12 @@ private String getLoadStatusURL()
   // return a list of the segment dates for the specified datasource
   public List<String> getSegmentIntervals(final String dataSource)
   {
-    ArrayList<String> segments = null;
+    ArrayList<String> segments;
     try {
      StatusResponseHolder response = makeRequest(HttpMethod.GET, getIntervalsURL(dataSource));
 
       segments = jsonMapper.readValue(
-          response.getContent(), new TypeReference<ArrayList<String>>()
+          response.getContent(), new TypeReference<List<String>>()
           {
           }
       );
@@ -125,7 +125,7 @@ private String getLoadStatusURL()
 
   private Map<String, Integer> getLoadStatus()
   {
-    Map<String, Integer> status = null;
+    Map<String, Integer> status;
     try {
      StatusResponseHolder response = makeRequest(HttpMethod.GET, getLoadStatusURL());
 
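
Binding to TypeReference<List<String>> rather than TypeReference<ArrayList<String>> keeps the declared type at the interface; Jackson still materializes an ArrayList by default. A minimal standalone check (assumes jackson-databind on the classpath; values are illustrative):

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.List;

    class TypeReferenceSketch
    {
      public static void main(String[] args) throws Exception
      {
        ObjectMapper mapper = new ObjectMapper();
        List<String> segments = mapper.readValue(
            "[\"2011-01-12\", \"2011-01-13\"]",
            new TypeReference<List<String>>() {}
        );
        // Jackson picks ArrayList for a List target by default.
        System.out.println(segments.getClass().getSimpleName() + " " + segments);
      }
    }
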
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/hadoop/ITHadoopIndexTest.java b/integration-tests/src/test/java/org/apache/druid/tests/hadoop/ITHadoopIndexTest.java
index ebfe22d9595..6e6b298aedf 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/hadoop/ITHadoopIndexTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/hadoop/ITHadoopIndexTest.java
@@ -60,7 +60,7 @@ public void testHadoopIndex() throws Exception
 
   private void loadData(String hadoopDir)
   {
-    String indexerSpec = "";
+    String indexerSpec;
 
     try {
       LOG.info("indexerFile name: [%s]", BATCH_TASK);
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java
index 4eaa40c26bd..3d7802a7a47 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/AbstractITRealtimeIndexTaskTest.java
@@ -100,7 +100,7 @@ void doTest()
       TimeUnit.SECONDS.sleep(5);
 
       // put the timestamps into the query structure
-      String query_response_template = null;
+      String query_response_template;
      InputStream is = ITRealtimeIndexTaskTest.class.getResourceAsStream(getQueriesResource());
       if (null == is) {
         throw new ISE("could not open query file: %s", getQueriesResource());
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java
index e6ca33d1d61..bdd0df78ee3 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITAppenderatorDriverRealtimeIndexTaskTest.java
@@ -61,7 +61,7 @@ public void postEvents() throws Exception
    final ServerDiscoverySelector eventReceiverSelector = factory.createSelector(EVENT_RECEIVER_SERVICE_NAME);
     eventReceiverSelector.start();
     BufferedReader reader = null;
-    InputStreamReader isr = null;
+    InputStreamReader isr;
     try {
       isr = new InputStreamReader(
           ITRealtimeIndexTaskTest.class.getResourceAsStream(EVENT_DATA_FILE),
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaTest.java
index 6e29c3ae097..4078984e854 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITKafkaTest.java
@@ -197,7 +197,6 @@ public void testKafka()
       consumerProperties.put("zookeeper.connection.timeout.ms", "15000");
       consumerProperties.put("zookeeper.sync.time.ms", "5000");
       consumerProperties.put("group.id", 
Long.toString(System.currentTimeMillis()));
-      consumerProperties.put("zookeeper.sync.time.ms", "5000");
       consumerProperties.put("fetch.message.max.bytes", "1048586");
       consumerProperties.put("auto.offset.reset", "smallest");
       consumerProperties.put("auto.commit.enable", "false");
@@ -249,20 +248,20 @@ public Boolean call()
     segmentsExist = true;
 
     // put the timestamps into the query structure
-    String query_response_template = null;
+    String queryResponseTemplate;
     InputStream is = ITKafkaTest.class.getResourceAsStream(QUERIES_FILE);
     if (null == is) {
       throw new ISE("could not open query file: %s", QUERIES_FILE);
     }
 
     try {
-      query_response_template = IOUtils.toString(is, "UTF-8");
+      queryResponseTemplate = IOUtils.toString(is, "UTF-8");
     }
     catch (IOException e) {
       throw new ISE(e, "could not read query file: %s", QUERIES_FILE);
     }
 
-    String queryStr = query_response_template
+    String queryStr = queryResponseTemplate
         .replaceAll("%%DATASOURCE%%", DATASOURCE)
         // time boundary
         .replace("%%TIMEBOUNDARY_RESPONSE_TIMESTAMP%%", 
TIMESTAMP_FMT.print(dtFirst))
diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITRealtimeIndexTaskTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITRealtimeIndexTaskTest.java
index 75c43fc3940..af30db33474 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITRealtimeIndexTaskTest.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITRealtimeIndexTaskTest.java
@@ -74,7 +74,7 @@ public void postEvents() throws Exception
    final ServerDiscoverySelector eventReceiverSelector = factory.createSelector(EVENT_RECEIVER_SERVICE_NAME);
     eventReceiverSelector.start();
     BufferedReader reader = null;
-    InputStreamReader isr = null;
+    InputStreamReader isr;
     try {
       isr = new InputStreamReader(
           ITRealtimeIndexTaskTest.class.getResourceAsStream(EVENT_DATA_FILE),
diff --git a/pom.xml b/pom.xml
index a2f5ecf557f..54fb8c81c09 100644
--- a/pom.xml
+++ b/pom.xml
@@ -337,7 +337,7 @@
             <dependency>
                 <groupId>com.google.errorprone</groupId>
                 <artifactId>error_prone_annotations</artifactId>
-                <version>2.2.0</version>
+                <version>2.3.2</version>
             </dependency>
             <dependency>
                 <groupId>com.ibm.icu</groupId>
@@ -1265,8 +1265,8 @@
                                <arg>-Xep:PreconditionsInvalidPlaceholder:ERROR</arg>
                                 <arg>-Xep:MissingOverride:ERROR</arg>
                                 <arg>-Xep:DefaultCharset:ERROR</arg>
+                                <arg>-Xep:QualifierOrScopeOnInjectMethod:ERROR</arg>
 
-                                <arg>-Xep:ArgumentParameterSwap</arg>
                                <arg>-Xep:AssistedInjectAndInjectOnSameConstructor</arg>
                                 <arg>-Xep:AutoFactoryAtInject</arg>
                                 <arg>-Xep:ClassName</arg>
@@ -1289,7 +1289,6 @@
                                 <arg>-Xep:NumericEquality</arg>
                                <arg>-Xep:ParameterPackage</arg>
                                <arg>-Xep:ProtoStringFieldReferenceEquality</arg>
-                                <arg>-Xep:QualifierOnMethodWithoutProvides</arg>
                                 <arg>-Xep:UnlockMethod</arg>
                             </compilerArgs>
                         </configuration>
@@ -1297,14 +1296,14 @@
                             <dependency>
                                <groupId>org.codehaus.plexus</groupId>
                                <artifactId>plexus-compiler-javac-errorprone</artifactId>
-                                <version>2.8.1</version>
+                                <version>2.8.5</version>
                             </dependency>
                            <!-- override plexus-compiler-javac-errorprone's dependency on
                                  Error Prone with the latest version -->
                             <dependency>
                                 <groupId>com.google.errorprone</groupId>
                                 <artifactId>error_prone_core</artifactId>
-                                <version>2.0.19</version>
+                                <version>2.3.2</version>
                             </dependency>
                         </dependencies>
                     </plugin>
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
index 9975c817025..1c7fb6c1408 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
@@ -95,11 +95,14 @@ public static Builder builder()
 
   private final VirtualColumns virtualColumns;
   private final LimitSpec limitSpec;
+  @Nullable
   private final HavingSpec havingSpec;
+  @Nullable
   private final DimFilter dimFilter;
   private final List<DimensionSpec> dimensions;
   private final List<AggregatorFactory> aggregatorSpecs;
   private final List<PostAggregator> postAggregatorSpecs;
+  @Nullable
   private final List<List<String>> subtotalsSpec;
 
   private final boolean applyLimitPushDown;
@@ -115,9 +118,9 @@ public GroupByQuery(
       @JsonProperty("dimensions") List<DimensionSpec> dimensions,
       @JsonProperty("aggregations") List<AggregatorFactory> aggregatorSpecs,
       @JsonProperty("postAggregations") List<PostAggregator> 
postAggregatorSpecs,
-      @JsonProperty("having") HavingSpec havingSpec,
+      @JsonProperty("having") @Nullable HavingSpec havingSpec,
       @JsonProperty("limitSpec") LimitSpec limitSpec,
-      @JsonProperty("subtotalsSpec") List<List<String>> subtotalsSpec,
+      @JsonProperty("subtotalsSpec") @Nullable List<List<String>> subtotalsSpec,
       @JsonProperty("context") Map<String, Object> context
   )
   {
@@ -168,12 +171,12 @@ private GroupByQuery(
       final DataSource dataSource,
       final QuerySegmentSpec querySegmentSpec,
       final VirtualColumns virtualColumns,
-      final DimFilter dimFilter,
+      final @Nullable DimFilter dimFilter,
       final Granularity granularity,
-      final List<DimensionSpec> dimensions,
-      final List<AggregatorFactory> aggregatorSpecs,
-      final List<PostAggregator> postAggregatorSpecs,
-      final HavingSpec havingSpec,
+      final @Nullable List<DimensionSpec> dimensions,
+      final @Nullable List<AggregatorFactory> aggregatorSpecs,
+      final @Nullable List<PostAggregator> postAggregatorSpecs,
+      final @Nullable HavingSpec havingSpec,
       final LimitSpec limitSpec,
       final @Nullable List<List<String>> subtotalsSpec,
       final @Nullable Function<Sequence<Row>, Sequence<Row>> postProcessingFn,
@@ -198,7 +201,7 @@ private GroupByQuery(
     this.havingSpec = havingSpec;
     this.limitSpec = LimitSpec.nullToNoopLimitSpec(limitSpec);
 
-    this.subtotalsSpec = verifySubtotalsSpec(subtotalsSpec, dimensions);
+    this.subtotalsSpec = verifySubtotalsSpec(subtotalsSpec, this.dimensions);
 
    // Verify no duplicate names between dimensions, aggregators, and postAggregators.
    // They will all end up in the same namespace in the returned Rows and we can't have them clobbering each other.
@@ -211,7 +214,11 @@ private GroupByQuery(
     this.applyLimitPushDown = determineApplyLimitPushDown();
   }
 
-  private List<List<String>> verifySubtotalsSpec(List<List<String>> subtotalsSpec, List<DimensionSpec> dimensions)
+  @Nullable
+  private List<List<String>> verifySubtotalsSpec(
+      @Nullable List<List<String>> subtotalsSpec,
+      List<DimensionSpec> dimensions
+  )
   {
    // if subtotalsSpec exists then validate that all are subsets of dimensions spec and are in same order.
     // For example if we had {D1, D2, D3} in dimensions spec then
@@ -736,20 +743,37 @@ private static void verifyOutputNames(
 
   public static class Builder
   {
+    @Nullable
+    private static List<List<String>> copySubtotalSpec(@Nullable List<List<String>> subtotalsSpec)
+    {
+      if (subtotalsSpec == null) {
+        return null;
+      }
+      return subtotalsSpec.stream().map(ArrayList::new).collect(Collectors.toList());
+    }
+
     private DataSource dataSource;
     private QuerySegmentSpec querySegmentSpec;
     private VirtualColumns virtualColumns;
+    @Nullable
     private DimFilter dimFilter;
     private Granularity granularity;
+    @Nullable
     private List<DimensionSpec> dimensions;
+    @Nullable
     private List<AggregatorFactory> aggregatorSpecs;
+    @Nullable
     private List<PostAggregator> postAggregatorSpecs;
+    @Nullable
     private HavingSpec havingSpec;
 
     private Map<String, Object> context;
 
+    @Nullable
     private List<List<String>> subtotalsSpec = null;
+    @Nullable
     private LimitSpec limitSpec = null;
+    @Nullable
     private Function<Sequence<Row>, Sequence<Row>> postProcessingFn;
     private List<OrderByColumnSpec> orderByColumnSpecs = new ArrayList<>();
     private int limit = Integer.MAX_VALUE;
@@ -787,6 +811,7 @@ public Builder(Builder builder)
       postAggregatorSpecs = builder.postAggregatorSpecs;
       havingSpec = builder.havingSpec;
       limitSpec = builder.limitSpec;
+      subtotalsSpec = copySubtotalSpec(builder.subtotalsSpec);
       postProcessingFn = builder.postProcessingFn;
       limit = builder.limit;
       orderByColumnSpecs = new ArrayList<>(builder.orderByColumnSpecs);
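
The Builder copy constructor previously missed subtotalsSpec entirely (the newly enabled CopyConstructorMissesField inspection catches this), and the new helper deep-copies the nested lists so mutations through the copy cannot leak back into the source builder. A standalone sketch of the deep-copy idiom (names ours, not from the PR):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    class DeepCopySketch
    {
      private static List<List<String>> copy(List<List<String>> spec)
      {
        if (spec == null) {
          return null;
        }
        // New outer list and new inner lists: edits to the copy stay in the copy.
        return spec.stream().map(ArrayList::new).collect(Collectors.toList());
      }

      public static void main(String[] args)
      {
        List<List<String>> original = new ArrayList<>();
        original.add(new ArrayList<>(Arrays.asList("d1", "d2")));
        List<List<String>> copied = copy(original);
        copied.get(0).add("d3");
        System.out.println(original.get(0)); // prints: [d1, d2]
      }
    }
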
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/BufferArrayGrouper.java b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/BufferArrayGrouper.java
index 67f4232df96..4bc541f4a3c 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/BufferArrayGrouper.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/BufferArrayGrouper.java
@@ -233,7 +233,7 @@ public void close()
 
     return new CloseableIterator<Entry<Integer>>()
     {
-      int cur = -1;
+      int cur;
       boolean findNext = false;
 
       {
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/BufferHashGrouper.java b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/BufferHashGrouper.java
index ca3b6c83412..5f54fad9133 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/BufferHashGrouper.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/BufferHashGrouper.java
@@ -173,6 +173,7 @@ public void reset()
     }
 
     if (sorted) {
+      @SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
       final List<Integer> wrappedOffsets = new AbstractList<Integer>()
       {
         @Override
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java
index 2ec78598891..c8d97eafc1b 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedBufferHashGrouper.java
@@ -254,6 +254,7 @@ public int getHeapIndexForOffset(int bucketOffset)
   {
     final int size = offsetHeap.getHeapSize();
 
+    @SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
     final List<Integer> wrappedOffsets = new AbstractList<Integer>()
     {
       @Override
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/SpillingGrouper.java b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/SpillingGrouper.java
index a24798342e0..5dce77ffcb8 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/SpillingGrouper.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/SpillingGrouper.java
@@ -72,7 +72,7 @@
   private final List<File> dictionaryFiles = new ArrayList<>();
   private final boolean sortHasNonGroupingFields;
 
-  private boolean spillingAllowed = false;
+  private boolean spillingAllowed;
 
   public SpillingGrouper(
       final Supplier<ByteBuffer> bufferSupplier,
diff --git a/processing/src/main/java/org/apache/druid/query/ordering/StringComparators.java b/processing/src/main/java/org/apache/druid/query/ordering/StringComparators.java
index 1f991737ee2..dae5cdac2bd 100644
--- a/processing/src/main/java/org/apache/druid/query/ordering/StringComparators.java
+++ b/processing/src/main/java/org/apache/druid/query/ordering/StringComparators.java
@@ -122,7 +122,7 @@ public int compare(String str1, String str2)
         int ch1 = str1.codePointAt(pos[0]);
         int ch2 = str2.codePointAt(pos[1]);
 
-        int result = 0;
+        int result;
 
         if (isDigit(ch1)) {
           result = isDigit(ch2) ? compareNumbers(str1, str2, pos) : -1;
@@ -135,7 +135,7 @@ public int compare(String str1, String str2)
         }
       }
 
-      return str1.length() - str2.length();
+      return Integer.compare(str1.length(), str2.length());
     }
 
     private int compareNumbers(String str0, String str1, int[] pos)
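
String lengths cannot realistically overflow this subtraction, but Integer.compare is the overflow-safe idiom the updated inspections push toward, since subtraction-based comparators report the wrong sign for extreme inputs. A standalone demonstration of the failure mode (hypothetical values):

    class ComparatorOverflowSketch
    {
      public static void main(String[] args)
      {
        int a = Integer.MAX_VALUE;
        int b = -1;
        System.out.println(a - b);                 // -2147483648: overflow, wrong sign
        System.out.println(Integer.compare(a, b)); // 1: correct
      }
    }
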
diff --git a/processing/src/main/java/org/apache/druid/query/scan/ScanQueryLimitRowIterator.java b/processing/src/main/java/org/apache/druid/query/scan/ScanQueryLimitRowIterator.java
index f07647aee58..b655b21a155 100644
--- a/processing/src/main/java/org/apache/druid/query/scan/ScanQueryLimitRowIterator.java
+++ b/processing/src/main/java/org/apache/druid/query/scan/ScanQueryLimitRowIterator.java
@@ -33,7 +33,7 @@
 {
   private Yielder<ScanResultValue> yielder;
   private String resultFormat;
-  private long limit = 0;
+  private long limit;
   private long count = 0;
 
   public ScanQueryLimitRowIterator(
diff --git a/processing/src/main/java/org/apache/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/org/apache/druid/query/search/SearchQueryQueryToolChest.java
index 8f11e4b5919..88291845565 100644
--- a/processing/src/main/java/org/apache/druid/query/search/SearchQueryQueryToolChest.java
+++ b/processing/src/main/java/org/apache/druid/query/search/SearchQueryQueryToolChest.java
@@ -285,22 +285,22 @@ public SearchHit apply(@Nullable Object input)
                               @Override
                               public SearchHit apply(@Nullable Object input)
                               {
-                                String dim = null;
-                                String val = null;
-                                Integer cnt = null;
+                                String dim;
+                                String val;
+                                Integer count;
                                 if (input instanceof Map) {
                                  dim = outputNameMap.get((String) ((Map) input).get("dimension"));
                                   val = (String) ((Map) input).get("value");
-                                  cnt = (Integer) ((Map) input).get("count");
+                                  count = (Integer) ((Map) input).get("count");
                                 } else if (input instanceof SearchHit) {
                                   SearchHit cached = (SearchHit) input;
                                  dim = outputNameMap.get(cached.getDimension());
                                   val = cached.getValue();
-                                  cnt = cached.getCount();
+                                  count = cached.getCount();
                                 } else {
                                  throw new IAE("Unknown format [%s]", input.getClass());
                                 }
-                                return new SearchHit(dim, val, cnt);
+                                return new SearchHit(dim, val, count);
                               }
                             }
                         )
diff --git a/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java b/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java
index 8f98cfd3cc4..5f2cac5b8f9 100644
--- a/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java
+++ b/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java
@@ -23,7 +23,6 @@
 import com.google.common.collect.Maps;
 import com.google.common.collect.MinMaxPriorityQueue;
 import com.google.common.primitives.Longs;
-import org.apache.druid.java.util.common.guava.Comparators;
 import org.apache.druid.query.Result;
 import org.joda.time.DateTime;
 
@@ -137,7 +136,7 @@ public MergeBuilder(DateTime timestamp, PagingSpec pagingSpec, boolean descendin
     protected Queue<EventHolder> instantiatePQueue()
     {
       int threshold = pagingSpec.getThreshold();
-      return MinMaxPriorityQueue.orderedBy(descending ? Comparators.inverse(comparator) : comparator)
+      return MinMaxPriorityQueue.orderedBy(descending ? comparator.reversed() : comparator)
                                .maximumSize(threshold > 0 ? threshold : Integer.MAX_VALUE)
                                 .create();
     }
diff --git a/processing/src/main/java/org/apache/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java b/processing/src/main/java/org/apache/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java
index 20735111935..270ec164086 100644
--- a/processing/src/main/java/org/apache/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java
+++ b/processing/src/main/java/org/apache/druid/query/topn/AggregateTopNMetricFirstAlgorithm.java
@@ -84,7 +84,7 @@ public void run(
 
    PooledTopNAlgorithm singleMetricAlgo = new PooledTopNAlgorithm(storageAdapter, singleMetricQuery, bufferPool);
     PooledTopNAlgorithm.PooledTopNParams singleMetricParam = null;
-    int[] dimValSelector = null;
+    int[] dimValSelector;
     try {
      singleMetricParam = singleMetricAlgo.makeInitParams(params.getSelectorPlus(), params.getCursor());
       singleMetricAlgo.run(
diff --git a/processing/src/main/java/org/apache/druid/query/topn/InvertedTopNMetricSpec.java b/processing/src/main/java/org/apache/druid/query/topn/InvertedTopNMetricSpec.java
index b4c8cbc5f58..eac69d32837 100644
--- a/processing/src/main/java/org/apache/druid/query/topn/InvertedTopNMetricSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/topn/InvertedTopNMetricSpec.java
@@ -21,7 +21,6 @@
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import org.apache.druid.java.util.common.guava.Comparators;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.PostAggregator;
 import org.apache.druid.query.dimension.DimensionSpec;
@@ -67,23 +66,7 @@ public Comparator getComparator(
       final List<PostAggregator> postAggregatorSpecs
   )
   {
-    return Comparators.inverse(
-        new Comparator()
-        {
-          @Override
-          public int compare(Object o1, Object o2)
-          {
-            // nulls last
-            if (o1 == null) {
-              return 1;
-            }
-            if (o2 == null) {
-              return -1;
-            }
-            return delegate.getComparator(aggregatorSpecs, postAggregatorSpecs).compare(o1, o2);
-          }
-        }
-    );
+    return Comparator.nullsFirst(delegate.getComparator(aggregatorSpecs, postAggregatorSpecs).reversed());
   }
 
   @Override
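
The replacement relies on a symmetry: reversing a comparator that puts nulls last yields one that puts nulls first, so the hand-written anonymous class collapses to Comparator.nullsFirst(...) around the reversed delegate comparator. A standalone check of the equivalence (natural ordering stands in for the delegate):

    import java.util.Arrays;
    import java.util.Comparator;

    class NullsFirstReversedSketch
    {
      public static void main(String[] args)
      {
        Integer[] a = {3, null, 1, 2};
        Integer[] b = a.clone();
        // Old shape: "nulls last" around the delegate, then the whole thing reversed.
        Arrays.sort(a, Comparator.nullsLast(Comparator.<Integer>naturalOrder()).reversed());
        // New shape: nulls first around the reversed delegate.
        Arrays.sort(b, Comparator.nullsFirst(Comparator.<Integer>naturalOrder().reversed()));
        System.out.println(Arrays.toString(a)); // [null, 3, 2, 1]
        System.out.println(Arrays.toString(b)); // [null, 3, 2, 1]
      }
    }
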
diff --git a/processing/src/main/java/org/apache/druid/segment/IndexIO.java b/processing/src/main/java/org/apache/druid/segment/IndexIO.java
index 1ee8b7c21e9..2293991c551 100644
--- a/processing/src/main/java/org/apache/druid/segment/IndexIO.java
+++ b/processing/src/main/java/org/apache/druid/segment/IndexIO.java
@@ -78,7 +78,6 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.TreeSet;
 
 public class IndexIO
 {
@@ -484,14 +483,6 @@ public QueryableIndex load(File inDir, ObjectMapper mapper) throws IOException
         }
       }
 
-      Set<String> colSet = new TreeSet<>();
-      for (String dimension : index.getAvailableDimensions()) {
-        colSet.add(dimension);
-      }
-      for (String metric : index.getAvailableMetrics()) {
-        colSet.add(metric);
-      }
-
       columns.put(
           ColumnHolder.TIME_COLUMN_NAME,
           new ColumnBuilder()
diff --git a/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarLongsSerializer.java b/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarLongsSerializer.java
index 8aadff24a7a..c1851c73d38 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarLongsSerializer.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarLongsSerializer.java
@@ -47,7 +47,7 @@
   private int numInserted = 0;
   private int numInsertedForNextFlush;
 
-  private ByteBuffer endBuffer = null;
+  private ByteBuffer endBuffer;
 
   BlockLayoutColumnarLongsSerializer(
       SegmentWriteOutMedium segmentWriteOutMedium,
diff --git a/processing/src/main/java/org/apache/druid/segment/filter/Filters.java b/processing/src/main/java/org/apache/druid/segment/filter/Filters.java
index c9630ccecc8..a81eb60a8fe 100644
--- a/processing/src/main/java/org/apache/druid/segment/filter/Filters.java
+++ b/processing/src/main/java/org/apache/druid/segment/filter/Filters.java
@@ -397,7 +397,7 @@ public IntIterator iterator()
         {
          private final int bitmapIndexCardinality = bitmapIndex.getCardinality();
           private int nextIndex = 0;
-          private int found = -1;
+          private int found;
 
           {
             found = findNextIndex();
diff --git a/processing/src/main/java/org/apache/druid/segment/filter/LikeFilter.java b/processing/src/main/java/org/apache/druid/segment/filter/LikeFilter.java
index 493eb9ec72b..018102a1833 100644
--- a/processing/src/main/java/org/apache/druid/segment/filter/LikeFilter.java
+++ b/processing/src/main/java/org/apache/druid/segment/filter/LikeFilter.java
@@ -174,7 +174,7 @@ public IntIterator iterator()
         return new IntIterator()
         {
           int currIndex = startIndex;
-          int found = -1;
+          int found;
 
           {
             found = findNext();
diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java
index 960b4701b42..757001b0ed3 100644
--- a/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java
+++ b/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java
@@ -322,7 +322,7 @@ public static void main(String... args)
 
     // warmup
     int i = 0;
-    long t = 0;
+    long t;
     while (i < 10000) {
       aggregate(selector, aggRhino);
       ++i;
diff --git a/processing/src/test/java/org/apache/druid/query/select/SelectBinaryFnTest.java b/processing/src/test/java/org/apache/druid/query/select/SelectBinaryFnTest.java
index 2de7b4f3bbb..219452e3b99 100644
--- a/processing/src/test/java/org/apache/druid/query/select/SelectBinaryFnTest.java
+++ b/processing/src/test/java/org/apache/druid/query/select/SelectBinaryFnTest.java
@@ -137,9 +137,6 @@ public void testApply()
     Assert.assertEquals(res1.getTimestamp(), merged.getTimestamp());
 
     LinkedHashMap<String, Integer> expectedPageIds = Maps.newLinkedHashMap();
-    expectedPageIds.put(segmentId1, 0);
-    expectedPageIds.put(segmentId2, 0);
-    expectedPageIds.put(segmentId2, 1);
     expectedPageIds.put(segmentId1, 1);
     expectedPageIds.put(segmentId2, 2);
 
diff --git a/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java
index 06c7eaa2bb0..6527f3ceefd 100644
--- a/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java
+++ b/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java
@@ -24,7 +24,6 @@
 import com.google.common.io.CharSource;
 import org.apache.commons.lang.StringUtils;
 import org.apache.druid.java.util.common.DateTimes;
-import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.query.Druids;
 import org.apache.druid.query.QueryPlus;
@@ -40,7 +39,6 @@
 import org.apache.druid.segment.incremental.IncrementalIndex;
 import org.apache.druid.segment.incremental.IncrementalIndexSchema;
 import org.apache.druid.timeline.DataSegment;
-import org.apache.druid.timeline.TimelineObjectHolder;
 import org.apache.druid.timeline.VersionedIntervalTimeline;
 import org.apache.druid.timeline.partition.NoneShardSpec;
 import org.apache.druid.timeline.partition.SingleElementPartitionChunk;
@@ -80,7 +78,6 @@
   );
   private static Segment segment0;
   private static Segment segment1;
-  private static List<String> segmentIdentifiers;
 
   public TimeBoundaryQueryRunnerTest(
       QueryRunner runner
@@ -157,11 +154,6 @@ private QueryRunner getCustomRunner() throws IOException
    timeline.add(index0.getInterval(), "v1", new SingleElementPartitionChunk(segment0));
    timeline.add(index1.getInterval(), "v1", new SingleElementPartitionChunk(segment1));
 
-    segmentIdentifiers = new ArrayList<>();
-    for (TimelineObjectHolder<String, ?> holder : timeline.lookup(Intervals.of("2011-01-12/2011-01-17"))) {
-      segmentIdentifiers.add(makeIdentifier(holder.getInterval(), holder.getVersion()));
-    }
-
     return QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, factory);
   }
 
diff --git a/processing/src/test/java/org/apache/druid/segment/SchemalessTestSimpleTest.java b/processing/src/test/java/org/apache/druid/segment/SchemalessTestSimpleTest.java
index b16157e369a..a6a2330774e 100644
--- a/processing/src/test/java/org/apache/druid/segment/SchemalessTestSimpleTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/SchemalessTestSimpleTest.java
@@ -57,6 +57,7 @@
 import org.apache.druid.query.topn.TopNResultValue;
 import org.apache.druid.segment.incremental.IncrementalIndex;
 import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -167,6 +168,7 @@ public void testFullOnTimeseries()
 
  //  @Test TODO: Handling of null values is inconsistent right now, need to make it all consistent and re-enable test
   // TODO: Complain to Eric when you see this.  It shouldn't be like this...
+  @Ignore
   @SuppressWarnings("unused")
   public void testFullOnTopN()
   {
diff --git a/server/src/main/java/org/apache/druid/client/cache/BytesBoundedLinkedQueue.java b/server/src/main/java/org/apache/druid/client/cache/BytesBoundedLinkedQueue.java
index f996d47ae00..25a2da4c899 100644
--- a/server/src/main/java/org/apache/druid/client/cache/BytesBoundedLinkedQueue.java
+++ b/server/src/main/java/org/apache/druid/client/cache/BytesBoundedLinkedQueue.java
@@ -137,7 +137,6 @@ public boolean offer(E e, long timeout, TimeUnit unit) throws InterruptedExcepti
     checkNotNull(e);
     checkSize(e);
     long nanos = unit.toNanos(timeout);
-    boolean added = false;
     putLock.lockInterruptibly();
     try {
       while (currentSize.get() + getBytesSize(e) > capacity) {
@@ -148,16 +147,12 @@ public boolean offer(E e, long timeout, TimeUnit unit) throws InterruptedExcepti
       }
       delegate.add(e);
       elementAdded(e);
-      added = true;
     }
     finally {
       putLock.unlock();
     }
-    if (added) {
-      signalNotEmpty();
-    }
-    return added;
-
+    signalNotEmpty();
+    return true;
   }
 
   @Override
@@ -222,12 +217,12 @@ public int drainTo(Collection<? super E> c, int maxElements)
     if (c == this) {
       throw new IllegalArgumentException();
     }
-    int n = 0;
+    int n;
     takeLock.lock();
     try {
       // elementCount.get provides visibility to first n Nodes
       n = Math.min(maxElements, elementCount.get());
-      if (n < 0) {
+      if (n <= 0) {
         return 0;
       }
       for (int i = 0; i < n; i++) {
@@ -239,9 +234,7 @@ public int drainTo(Collection<? super E> c, int maxElements)
     finally {
       takeLock.unlock();
     }
-    if (n > 0) {
-      signalNotFull();
-    }
+    signalNotFull();
     return n;
   }
 
@@ -250,7 +243,7 @@ public boolean offer(E e)
   {
     checkNotNull(e);
     checkSize(e);
-    boolean added = false;
+    boolean added;
     putLock.lock();
     try {
       if (currentSize.get() + getBytesSize(e) > capacity) {
@@ -274,7 +267,7 @@ public boolean offer(E e)
   @Override
   public E poll()
   {
-    E e = null;
+    E e;
     takeLock.lock();
     try {
       e = delegate.poll();
@@ -295,7 +288,7 @@ public E poll()
   public E poll(long timeout, TimeUnit unit) throws InterruptedException
   {
     long nanos = unit.toNanos(timeout);
-    E e = null;
+    E e;
     takeLock.lockInterruptibly();
     try {
       while (elementCount.get() == 0) {
diff --git a/server/src/main/java/org/apache/druid/metadata/SQLMetadataConnector.java b/server/src/main/java/org/apache/druid/metadata/SQLMetadataConnector.java
index ddd33d1b159..f7243751a9f 100644
--- a/server/src/main/java/org/apache/druid/metadata/SQLMetadataConnector.java
+++ b/server/src/main/java/org/apache/druid/metadata/SQLMetadataConnector.java
@@ -639,7 +639,7 @@ protected BasicDataSource getDatasource()
   {
     MetadataStorageConnectorConfig connectorConfig = getConfig();
 
-    BasicDataSource dataSource = null;
+    BasicDataSource dataSource;
 
     try {
       Properties dbcpProperties = connectorConfig.getDbcpProperties();
diff --git a/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java b/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java
index 9939fad7dae..94e2c6cd50b 100644
--- a/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java
+++ b/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java
@@ -242,7 +242,7 @@ public Response addAll(
       }
 
       CountingInputStream countingInputStream = new CountingInputStream(in);
-      Collection<Map<String, Object>> events = null;
+      Collection<Map<String, Object>> events;
       try {
         events = objectMapper.readValue(
             countingInputStream,
diff --git a/server/src/main/java/org/apache/druid/server/coordinator/ReplicationThrottler.java b/server/src/main/java/org/apache/druid/server/coordinator/ReplicationThrottler.java
index f9b4d7bf748..a08a68c76d0 100644
--- a/server/src/main/java/org/apache/druid/server/coordinator/ReplicationThrottler.java
+++ b/server/src/main/java/org/apache/druid/server/coordinator/ReplicationThrottler.java
@@ -154,7 +154,7 @@ public void reduceLifetime(String tier)
         lifetime = maxLifetime;
         lifetimes.put(tier, lifetime);
       }
-      lifetimes.put(tier, --lifetime);
+      lifetimes.put(tier, lifetime - 1);
     }
 
     public void resetLifetime(String tier)
diff --git a/server/src/main/java/org/apache/druid/server/coordinator/SegmentReplicantLookup.java b/server/src/main/java/org/apache/druid/server/coordinator/SegmentReplicantLookup.java
index 145b69b57de..6c122870df4 100644
--- a/server/src/main/java/org/apache/druid/server/coordinator/SegmentReplicantLookup.java
+++ b/server/src/main/java/org/apache/druid/server/coordinator/SegmentReplicantLookup.java
@@ -47,7 +47,7 @@ public static SegmentReplicantLookup make(DruidCluster cluster)
           if (numReplicants == null) {
             numReplicants = 0;
           }
-          segmentsInCluster.put(segment.getIdentifier(), server.getTier(), ++numReplicants);
+          segmentsInCluster.put(segment.getIdentifier(), server.getTier(), numReplicants + 1);
         }
 
         // Also account for queued segments
@@ -56,7 +56,7 @@ public static SegmentReplicantLookup make(DruidCluster cluster)
           if (numReplicants == null) {
             numReplicants = 0;
           }
-          loadingSegments.put(segment.getIdentifier(), server.getTier(), ++numReplicants);
+          loadingSegments.put(segment.getIdentifier(), server.getTier(), numReplicants + 1);
         }
       }
     }
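
numReplicants is a boxed Integer pulled out of a table; ++numReplicants unboxes, adds one, and reboxes into a new object while also mutating the local as a side effect. Only the incremented value is stored, so numReplicants + 1 says the same thing without the mutation (this mirrors the ReplicationThrottler change above). A standalone illustration:

    class BoxedIncrementSketch
    {
      public static void main(String[] args)
      {
        Integer count = 0;
        Integer stored = ++count; // count now references a new boxed Integer(1)
        Integer count2 = 0;
        Integer stored2 = count2 + 1; // count2 is left untouched
        System.out.println(stored + " " + count);   // 1 1
        System.out.println(stored2 + " " + count2); // 1 0
      }
    }
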
diff --git a/server/src/main/java/org/apache/druid/server/http/DatasourcesResource.java b/server/src/main/java/org/apache/druid/server/http/DatasourcesResource.java
index 793e19d0add..77ca931eae1 100644
--- a/server/src/main/java/org/apache/druid/server/http/DatasourcesResource.java
+++ b/server/src/main/java/org/apache/druid/server/http/DatasourcesResource.java
@@ -276,7 +276,7 @@ public Response getSegmentDataSourceIntervals(
       return Response.noContent().build();
     }
 
-    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
+    final Comparator<Interval> comparator = Comparators.intervalsByStartThenEnd().reversed();
 
     if (full != null) {
      final Map<Interval, Map<String, Object>> retVal = new TreeMap<>(comparator);
@@ -342,7 +342,7 @@ public Response getSegmentDataSourceSpecificInterval(
       return Response.noContent().build();
     }
 
-    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
+    final Comparator<Interval> comparator = Comparators.intervalsByStartThenEnd().reversed();
     if (full != null) {
      final Map<Interval, Map<String, Object>> retVal = new TreeMap<>(comparator);
       for (DataSegment dataSegment : dataSource.getSegments()) {
@@ -385,7 +385,7 @@ public Response getSegmentDataSourceSpecificInterval(
       return Response.ok(retVal).build();
     }
 
-    final Set<String> retVal = new TreeSet<>(Comparators.inverse(String.CASE_INSENSITIVE_ORDER));
+    final Set<String> retVal = new TreeSet<>(String.CASE_INSENSITIVE_ORDER.reversed());
     for (DataSegment dataSegment : dataSource.getSegments()) {
       if (theInterval.contains(dataSegment.getInterval())) {
         retVal.add(dataSegment.getIdentifier());
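
All three hunks swap the Guava-era Comparators.inverse helper for Java 8's built-in Comparator.reversed(), which composes directly on the comparator instance. A standalone sketch of the idiom with a reverse-ordered TreeMap (toy String keys, not Druid's Interval):

    import java.util.Comparator;
    import java.util.TreeMap;

    class ReversedComparatorSketch
    {
      public static void main(String[] args)
      {
        Comparator<String> reversed = Comparator.<String>naturalOrder().reversed();
        TreeMap<String, Integer> byKeyDescending = new TreeMap<>(reversed);
        byKeyDescending.put("2011-01-12", 1);
        byKeyDescending.put("2011-01-13", 2);
        System.out.println(byKeyDescending.firstKey()); // 2011-01-13: largest key first
      }
    }
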
diff --git a/server/src/main/java/org/apache/druid/server/http/IntervalsResource.java b/server/src/main/java/org/apache/druid/server/http/IntervalsResource.java
index 29d92f09826..56357944c5f 100644
--- a/server/src/main/java/org/apache/druid/server/http/IntervalsResource.java
+++ b/server/src/main/java/org/apache/druid/server/http/IntervalsResource.java
@@ -70,7 +70,7 @@ public IntervalsResource(
   @Produces(MediaType.APPLICATION_JSON)
   public Response getIntervals(@Context final HttpServletRequest req)
   {
-    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
+    final Comparator<Interval> comparator = Comparators.intervalsByStartThenEnd().reversed();
     final Set<ImmutableDruidDataSource> datasources = 
InventoryViewUtils.getSecuredDataSources(
         req,
         serverInventoryView,
@@ -80,11 +80,7 @@ public Response getIntervals(@Context final 
HttpServletRequest req)
     final Map<Interval, Map<String, Map<String, Object>>> retVal = new 
TreeMap<>(comparator);
     for (ImmutableDruidDataSource dataSource : datasources) {
       for (DataSegment dataSegment : dataSource.getSegments()) {
-        Map<String, Map<String, Object>> interval = 
retVal.get(dataSegment.getInterval());
-        if (interval == null) {
-          Map<String, Map<String, Object>> tmp = new HashMap<>();
-          retVal.put(dataSegment.getInterval(), tmp);
-        }
+        retVal.computeIfAbsent(dataSegment.getInterval(), i -> new 
HashMap<>());
         setProperties(retVal, dataSource, dataSegment);
       }
     }
@@ -109,18 +105,14 @@ public Response getSpecificIntervals(
         authorizerMapper
     );
 
-    final Comparator<Interval> comparator = 
Comparators.inverse(Comparators.intervalsByStartThenEnd());
+    final Comparator<Interval> comparator = 
Comparators.intervalsByStartThenEnd().reversed();
 
     if (full != null) {
       final Map<Interval, Map<String, Map<String, Object>>> retVal = new 
TreeMap<>(comparator);
       for (ImmutableDruidDataSource dataSource : datasources) {
         for (DataSegment dataSegment : dataSource.getSegments()) {
           if (theInterval.contains(dataSegment.getInterval())) {
-            Map<String, Map<String, Object>> dataSourceInterval = 
retVal.get(dataSegment.getInterval());
-            if (dataSourceInterval == null) {
-              Map<String, Map<String, Object>> tmp = new HashMap<>();
-              retVal.put(dataSegment.getInterval(), tmp);
-            }
+            retVal.computeIfAbsent(dataSegment.getInterval(), k -> new 
HashMap<>());
             setProperties(retVal, dataSource, dataSegment);
           }
         }
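Note: Map.computeIfAbsent collapses the get-then-null-check-then-put sequence removed above into one call, and the HashMap is only constructed when the key is actually absent. A self-contained sketch (interval keys simplified to strings):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.TreeMap;

    class ComputeIfAbsentSketch
    {
      public static void main(String[] args)
      {
        Map<String, Map<String, Object>> byInterval = new TreeMap<>();
        // The mapping function runs only on the first call for this key; the
        // second call reuses the existing inner map.
        byInterval.computeIfAbsent("2000-01-01/2000-01-02", k -> new HashMap<>()).put("count", 3);
        byInterval.computeIfAbsent("2000-01-01/2000-01-02", k -> new HashMap<>()).put("size", 1024);
        System.out.println(byInterval); // one interval key, inner map holds both entries
      }
    }
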
diff --git a/server/src/main/java/org/apache/druid/server/http/LookupCoordinatorResource.java b/server/src/main/java/org/apache/druid/server/http/LookupCoordinatorResource.java
index 881f3e53e05..90e56941ce7 100644
--- a/server/src/main/java/org/apache/druid/server/http/LookupCoordinatorResource.java
+++ b/server/src/main/java/org/apache/druid/server/http/LookupCoordinatorResource.java
@@ -482,7 +482,7 @@ public Response getAllNodesStatus(
   )
   {
     try {
-      Collection<String> tiers = null;
+      Collection<String> tiers;
       if (discover) {
         tiers = lookupCoordinatorManager.discoverTiers();
       } else {
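Note: this change, and the FilterHolder change below, drop dead "= null" initializers. Without the initializer, javac's definite-assignment analysis verifies that every branch assigns the variable before use, instead of letting a forgotten branch fall through to a null default. A sketch of the pattern (method and tier names hypothetical):

    import java.util.Collection;
    import java.util.Collections;

    class DefiniteAssignmentSketch
    {
      static Collection<String> tiers(boolean discover)
      {
        Collection<String> tiers; // no "= null": the compiler checks both branches
        if (discover) {
          tiers = Collections.singletonList("discovered-tier");
        } else {
          tiers = Collections.emptyList();
        }
        // Omitting either assignment above is now a compile-time error rather
        // than a latent NullPointerException.
        return tiers;
      }

      public static void main(String[] args)
      {
        System.out.println(tiers(true)); // [discovered-tier]
      }
    }
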
diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java
index 90e9d614592..0b0234324a8 100644
--- a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java
+++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java
@@ -57,7 +57,7 @@ public static void addExtensionFilters(ServletContextHandler handler, Injector i
    for (ServletFilterHolder servletFilterHolder : extensionFilters) {
      // Check the Filter first to guard against people who don't read the docs and return the Class even
      // when they have an instance.
       // when they have an instance.
-      FilterHolder holder = null;
+      FilterHolder holder;
       if (servletFilterHolder.getFilter() != null) {
         holder = new FilterHolder(servletFilterHolder.getFilter());
       } else if (servletFilterHolder.getFilterClass() != null) {
diff --git a/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java b/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java
index f88edada1e1..9b72b19e36e 100644
--- a/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java
+++ b/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java
@@ -24,7 +24,6 @@
 import org.apache.druid.java.util.emitter.EmittingLogger;
 import org.apache.druid.query.QueryInterruptedException;
 import org.apache.druid.server.DruidNode;
-import org.eclipse.jetty.server.Response;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -94,7 +93,7 @@ public void doFilter(ServletRequest servletRequest, ServletResponse servletRespo
       );
     }
 
-    if (authInfoChecked != null && !authInfoChecked && response.getStatus() != Response.SC_FORBIDDEN) {
+    if (authInfoChecked != null && !authInfoChecked && response.getStatus() != HttpServletResponse.SC_FORBIDDEN) {
       handleAuthorizationCheckError(
           "Request's authorization check failed but status code was not 403.",
           request,
@@ -134,7 +133,7 @@ private void handleUnauthenticatedRequest(
     );
     unauthorizedError.setStackTrace(new StackTraceElement[0]);
     OutputStream out = response.getOutputStream();
-    sendJsonError(response, Response.SC_UNAUTHORIZED, jsonMapper.writeValueAsString(unauthorizedError), out);
+    sendJsonError(response, HttpServletResponse.SC_UNAUTHORIZED, jsonMapper.writeValueAsString(unauthorizedError), out);
     out.close();
     return;
   }
@@ -157,7 +156,7 @@ private void handleAuthorizationCheckError(
       throw new ISE(errorMsg);
     } else {
       try {
-        servletResponse.sendError(Response.SC_FORBIDDEN);
+        servletResponse.sendError(HttpServletResponse.SC_FORBIDDEN);
       }
       catch (Exception e) {
         throw new RuntimeException(e);
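Note: here, and in the SecuritySanityCheckFilter change that follows, the status-code constants now come from javax.servlet.http.HttpServletResponse instead of Jetty's org.eclipse.jetty.server.Response, so the filters depend only on the servlet API. The values are the same integers; a trivial check, assuming the servlet API jar is on the classpath as it is for these filters:

    import javax.servlet.http.HttpServletResponse;

    class StatusCodeSketch
    {
      public static void main(String[] args)
      {
        // The servlet-api constants the diff switches to; no Jetty import needed.
        System.out.println(HttpServletResponse.SC_UNAUTHORIZED); // 401
        System.out.println(HttpServletResponse.SC_FORBIDDEN);    // 403
      }
    }
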
diff --git a/server/src/main/java/org/apache/druid/server/security/SecuritySanityCheckFilter.java b/server/src/main/java/org/apache/druid/server/security/SecuritySanityCheckFilter.java
index be71e4631c1..f222e5a8a6f 100644
--- a/server/src/main/java/org/apache/druid/server/security/SecuritySanityCheckFilter.java
+++ b/server/src/main/java/org/apache/druid/server/security/SecuritySanityCheckFilter.java
@@ -23,7 +23,6 @@
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.query.QueryInterruptedException;
 import org.apache.druid.server.DruidNode;
-import org.eclipse.jetty.server.Response;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -79,7 +78,7 @@ public void doFilter(ServletRequest request, ServletResponse response, FilterCha
 
    AuthenticationResult result = (AuthenticationResult) request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT);
     if (authInfoChecked != null || result != null || allowUnsecured != null) {
-      sendJsonError(httpResponse, Response.SC_FORBIDDEN, unauthorizedMessage, out);
+      sendJsonError(httpResponse, HttpServletResponse.SC_FORBIDDEN, unauthorizedMessage, out);
       out.close();
       return;
     }
diff --git a/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java b/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java
index d5bd504191a..f65dc3070d8 100644
--- a/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java
+++ b/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java
@@ -2875,6 +2875,9 @@ public int compareTo(DataSegment dataSegment)
       @Override
       public boolean equals(Object o)
       {
+        if (!(o instanceof DataSegment)) {
+          return false;
+        }
         return baseSegment.equals(o);
       }
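Note: the added instanceof guard keeps the delegating equals from forwarding an incompatible object (or null) straight into baseSegment.equals, honoring the equals contract that foreign types compare unequal. A minimal sketch of the guard with a hypothetical wrapper class:

    class Wrapper
    {
      private final String base;

      Wrapper(String base)
      {
        this.base = base;
      }

      @Override
      public boolean equals(Object o)
      {
        if (!(o instanceof Wrapper)) {
          return false; // also covers o == null
        }
        return base.equals(((Wrapper) o).base);
      }

      @Override
      public int hashCode()
      {
        return base.hashCode();
      }
    }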
 
diff --git a/server/src/test/java/org/apache/druid/client/cache/BytesBoundedLinkedQueueTest.java b/server/src/test/java/org/apache/druid/client/cache/BytesBoundedLinkedQueueTest.java
index 940160cce3c..b01ad799311 100644
--- a/server/src/test/java/org/apache/druid/client/cache/BytesBoundedLinkedQueueTest.java
+++ b/server/src/test/java/org/apache/druid/client/cache/BytesBoundedLinkedQueueTest.java
@@ -183,7 +183,7 @@ public void testAddedObjectExceedsCapacity() throws Exception
    Assert.assertFalse(q.offer(new TestObject(2), delayMS, TimeUnit.MILLISECONDS));
   }
 
- // @Test
+  @Test
   public void testConcurrentOperations() throws Exception
   {
     final BlockingQueue<TestObject> q = getQueue(Integer.MAX_VALUE);
@@ -240,8 +240,7 @@ public Boolean call() throws InterruptedException
                 public Boolean call()
                 {
                   while (!stopTest.get()) {
-                    System.out
-                          .println("drained elements : " + q.drainTo(new ArrayList<TestObject>(), Integer.MAX_VALUE));
+                    q.drainTo(new ArrayList<>(), Integer.MAX_VALUE);
                   }
                   return true;
                 }
diff --git a/server/src/test/java/org/apache/druid/client/cache/HybridCacheTest.java b/server/src/test/java/org/apache/druid/client/cache/HybridCacheTest.java
index 4284bd535b4..b5d96ebe2c2 100644
--- a/server/src/test/java/org/apache/druid/client/cache/HybridCacheTest.java
+++ b/server/src/test/java/org/apache/druid/client/cache/HybridCacheTest.java
@@ -172,8 +172,8 @@ public void testSanity()
       Assert.assertEquals(Sets.newHashSet(key3), res.keySet());
       Assert.assertArrayEquals(value3, res.get(key3));
 
-      Assert.assertEquals(++hits, cache.getStats().getNumHits());
-      Assert.assertEquals(++misses, cache.getStats().getNumMisses());
+      Assert.assertEquals(hits + 1, cache.getStats().getNumHits());
+      Assert.assertEquals(misses + 1, cache.getStats().getNumMisses());
     }
   }
 }
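Note: same pattern as the coordinator fix earlier in this diff: ++hits inside assertEquals mutates a local as a side effect of an assertion (the AssertWithSideEffects inspection flags this), while hits + 1 states the expected value without changing anything. A sketch, assuming JUnit 4 as used in these tests:

    import org.junit.Assert;
    import org.junit.Test;

    public class SideEffectFreeAssertTest
    {
      @Test
      public void expectedValueWithoutMutation()
      {
        long hits = 0;
        long observed = 1; // stand-in for cache.getStats().getNumHits()
        // Assert.assertEquals(++hits, observed) would pass too, but it mutates
        // `hits`; the plain addition leaves the local untouched.
        Assert.assertEquals(hits + 1, observed);
      }
    }
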
diff --git a/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheBenchmark.java b/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheBenchmark.java
index 313ecdfd35a..fda0b64c414 100644
--- a/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheBenchmark.java
+++ b/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheBenchmark.java
@@ -131,7 +131,7 @@ public void timePutObjects(int reps)
 
   public long timeGetObject(int reps)
   {
-    byte[] bytes = null;
+    byte[] bytes;
     long count = 0;
     for (int i = 0; i < reps; i++) {
       for (int k = 0; k < objectCount; ++k) {
diff --git a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java
index d87841293b2..b5f37a6cca3 100644
--- a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java
+++ b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java
@@ -30,9 +30,6 @@
 import org.junit.Assert;
 import org.junit.Test;
 
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
 public class AppenderatorPlumberTest
 {
   private final AppenderatorPlumber plumber;
@@ -94,43 +91,29 @@ public AppenderatorPlumberTest() throws Exception
   @Test
   public void testSimpleIngestion() throws Exception
   {
-
-    final ConcurrentMap<String, String> commitMetadata = new ConcurrentHashMap<>();
-    
     Appenderator appenderator = appenderatorTester.getAppenderator();
 
     // startJob
     Assert.assertEquals(null, plumber.startJob());
 
     // getDataSource
-    Assert.assertEquals(AppenderatorTester.DATASOURCE,
-        appenderator.getDataSource());
+    Assert.assertEquals(AppenderatorTester.DATASOURCE, appenderator.getDataSource());
 
     InputRow[] rows = new InputRow[] {AppenderatorTest.IR("2000", "foo", 1), 
         AppenderatorTest.IR("2000", "bar", 2), AppenderatorTest.IR("2000", 
"qux", 4)};
     // add
-    commitMetadata.put("x", "1");
-    Assert.assertEquals(
-        1,
-        plumber.add(rows[0], null).getRowCount());
+    Assert.assertEquals(1, plumber.add(rows[0], null).getRowCount());
 
-    commitMetadata.put("x", "2");
-    Assert.assertEquals(
-        2,
-        plumber.add(rows[1], null).getRowCount());
+    Assert.assertEquals(2, plumber.add(rows[1], null).getRowCount());
 
-    commitMetadata.put("x", "3");
-    Assert.assertEquals(
-        3,
-        plumber.add(rows[2], null).getRowCount());
+    Assert.assertEquals(3, plumber.add(rows[2], null).getRowCount());
 
     
     Assert.assertEquals(1, plumber.getSegmentsView().size());
     
    SegmentIdentifier si = plumber.getSegmentsView().values().toArray(new SegmentIdentifier[0])[0];
     
-    Assert.assertEquals(3,
-        appenderator.getRowCount(si));
+    Assert.assertEquals(3, appenderator.getRowCount(si));
 
     appenderator.clear();    
     Assert.assertTrue(appenderator.getSegments().isEmpty());
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/CuratorDruidCoordinatorTest.java b/server/src/test/java/org/apache/druid/server/coordinator/CuratorDruidCoordinatorTest.java
index 9312e4bed00..2b4a8c5c75e 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/CuratorDruidCoordinatorTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/CuratorDruidCoordinatorTest.java
@@ -307,14 +307,13 @@ public void testMoveSegment() throws Exception
     DataSegment segmentToMove = sourceSegments.get(2);
 
     List<String> sourceSegKeys = new ArrayList<>();
-    List<String> destSegKeys = new ArrayList<>();
 
     for (DataSegment segment : sourceSegments) {
      sourceSegKeys.add(announceBatchSegmentsForServer(source, ImmutableSet.of(segment), zkPathsConfig, jsonMapper));
     }
 
     for (DataSegment segment : destinationSegments) {
-      destSegKeys.add(announceBatchSegmentsForServer(dest, ImmutableSet.of(segment), zkPathsConfig, jsonMapper));
+      announceBatchSegmentsForServer(dest, ImmutableSet.of(segment), zkPathsConfig, jsonMapper);
     }
 
     Assert.assertTrue(timing.forWaiting().awaitLatch(segmentViewInitLatch));
diff --git a/server/src/test/java/org/apache/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java b/server/src/test/java/org/apache/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java
index 5db4b8f541b..277f56d6d83 100644
--- a/server/src/test/java/org/apache/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java
+++ b/server/src/test/java/org/apache/druid/server/router/JavaScriptTieredBrokerSelectorStrategyTest.java
@@ -94,7 +94,6 @@ public void testDisabled() throws Exception
   public void testGetBrokerServiceName()
   {
     final LinkedHashMap<String, String> tierBrokerMap = new LinkedHashMap<>();
-    tierBrokerMap.put("fast", "druid/fastBroker");
     tierBrokerMap.put("fast", "druid/broker");
     tierBrokerMap.put("slow", "druid/slowBroker");
 
diff --git a/services/src/main/java/org/apache/druid/cli/CoordinatorJettyServerInitializer.java b/services/src/main/java/org/apache/druid/cli/CoordinatorJettyServerInitializer.java
index dac70890a5a..64deddd89ce 100644
--- a/services/src/main/java/org/apache/druid/cli/CoordinatorJettyServerInitializer.java
+++ b/services/src/main/java/org/apache/druid/cli/CoordinatorJettyServerInitializer.java
@@ -117,7 +117,6 @@ public void initialize(Server server, Injector injector)
    final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class));
    final AuthenticatorMapper authenticatorMapper = injector.getInstance(AuthenticatorMapper.class);
 
-    List<Authenticator> authenticators = null;
     AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper);
 
     // perform no-op authorization for these resources
@@ -128,7 +127,7 @@ public void initialize(Server server, Injector injector)
      AuthenticationUtils.addNoopAuthorizationFilters(root, CliOverlord.UNSECURED_PATHS);
     }
 
-    authenticators = authenticatorMapper.getAuthenticatorChain();
+    List<Authenticator> authenticators = authenticatorMapper.getAuthenticatorChain();
     AuthenticationUtils.addAuthenticationFilterChain(root, authenticators);
 
    AuthenticationUtils.addAllowOptionsFilter(root, authConfig.isAllowUnauthenticatedHttpOptions());
diff --git a/services/src/main/java/org/apache/druid/cli/PullDependencies.java b/services/src/main/java/org/apache/druid/cli/PullDependencies.java
index 4df077b745b..885e100104a 100644
--- a/services/src/main/java/org/apache/druid/cli/PullDependencies.java
+++ b/services/src/main/java/org/apache/druid/cli/PullDependencies.java
@@ -73,6 +73,7 @@
 {
   private static final Logger log = new Logger(PullDependencies.class);
 
+  @SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
   private static final Set<String> exclusions = new HashSet<>(
       /*
 
diff --git a/services/src/main/java/org/apache/druid/cli/QueryJettyServerInitializer.java b/services/src/main/java/org/apache/druid/cli/QueryJettyServerInitializer.java
index 48d2962497f..2c9260241bc 100644
--- a/services/src/main/java/org/apache/druid/cli/QueryJettyServerInitializer.java
+++ b/services/src/main/java/org/apache/druid/cli/QueryJettyServerInitializer.java
@@ -93,14 +93,13 @@ public void initialize(Server server, Injector injector)
    final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class));
    final AuthenticatorMapper authenticatorMapper = injector.getInstance(AuthenticatorMapper.class);
 
-    List<Authenticator> authenticators = null;
     AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper);
 
     // perform no-op authorization for these resources
     AuthenticationUtils.addNoopAuthorizationFilters(root, UNSECURED_PATHS);
    AuthenticationUtils.addNoopAuthorizationFilters(root, authConfig.getUnsecuredPaths());
 
-    authenticators = authenticatorMapper.getAuthenticatorChain();
+    List<Authenticator> authenticators = authenticatorMapper.getAuthenticatorChain();
     AuthenticationUtils.addAuthenticationFilterChain(root, authenticators);
 
    AuthenticationUtils.addAllowOptionsFilter(root, authConfig.isAllowUnauthenticatedHttpOptions());
diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java
index c6d17f1efa0..fb93a698cbb 100644
--- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java
+++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java
@@ -20,7 +20,6 @@
 package org.apache.druid.sql.avatica;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSortedMap;
@@ -29,7 +28,6 @@
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.logger.Logger;
 
-import javax.annotation.Nullable;
 import javax.annotation.concurrent.GuardedBy;
 import java.util.HashMap;
 import java.util.Map;
@@ -85,17 +83,9 @@ public DruidStatement createStatement()
 
      // remove sensitive fields from the context, only the connection's context needs to have authentication
       // credentials
-      Map<String, Object> sanitizedContext = new HashMap<>();
-      sanitizedContext = Maps.filterEntries(
+      Map<String, Object> sanitizedContext = Maps.filterEntries(
           context,
-          new Predicate<Map.Entry<String, Object>>()
-          {
-            @Override
-            public boolean apply(@Nullable Map.Entry<String, Object> input)
-            {
-              return !SENSITIVE_CONTEXT_FIELDS.contains(input.getKey());
-            }
-          }
+          e -> !SENSITIVE_CONTEXT_FIELDS.contains(e.getKey())
       );
 
       final DruidStatement statement = new DruidStatement(
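Note: Guava's Maps.filterEntries takes a com.google.common.base.Predicate, a single-method interface, so the anonymous class collapses to a lambda and the Predicate/@Nullable imports plus the dead "new HashMap<>()" initializer disappear. A sketch with a hypothetical sensitive-field set, assuming Guava on the classpath:

    import com.google.common.collect.ImmutableMap;
    import com.google.common.collect.ImmutableSet;
    import com.google.common.collect.Maps;

    import java.util.Map;
    import java.util.Set;

    class SanitizeContextSketch
    {
      private static final Set<String> SENSITIVE_CONTEXT_FIELDS = ImmutableSet.of("password");

      public static void main(String[] args)
      {
        Map<String, Object> context = ImmutableMap.of("user", "druid", "password", "secret");
        // The lambda is the entire former anonymous Predicate implementation.
        Map<String, Object> sanitized = Maps.filterEntries(
            context,
            e -> !SENSITIVE_CONTEXT_FIELDS.contains(e.getKey())
        );
        System.out.println(sanitized); // {user=druid}
      }
    }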


 
