This is an automated email from the ASF dual-hosted git repository.

ndimiduk pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2.4 by this push:
     new c3ac786  HBASE-26622 Update error-prone to 2.10
c3ac786 is described below

commit c3ac78691e8b302a81425046994a01a67e474d10
Author: Mike Drob <[email protected]>
AuthorDate: Thu Dec 23 11:00:54 2021 -0800

    HBASE-26622 Update error-prone to 2.10
    
    Author:    Mike Drob <[email protected]>
    Co-authored-by: Nick Dimiduk <[email protected]>
    Signed-off-by: Andrew Purtell <[email protected]>
---
 .../apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java |  8 ++------
 .../src/main/java/org/apache/hadoop/hbase/KeyValue.java | 17 ++++++++---------
 .../hbase/coprocessor/AggregateImplementation.java      | 10 +++++-----
 .../hadoop/hbase/mapreduce/HFileOutputFormat2.java      |  2 +-
 .../hbase/regionserver/RegionCoprocessorHost.java       |  1 +
 .../hadoop/hbase/security/token/TokenProvider.java      |  2 +-
 .../hbase/client/TestPutDeleteEtcCellIteration.java     | 12 +++++-------
 .../org/apache/hadoop/hbase/codec/CodecPerformance.java | 11 ++---------
 .../hadoop/hbase/thrift/ThriftHBaseServiceHandler.java  |  1 -
 pom.xml                                                 |  2 +-
 10 files changed, 26 insertions(+), 40 deletions(-)

diff --git a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
index d6e68f3..3be9a2e 100644
--- a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
+++ b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
@@ -132,17 +132,13 @@ public class ProtobufDecoder extends MessageToMessageDecoder<ByteBuf> {
     try {
       getParserForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("getParserForType");
       newBuilderForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("newBuilderForType");
+      // TODO: If this is false then the class will fail to load? Can refactor it out?
+      hasParser = true;
     } catch (NoSuchMethodException e) {
       // If the method is not found, we are in trouble. Abort.
       throw new RuntimeException(e);
     }
 
-    try {
-      protobufMessageLiteClass.getDeclaredMethod("getParserForType");
-      hasParser = true;
-    } catch (Throwable var2) {
-    }
-
     HAS_PARSER = hasParser;
   }
 }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 79356ed..efa442b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -1107,10 +1107,10 @@ public class KeyValue implements ExtendedCell, Cloneable {
    */
   @Override
   public KeyValue clone() throws CloneNotSupportedException {
-    super.clone();
-    byte [] b = new byte[this.length];
-    System.arraycopy(this.bytes, this.offset, b, 0, this.length);
-    KeyValue ret = new KeyValue(b, 0, b.length);
+    KeyValue ret = (KeyValue) super.clone();
+    ret.bytes = Arrays.copyOf(this.bytes, this.bytes.length);
+    ret.offset = 0;
+    ret.length = ret.bytes.length;
     // Important to clone the memstoreTS as well - otherwise memstore's
     // update-in-place methods (eg increment) will end up creating
     // new entries
@@ -1721,8 +1721,8 @@ public class KeyValue implements ExtendedCell, Cloneable {
     }
 
     @Override
-    protected Object clone() throws CloneNotSupportedException {
-      return new MetaComparator();
+    protected MetaComparator clone() throws CloneNotSupportedException {
+      return (MetaComparator) super.clone();
     }
 
     /**
@@ -2252,9 +2252,8 @@ public class KeyValue implements ExtendedCell, Cloneable {
     }
 
     @Override
-    protected Object clone() throws CloneNotSupportedException {
-      super.clone();
-      return new KVComparator();
+    protected KVComparator clone() throws CloneNotSupportedException {
+      return (KVComparator) super.clone();
     }
 
   }
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index a7181f9..5571e1b 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -53,11 +53,11 @@ import org.slf4j.LoggerFactory;
  * {@link ColumnInterpreter} is used to interpret column value. This class is
  * parameterized with the following (these are the types with which the {@link ColumnInterpreter}
  * is parameterized, and for more description on these, refer to {@link ColumnInterpreter}):
- * @param T Cell value data type
- * @param S Promoted data type
- * @param P PB message that is used to transport initializer specific bytes
- * @param Q PB message that is used to transport Cell (&lt;T&gt;) instance
- * @param R PB message that is used to transport Promoted (&lt;S&gt;) instance
+ * @param <T> Cell value data type
+ * @param <S> Promoted data type
+ * @param <P> PB message that is used to transport initializer specific bytes
+ * @param <Q> PB message that is used to transport Cell (&lt;T&gt;) instance
+ * @param <R> PB message that is used to transport Promoted (&lt;S&gt;) instance
  */
 @InterfaceAudience.Private
 public class AggregateImplementation<T, S, P extends Message, Q extends Message, R extends Message>
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index 2187de0..9c64b49 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -95,7 +95,7 @@ import org.slf4j.LoggerFactory;
 /**
  * Writes HFiles. Passed Cells must arrive in order.
  * Writes current time as the sequence id for the file. Sets the major compacted
- * attribute on created @{link {@link HFile}s. Calling write(null,null) will forcibly roll
+ * attribute on created {@link HFile}s. Calling write(null,null) will forcibly roll
  * all HFiles being written.
  * <p>
  * Using this class as part of a MapReduce job is best done
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 6961bfd..78565c1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -260,6 +260,7 @@ public class RegionCoprocessorHost
    * @param rsServices interface to available region server functionality
    * @param conf the configuration
    */
+  @SuppressWarnings("ReturnValueIgnored") // Checking method exists as CPU optimization
   public RegionCoprocessorHost(final HRegion region,
       final RegionServerServices rsServices, final Configuration conf) {
     super(rsServices);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
index 92bd0db..28fef37 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
@@ -129,7 +129,7 @@ public class TokenProvider implements AuthenticationProtos.AuthenticationService
 
       Token<AuthenticationTokenIdentifier> token =
           secretManager.generateToken(currentUser.getName());
-      response.setToken(ClientTokenUtil.toToken(token)).build();
+      response.setToken(ClientTokenUtil.toToken(token));
     } catch (IOException ioe) {
       CoprocessorRpcUtils.setControllerException(controller, ioe);
     }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index b558358..de8ecd4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -60,7 +60,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -73,15 +73,13 @@ public class TestPutDeleteEtcCellIteration {
       p.addColumn(bytes, bytes, TIMESTAMP, bytes);
     }
     int index = 0;
-    int trigger = 3;
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
       // When we hit the trigger, try inserting a new KV; should trigger exception
-      if (trigger == 3) p.addColumn(bytes, bytes, TIMESTAMP, bytes);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      p.addColumn(bytes, bytes, TIMESTAMP, bytes);
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
-    assertEquals(COUNT, index);
   }
 
   @Test
@@ -95,7 +93,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = d.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.DeleteColumn));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.Delete), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -150,7 +148,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = r.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
index bba27fe..7abf189 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.codec;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayInputStream;
@@ -30,10 +30,6 @@ import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.codec.CellCodec;
-import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.codec.KeyValueCodec;
-import org.apache.hadoop.hbase.codec.MessageCodec;
 import org.apache.hadoop.hbase.io.CellOutputStream;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -95,10 +91,7 @@ public class CodecPerformance {
   }
 
   static void verifyCells(final Cell [] input, final Cell [] output) {
-    assertEquals(input.length, output.length);
-    for (int i = 0; i < input.length; i ++) {
-      input[i].equals(output[i]);
-    }
+    assertArrayEquals(input, output);
   }
 
   static void doCodec(final Codec codec, final Cell [] cells, final int cycles, final int count,
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
index c942977..369b2be 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
@@ -154,7 +154,6 @@ public class ThriftHBaseServiceHandler extends HBaseServiceHandler implements Hb
    * id-&gt;scanner hash-map.
    *
    * @param id the ID of the scanner to remove
-   * @return a Scanner, or null if ID was invalid.
    */
   private synchronized void removeScanner(int id) {
     scannerMap.invalidate(id);
diff --git a/pom.xml b/pom.xml
index 1398ded..e078985 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1499,7 +1499,7 @@
     -->
     <checkstyle.version>8.28</checkstyle.version>
     <exec.maven.version>1.6.0</exec.maven.version>
-    <error-prone.version>2.4.0</error-prone.version>
+    <error-prone.version>2.10.0</error-prone.version>
     <jamon.plugin.version>2.4.2</jamon.plugin.version>
     <lifecycle.mapping.version>1.0.0</lifecycle.mapping.version>
     <maven.antrun.version>1.8</maven.antrun.version>

Reply via email to