This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 3419daf  HIVE-23617: Fixing storage-api FindBug issues (#1063)
3419daf is described below

commit 3419dafd9159f5f2dd2333dd6e816480992954b6
Author: Panagiotis Garefalakis <pga...@cloudera.com>
AuthorDate: Fri Jun 12 10:47:28 2020 +0100

    HIVE-23617: Fixing storage-api FindBug issues (#1063)
---
 Jenkinsfile                                           |  3 ++-
 .../apache/hadoop/hive/common/ValidReadTxnList.java   |  4 ++++
 .../hadoop/hive/common/ValidReaderWriteIdList.java    |  5 +++++
 .../org/apache/hadoop/hive/common/io/DataCache.java   |  4 +++-
 .../apache/hadoop/hive/common/io/DiskRangeList.java   | 15 +++++++++++++++
 .../hive/common/io/encoded/EncodedColumnBatch.java    | 17 +++++++++++++----
 .../hadoop/hive/common/type/FastHiveDecimal.java      |  3 ++-
 .../hadoop/hive/common/type/FastHiveDecimalImpl.java  | 17 ++++++++---------
 .../hadoop/hive/common/type/HiveIntervalDayTime.java  |  2 ++
 .../hadoop/hive/common/type/RandomTypeUtil.java       | 12 ++++--------
 .../hadoop/hive/ql/exec/vector/BytesColumnVector.java |  8 ++++++--
 .../hive/ql/exec/vector/TimestampColumnVector.java    |  3 +++
 .../hive/ql/exec/vector/VectorizedRowBatch.java       |  5 +++++
 .../hadoop/hive/ql/io/sarg/SearchArgumentImpl.java    |  1 -
 .../hadoop/hive/serde2/io/HiveDecimalWritable.java    |  4 ++--
 .../hadoop/hive/serde2/io/HiveDecimalWritableV1.java  |  3 +++
 .../java/org/apache/hive/common/util/BloomFilter.java | 11 ++++++-----
 .../org/apache/hive/common/util/BloomKFilter.java     |  7 +++++--
 .../src/java/org/apache/hive/common/util/Murmur3.java |  4 ++++
 .../apache/hive/common/util/SuppressFBWarnings.java   | 19 +++++++++++++++++++
 .../test/org/apache/hive/common/util/TestMurmur3.java | 19 ++++++++++---------
 21 files changed, 121 insertions(+), 45 deletions(-)

diff --git a/Jenkinsfile b/Jenkinsfile
index c7dbb05..8c18733 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -171,7 +171,8 @@ jobWrappers {
       stage('Prechecks') {
         def findbugsProjects = [
             ":hive-shims-aggregator",
-            ":hive-shims-common"
+            ":hive-shims-common",
+            ":hive-storage-api"
         ]
        buildHive("-Pfindbugs -pl " + findbugsProjects.join(",") + " -am compile findbugs:check")
       }
diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java b/storage-api/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java
index b8ff03f..9cfe60e 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.common;
 
+import org.apache.hive.common.util.SuppressFBWarnings;
+
 import java.util.Arrays;
 import java.util.BitSet;
 
@@ -41,6 +43,7 @@ public class ValidReadTxnList implements ValidTxnList {
   /**
    * Used if there are no open transactions in the snapshot
    */
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP2", justification = "Ref external 
obj for efficiency")
   public ValidReadTxnList(long[] exceptions, BitSet abortedBits, long 
highWatermark, long minOpenTxn) {
     if (exceptions.length > 0) {
       this.minOpenTxn = minOpenTxn;
@@ -177,6 +180,7 @@ public class ValidReadTxnList implements ValidTxnList {
   }
 
   @Override
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "Expose 
internal rep for efficiency")
   public long[] getInvalidTransactions() {
     return exceptions;
   }
diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/ValidReaderWriteIdList.java b/storage-api/src/java/org/apache/hadoop/hive/common/ValidReaderWriteIdList.java
index bc8ac0d..4c2cf7c 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/common/ValidReaderWriteIdList.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/common/ValidReaderWriteIdList.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.common;
 
+import org.apache.hive.common.util.SuppressFBWarnings;
+
 import java.util.Arrays;
 import java.util.BitSet;
 
@@ -51,6 +53,8 @@ public class ValidReaderWriteIdList implements ValidWriteIdList {
   public ValidReaderWriteIdList(String tableName, long[] exceptions, BitSet abortedBits, long highWatermark) {
     this(tableName, exceptions, abortedBits, highWatermark, Long.MAX_VALUE);
   }
+
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP2", justification = "Ref external 
obj for efficiency")
   public ValidReaderWriteIdList(String tableName,
                                 long[] exceptions, BitSet abortedBits, long 
highWatermark, long minOpenWriteId) {
     this.tableName = tableName;
@@ -213,6 +217,7 @@ public class ValidReaderWriteIdList implements ValidWriteIdList {
   }
 
   @Override
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "Expose 
internal rep for efficiency")
   public long[] getInvalidWriteIds() {
     return exceptions;
   }
diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/io/DataCache.java b/storage-api/src/java/org/apache/hadoop/hive/common/io/DataCache.java
index 9b23a71..59e5174 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/common/io/DataCache.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/common/io/DataCache.java
@@ -19,10 +19,12 @@
 package org.apache.hadoop.hive.common.io;
 
 import org.apache.hadoop.hive.common.io.encoded.MemoryBuffer;
+import org.apache.hive.common.util.SuppressFBWarnings;
 
 /** An abstract data cache that IO formats can use to retrieve and cache data. */
 public interface DataCache {
-  public static final class BooleanRef {
+  @SuppressFBWarnings(value = "UUF_UNUSED_PUBLIC_OR_PROTECTED_FIELD", 
justification = "Used by interface consumers")
+  final class BooleanRef {
     public boolean value;
   }
 
diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java b/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java
index 731a564..d83ce8b 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/common/io/DiskRangeList.java
@@ -228,6 +228,21 @@ public class DiskRangeList extends DiskRange {
     return result;
   }
 
+  /**
+   * This class provides just a simplistic iterator interface (check {@link DiskRangeList}).
+   * Thus, for equality/hashcode just check the actual DiskRange content.
+   * @return hashcode
+   */
+  @Override
+  public int hashCode() {
+    return super.hashCode();
+  }
+
+  @Override
+  public boolean equals(Object other) {
+    return super.equals(other);
+  }
+
   public static class CreateHelper {
     private DiskRangeList tail = null, head;
 
diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/io/encoded/EncodedColumnBatch.java b/storage-api/src/java/org/apache/hadoop/hive/common/io/encoded/EncodedColumnBatch.java
index 29a3b0f..037c1ce 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/common/io/encoded/EncodedColumnBatch.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/common/io/encoded/EncodedColumnBatch.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.common.io.encoded;
 
 import java.util.Arrays;
+import java.util.Iterator;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -78,13 +79,21 @@ public class EncodedColumnBatch<BatchKey> {
 
     @Override
     public String toString() {
-      String bufStr = "";
+      StringBuilder sb = new StringBuilder();
       if (cacheBuffers != null) {
-        for (MemoryBuffer mb : cacheBuffers) {
-          bufStr += mb.getClass().getSimpleName() + " with " + mb.getByteBufferRaw().remaining() + " bytes, ";
+        Iterator<MemoryBuffer> iter = cacheBuffers.iterator();
+        while (iter.hasNext()) {
+          MemoryBuffer mb = iter.next();
+          sb.append(mb.getClass().getSimpleName());
+          sb.append(" with ");
+          sb.append(mb.getByteBufferRaw().remaining());
+          sb.append(" bytes");
+          if (iter.hasNext()) {
+            sb.append(", ");
+          }
         }
       }
-      return "ColumnStreamData [cacheBuffers=[" + bufStr
+      return "ColumnStreamData [cacheBuffers=[" + sb.toString()
           + "], indexBaseOffset=" + indexBaseOffset + "]";
     }
 
diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimal.java b/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimal.java
index 7151f09..ed1c099 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimal.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimal.java
@@ -23,6 +23,7 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.math.BigDecimal;
 import java.math.BigInteger;
+import java.nio.charset.StandardCharsets;
 
 /**
 *    FastHiveDecimal is a mutable fast decimal object.  It is the base class for both the
@@ -188,7 +189,7 @@ public class FastHiveDecimal {
   }
 
   protected boolean fastSetFromString(String string, boolean trimBlanks) {
-    byte[] bytes = string.getBytes();
+    byte[] bytes = string.getBytes(StandardCharsets.UTF_8);
     return
         fastSetFromBytes(
             bytes, 0, bytes.length, trimBlanks);
diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimalImpl.java b/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimalImpl.java
index 5a16a30..3c163b8 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimalImpl.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/common/type/FastHiveDecimalImpl.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.common.type;
 
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.io.EOFException;
 import java.io.IOException;
@@ -229,7 +230,7 @@ public class FastHiveDecimalImpl extends FastHiveDecimal {
     final int end = offset + length;
     throw new RuntimeException(
         "Invalid fast decimal \"" +
-            new String(bytes, offset, end) + "\"" +
+            new String(bytes, offset, end, StandardCharsets.UTF_8) + "\"" +
         " fastSignum " + fastResult.fastSignum + " fast0 " + fastResult.fast0 
+ " fast1 " + fastResult.fast1 + " fast2 " + fastResult.fast2 +
             " fastIntegerDigitCount " + fastResult.fastIntegerDigitCount +" 
fastScale " + fastResult.fastScale +
         " stack trace: " + 
getStackTraceAsSingleLine(Thread.currentThread().getStackTrace()));
@@ -903,7 +904,7 @@ public class FastHiveDecimalImpl extends FastHiveDecimal {
    */
   public static boolean fastSetFromString(
       String string, boolean trimBlanks, FastHiveDecimal result) {
-    byte[] bytes = string.getBytes();
+    byte[] bytes = string.getBytes(StandardCharsets.UTF_8);
     return fastSetFromBytes(bytes, 0, bytes.length, trimBlanks, result);
   }
 
@@ -5140,7 +5141,6 @@ public class FastHiveDecimalImpl extends FastHiveDecimal {
       fastResult.fastIntegerDigitCount = 0;
       fastResult.fastScale = 0;
     } else {
-      fastResult.fastSignum = 0;
       fastResult.fastSignum = fastSignum;
       fastResult.fastIntegerDigitCount = fastRawPrecision(fastResult);
       fastResult.fastScale = 0;
@@ -8287,7 +8287,6 @@ public class FastHiveDecimalImpl extends FastHiveDecimal {
       long left0, long left1, long left2,
       long right0, long right1, long right2,
       long[] result) {
-    assert (result.length == 5);
     if (result.length != 5) {
       throw new IllegalArgumentException("Expecting result array length = 5");
     }
@@ -8982,7 +8981,7 @@ public class FastHiveDecimalImpl extends FastHiveDecimal {
           formatScale,
           scratchBuffer);
     return
-        new String(scratchBuffer, index, FAST_SCRATCH_BUFFER_LEN_TO_BYTES - index);
+        new String(scratchBuffer, index, FAST_SCRATCH_BUFFER_LEN_TO_BYTES - index, StandardCharsets.UTF_8);
   }
 
   
//************************************************************************************************
@@ -8998,7 +8997,7 @@ public class FastHiveDecimalImpl extends FastHiveDecimal {
             fastSignum, fast0, fast1, fast2,
             fastIntegerDigitCount, fastScale, formatScale,
             scratchBuffer);
-    return new String(scratchBuffer, index, scratchBuffer.length - index);
+    return new String(scratchBuffer, index, scratchBuffer.length - index, StandardCharsets.UTF_8);
   }
 
   public static int fastToFormatBytes(
@@ -9073,7 +9072,7 @@ public class FastHiveDecimalImpl extends FastHiveDecimal {
             fastIntegerDigitCount,
             scratchBuffer);
     return
-        new String(scratchBuffer, index, FAST_SCRATCH_BUFFER_LEN_TO_BYTES - index);
+        new String(scratchBuffer, index, FAST_SCRATCH_BUFFER_LEN_TO_BYTES - index, StandardCharsets.UTF_8);
   }
 
   public static int fastToBytes(
@@ -9097,7 +9096,7 @@ public class FastHiveDecimalImpl extends FastHiveDecimal {
             scratchBuffer);
     return
         new String(
-            scratchBuffer, index, FAST_SCRATCH_BUFFER_LEN_TO_BYTES - index);
+            scratchBuffer, index, FAST_SCRATCH_BUFFER_LEN_TO_BYTES - index, StandardCharsets.UTF_8);
   }
 
   private static String doFastToString(
@@ -9109,7 +9108,7 @@ public class FastHiveDecimalImpl extends FastHiveDecimal {
             fastSignum, fast0, fast1, fast2,
             fastIntegerDigitCount, fastScale, fastTrailingZeroesScale,
             scratchBuffer);
-    return new String(scratchBuffer, index, scratchBuffer.length - index);
+    return new String(scratchBuffer, index, scratchBuffer.length - index, StandardCharsets.UTF_8);
   }
 
   private static int doFastToBytes(
diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java b/storage-api/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java
index 907f660..2a0b352 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/common/type/HiveIntervalDayTime.java
@@ -24,6 +24,7 @@ import java.util.regex.Pattern;
 
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hive.common.util.IntervalDayTimeUtils;
+import org.apache.hive.common.util.SuppressFBWarnings;
 
 
 /**
@@ -168,6 +169,7 @@ public class HiveIntervalDayTime implements Comparable<HiveIntervalDayTime> {
    * Return a copy of this object.
    */
   @Override
+  @SuppressFBWarnings(value = "CN_IMPLEMENTS_CLONE_BUT_NOT_CLONEABLE", 
justification = "Intended")
   public Object clone() {
       return new HiveIntervalDayTime(totalSeconds, nanos);
   }
diff --git a/storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java b/storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java
index ec1b11e..1d53135 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/common/type/RandomTypeUtil.java
@@ -38,11 +38,7 @@ public class RandomTypeUtil {
     }
     StringBuilder sb = new StringBuilder();
     for (int i = 0; i < length; i++) {
-      if (characters == null) {
-        sb.append((char) (r.nextInt(128)));
-      } else {
-        sb.append(characters.charAt(r.nextInt(characters.length())));
-      }
+      sb.append(characters.charAt(r.nextInt(characters.length())));
     }
     return sb.toString();
   }
@@ -131,9 +127,6 @@ public class RandomTypeUtil {
   public static Timestamp getRandTimestamp(Random r, int minYear, int maxYear) {
     String optionalNanos = "";
     switch (r.nextInt(4)) {
-    case 0:
-      // No nanos.
-      break;
     case 1:
       optionalNanos = String.format(".%09d",
           Integer.valueOf(r.nextInt((int) NANOSECONDS_PER_SECOND)));
@@ -148,6 +141,9 @@ public class RandomTypeUtil {
       optionalNanos = String.format(".%09d",
           Integer.valueOf(r.nextInt((int) NANOSECONDS_PER_MILLISSECOND)));
       break;
+    default:
+      // No nanos.
+      break;
     }
     String timestampStr = String.format("%04d-%02d-%02d %02d:%02d:%02d%s",
         Integer.valueOf(minYear + r.nextInt(maxYear - minYear + 1)),  // year
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/BytesColumnVector.java b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/BytesColumnVector.java
index 2c2ef61..6618807 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/BytesColumnVector.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/BytesColumnVector.java
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
+import org.apache.hive.common.util.SuppressFBWarnings;
+
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 
 
@@ -219,6 +222,7 @@ public class BytesColumnVector extends ColumnVector {
     }
   }
 
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "Expose 
internal rep for efficiency")
   public byte[] getValPreallocatedBytes() {
     return buffer;
   }
@@ -523,7 +527,7 @@ public class BytesColumnVector extends ColumnVector {
       row = 0;
     }
     if (noNulls || !isNull[row]) {
-      return new String(vector[row], start[row], length[row]);
+      return new String(vector[row], start[row], length[row], StandardCharsets.UTF_8);
     } else {
       return null;
     }
@@ -536,7 +540,7 @@ public class BytesColumnVector extends ColumnVector {
     }
     if (noNulls || !isNull[row]) {
       buffer.append('"');
-      buffer.append(new String(vector[row], start[row], length[row]));
+      buffer.append(new String(vector[row], start[row], length[row], StandardCharsets.UTF_8));
       buffer.append('"');
     } else {
       buffer.append("null");
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
index 7807e69..f97156c 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
@@ -25,6 +25,7 @@ import java.util.Arrays;
 
 import org.apache.hadoop.hive.common.type.CalendarUtils;
 import org.apache.hadoop.io.Writable;
+import org.apache.hive.common.util.SuppressFBWarnings;
 
 /**
 * This class represents a nullable timestamp column vector capable of handing a wide range of
@@ -132,6 +133,7 @@ public class TimestampColumnVector extends ColumnVector {
    * @param elementNum
    * @return
    */
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "Expose 
internal rep for efficiency")
   public Timestamp asScratchTimestamp(int elementNum) {
     scratchTimestamp.setTime(time[elementNum]);
     scratchTimestamp.setNanos(nanos[elementNum]);
@@ -142,6 +144,7 @@ public class TimestampColumnVector extends ColumnVector {
    * Return the scratch timestamp (contents undefined).
    * @return
    */
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "Expose 
internal rep for efficiency")
   public Timestamp getScratchTimestamp() {
     return scratchTimestamp;
   }
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java
index 0e678d3..f1453e8 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatch.java
@@ -24,6 +24,7 @@ import java.io.IOException;
 import org.apache.hadoop.hive.ql.io.filter.MutableFilterContext;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
+import org.apache.hive.common.util.SuppressFBWarnings;
 
 /**
  * A VectorizedRowBatch is a set of rows, organized with each column
@@ -369,6 +370,7 @@ public class VectorizedRowBatch implements Writable, MutableFilterContext {
   }
 
   @Override
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "Expose 
internal rep for efficiency")
   public int[] getSelected() {
     return selected;
   }
@@ -379,6 +381,7 @@ public class VectorizedRowBatch implements Writable, MutableFilterContext {
   }
 
   @Override
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP2", justification = "Ref external 
obj for efficiency")
   public void setFilterContext(boolean isSelectedInUse, int[] selected, int 
selectedSize) {
     this.selectedInUse = isSelectedInUse;
     this.selected = selected;
@@ -399,6 +402,7 @@ public class VectorizedRowBatch implements Writable, MutableFilterContext {
   }
 
   @Override
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "Expose 
internal rep for efficiency")
   public int[] updateSelected(int minCapacity) {
     if (selected == null || selected.length < minCapacity) {
       selected = new int[minCapacity];
@@ -412,6 +416,7 @@ public class VectorizedRowBatch implements Writable, MutableFilterContext {
   }
 
   @Override
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP2", justification = "Ref external 
obj for efficiency")
   public void setSelected(int[] selectedArray) {
     selected = selectedArray;
   }
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
index c2b7c4a..2daa5e8 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/io/sarg/SearchArgumentImpl.java
@@ -82,7 +82,6 @@ public final class SearchArgumentImpl implements SearchArgument {
       checkLiteralType(literal, type, conf);
       this.literalList = literalList;
       if (literalList != null) {
-        Class valueCls = type.getValueClass();
         for(Object lit: literalList) {
           checkLiteralType(lit, type, conf);
         }
diff --git a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java
index b931b81..5f9e22a 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritable.java
@@ -23,14 +23,13 @@ import java.io.DataOutput;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.IOException;
-import java.math.BigInteger;
 
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.FastHiveDecimal;
 import org.apache.hadoop.hive.common.type.FastHiveDecimalImpl;
-import org.apache.hadoop.hive.common.type.HiveDecimalVersionV2;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.hive.common.util.SuppressFBWarnings;
 
 /**
  * A mutable decimal.
@@ -577,6 +576,7 @@ public final class HiveDecimalWritable extends FastHiveDecimal
    *
    */
   @HiveDecimalWritableVersionV2
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "Expose 
internal rep for efficiency")
   public byte[] bigIntegerBytesInternalScratchBuffer() {
     return internalScratchBuffer;
   }
diff --git a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritableV1.java b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritableV1.java
index a8e5b25..ca0306d 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritableV1.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/serde2/io/HiveDecimalWritableV1.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimalV1;
 
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableUtils;
+import org.apache.hive.common.util.SuppressFBWarnings;
 
 public class HiveDecimalWritableV1 implements WritableComparable<HiveDecimalWritableV1> {
 
@@ -77,6 +78,7 @@ public class HiveDecimalWritableV1 implements WritableComparable<HiveDecimalWrit
   }
 
   @HiveDecimalWritableVersionV1
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP2", justification = "Ref external 
obj for efficiency")
   public void set(byte[] bytes, int scale) {
     this.internalStorage = bytes;
     this.scale = scale;
@@ -162,6 +164,7 @@ public class HiveDecimalWritableV1 implements WritableComparable<HiveDecimalWrit
    * @return
    */
   @HiveDecimalWritableVersionV1
+  @SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "Expose 
internal rep for efficiency")
   public byte[] getInternalStorage() {
     return internalStorage;
   }
diff --git a/storage-api/src/java/org/apache/hive/common/util/BloomFilter.java b/storage-api/src/java/org/apache/hive/common/util/BloomFilter.java
index c0380ec..8e1bd7a 100644
--- a/storage-api/src/java/org/apache/hive/common/util/BloomFilter.java
+++ b/storage-api/src/java/org/apache/hive/common/util/BloomFilter.java
@@ -19,6 +19,7 @@
 package org.apache.hive.common.util;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 
 /**
@@ -91,7 +92,7 @@ public class BloomFilter {
 
   public void add(byte[] val) {
     if (val == null) {
-      addBytes(val, -1, -1);
+      addBytes(null, -1, -1);
     } else {
       addBytes(val, 0, val.length);
     }
@@ -128,7 +129,7 @@ public class BloomFilter {
     if (val == null) {
       add(null);
     } else {
-      add(val.getBytes());
+      add(val.getBytes(StandardCharsets.UTF_8));
     }
   }
 
@@ -142,7 +143,7 @@ public class BloomFilter {
 
   public boolean test(byte[] val) {
     if (val == null) {
-      return testBytes(val, -1, -1);
+      return testBytes(null, -1, -1);
     }
     return testBytes(val, 0, val.length);
   }
@@ -175,7 +176,7 @@ public class BloomFilter {
     if (val == null) {
       return test(null);
     } else {
-      return test(val.getBytes());
+      return test(val.getBytes(StandardCharsets.UTF_8));
     }
   }
 
@@ -327,7 +328,7 @@ public class BloomFilter {
   * Bare metal bit set implementation. For performance reasons, this implementation does not check
   * for index bounds nor expand the bit set size if the specified index is greater than the size.
    */
-  public class BitSet {
+  static class BitSet {
     private final long[] data;
 
     public BitSet(long bits) {
diff --git a/storage-api/src/java/org/apache/hive/common/util/BloomKFilter.java b/storage-api/src/java/org/apache/hive/common/util/BloomKFilter.java
index 8fe6597..2386279 100644
--- a/storage-api/src/java/org/apache/hive/common/util/BloomKFilter.java
+++ b/storage-api/src/java/org/apache/hive/common/util/BloomKFilter.java
@@ -23,6 +23,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 
 /**
@@ -138,7 +139,7 @@ import java.util.Arrays;
   }
 
   public void addString(String val) {
-    addBytes(val.getBytes());
+    addBytes(val.getBytes(StandardCharsets.UTF_8));
   }
 
   public void addByte(byte val) {
@@ -217,7 +218,7 @@ import java.util.Arrays;
   }
 
   public boolean testString(String val) {
-    return testBytes(val.getBytes());
+    return testBytes(val.getBytes(StandardCharsets.UTF_8));
   }
 
   public boolean testByte(byte val) {
@@ -382,6 +383,7 @@ import java.util.Arrays;
      *
      * @param data - bit array
      */
+    @SuppressFBWarnings(value = "EI_EXPOSE_REP2", justification = "Ref external obj for efficiency")
     public BitSet(long[] data) {
       assert data.length > 0 : "data length is zero!";
       this.data = data;
@@ -413,6 +415,7 @@ import java.util.Arrays;
       return data.length * Long.SIZE;
     }
 
+    @SuppressFBWarnings(value = "EI_EXPOSE_REP", justification = "Expose internal rep for efficiency")
     public long[] getData() {
       return data;
     }
diff --git a/storage-api/src/java/org/apache/hive/common/util/Murmur3.java b/storage-api/src/java/org/apache/hive/common/util/Murmur3.java
index 85db95c..a2dd462 100644
--- a/storage-api/src/java/org/apache/hive/common/util/Murmur3.java
+++ b/storage-api/src/java/org/apache/hive/common/util/Murmur3.java
@@ -133,6 +133,7 @@ public class Murmur3 {
    * @param seed   - seed. (default 0)
    * @return - hashcode
    */
+  @SuppressFBWarnings(value = {"SF_SWITCH_FALLTHROUGH", 
"SF_SWITCH_NO_DEFAULT"}, justification = "Expected")
   public static int hash32(byte[] data, int offset, int length, int seed) {
     int hash = seed;
     final int nblocks = length >> 2;
@@ -256,6 +257,7 @@ public class Murmur3 {
    * @param seed   - seed. (default is 0)
    * @return - hashcode
    */
+  @SuppressFBWarnings(value = "SF_SWITCH_NO_DEFAULT", justification = 
"Expected")
   public static long hash64(byte[] data, int offset, int length, int seed) {
     long hash = seed;
     final int nblocks = length >> 3;
@@ -330,6 +332,7 @@ public class Murmur3 {
    * @param seed   - seed. (default is 0)
    * @return - hashcode (2 longs)
    */
+  @SuppressFBWarnings(value = "SF_SWITCH_NO_DEFAULT", justification = 
"Expected")
   public static long[] hash128(byte[] data, int offset, int length, int seed) {
     long h1 = seed;
     long h2 = seed;
@@ -510,6 +513,7 @@ public class Murmur3 {
       System.arraycopy(data, offset + consumed, tail, 0, tailLen);
     }
 
+    @SuppressFBWarnings(value = {"SF_SWITCH_FALLTHROUGH", 
"SF_SWITCH_NO_DEFAULT"}, justification = "Expected")
     public final int end() {
       int k1 = 0;
       switch (tailLen) {
diff --git a/storage-api/src/java/org/apache/hive/common/util/SuppressFBWarnings.java b/storage-api/src/java/org/apache/hive/common/util/SuppressFBWarnings.java
new file mode 100644
index 0000000..24f9224
--- /dev/null
+++ b/storage-api/src/java/org/apache/hive/common/util/SuppressFBWarnings.java
@@ -0,0 +1,19 @@
+package org.apache.hive.common.util;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@Retention(RetentionPolicy.CLASS)
+public @interface SuppressFBWarnings {
+    /**
+     * The set of FindBugs warnings that are to be suppressed in
+     * annotated element. The value can be a bug category, kind or pattern.
+     *
+     */
+    String[] value() default {};
+
+    /**
+     * Optional documentation of the reason why the warning is suppressed
+     */
+    String justification() default "";
+}
diff --git a/storage-api/src/test/org/apache/hive/common/util/TestMurmur3.java b/storage-api/src/test/org/apache/hive/common/util/TestMurmur3.java
index 7320d6d..edd58c3 100644
--- a/storage-api/src/test/org/apache/hive/common/util/TestMurmur3.java
+++ b/storage-api/src/test/org/apache/hive/common/util/TestMurmur3.java
@@ -28,6 +28,7 @@ import org.junit.Test;
 
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Random;
 
@@ -41,13 +42,13 @@ public class TestMurmur3 {
     String key = "test";
     int seed = 123;
     HashFunction hf = Hashing.murmur3_32(seed);
-    int hc1 = hf.hashBytes(key.getBytes()).asInt();
-    int hc2 = Murmur3.hash32(key.getBytes(), key.getBytes().length, seed);
+    int hc1 = hf.hashBytes(key.getBytes(StandardCharsets.UTF_8)).asInt();
+    int hc2 = Murmur3.hash32(key.getBytes(StandardCharsets.UTF_8), key.getBytes().length, seed);
     assertEquals(hc1, hc2);
 
     key = "testkey";
-    hc1 = hf.hashBytes(key.getBytes()).asInt();
-    hc2 = Murmur3.hash32(key.getBytes(), key.getBytes().length, seed);
+    hc1 = hf.hashBytes(key.getBytes(StandardCharsets.UTF_8)).asInt();
+    hc2 = Murmur3.hash32(key.getBytes(StandardCharsets.UTF_8), key.getBytes().length, seed);
     assertEquals(hc1, hc2);
   }
 
@@ -100,11 +101,11 @@ public class TestMurmur3 {
     HashFunction hf = Hashing.murmur3_128(seed);
     // guava stores the hashcodes in little endian order
     ByteBuffer buf = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
-    buf.put(hf.hashBytes(key.getBytes()).asBytes());
+    buf.put(hf.hashBytes(key.getBytes(StandardCharsets.UTF_8)).asBytes());
     buf.flip();
     long gl1 = buf.getLong();
     long gl2 = buf.getLong(8);
-    long[] hc = Murmur3.hash128(key.getBytes(), 0, key.getBytes().length, seed);
+    long[] hc = Murmur3.hash128(key.getBytes(StandardCharsets.UTF_8), 0, key.getBytes(StandardCharsets.UTF_8).length, seed);
     long m1 = hc[0];
     long m2 = hc[1];
     assertEquals(gl1, m1);
@@ -112,11 +113,11 @@ public class TestMurmur3 {
 
     key = "testkey128_testkey128";
     buf = ByteBuffer.allocate(16).order(ByteOrder.LITTLE_ENDIAN);
-    buf.put(hf.hashBytes(key.getBytes()).asBytes());
+    buf.put(hf.hashBytes(key.getBytes(StandardCharsets.UTF_8)).asBytes());
     buf.flip();
     gl1 = buf.getLong();
     gl2 = buf.getLong(8);
-    byte[] keyBytes = key.getBytes();
+    byte[] keyBytes = key.getBytes(StandardCharsets.UTF_8);
     hc = Murmur3.hash128(keyBytes, 0, keyBytes.length, seed);
     m1 = hc[0];
     m2 = hc[1];
@@ -140,7 +141,7 @@ public class TestMurmur3 {
         " it was the spring of hope, it was the winter of despair," +
         " we had everything before us, we had nothing before us," +
         " we were all going direct to Heaven," +
-        " we were all going direct the other way.").getBytes();
+        " we were all going direct the other 
way.").getBytes(StandardCharsets.UTF_8);
     long hash = Murmur3.hash64(origin, 0, origin.length);
     assertEquals(305830725663368540L, hash);
 
