This is an automated email from the ASF dual-hosted git repository.

srdo pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/storm.git


The following commit(s) were added to refs/heads/master by this push:
     new 4bae551  STORM-3443: hive: fix all checkstyle warnings
     new 04af947  Merge pull request #3058 from krichter722/checkstyle-hive
4bae551 is described below

commit 4bae551127c4e34eab49a352924bbc9fbbe6b83b
Author: Karl-Philipp Richter <krich...@posteo.de>
AuthorDate: Mon Jul 1 20:00:41 2019 +0200

    STORM-3443: hive: fix all checkstyle warnings
---
 external/storm-hive/pom.xml                        |  2 +-
 .../java/org/apache/storm/hive/bolt/HiveBolt.java  | 14 ++++----
 .../bolt/mapper/DelimitedRecordHiveMapper.java     | 25 +++++++-------
 .../apache/storm/hive/bolt/mapper/HiveMapper.java  | 24 +++-----------
 .../hive/bolt/mapper/JsonRecordHiveMapper.java     | 23 +++++++------
 .../org/apache/storm/hive/common/HiveOptions.java  |  6 ++--
 .../org/apache/storm/hive/common/HiveUtils.java    |  8 ++---
 .../org/apache/storm/hive/common/HiveWriter.java   | 38 +++++++++-------------
 .../org/apache/storm/hive/trident/HiveState.java   | 14 ++++----
 9 files changed, 67 insertions(+), 87 deletions(-)

diff --git a/external/storm-hive/pom.xml b/external/storm-hive/pom.xml
index 94b73db..4ca6f0b 100644
--- a/external/storm-hive/pom.xml
+++ b/external/storm-hive/pom.xml
@@ -246,7 +246,7 @@
         <artifactId>maven-checkstyle-plugin</artifactId>
         <!--Note - the version would be inherited-->
         <configuration>
-          <maxAllowedViolations>58</maxAllowedViolations>
+          <maxAllowedViolations>0</maxAllowedViolations>
         </configuration>
       </plugin>
       <plugin>
diff --git a/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/HiveBolt.java b/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/HiveBolt.java
index 180f41b..db373e5 100644
--- a/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/HiveBolt.java
+++ b/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/HiveBolt.java
@@ -129,15 +129,15 @@ public class HiveBolt extends BaseRichBolt {
                 HiveWriter w = entry.getValue();
                 w.flushAndClose();
             } catch (Exception ex) {
-                LOG.warn("Error while closing writer to " + entry.getKey() +
-                         ". Exception follows.", ex);
+                LOG.warn("Error while closing writer to " + entry.getKey() + ". Exception follows.",
+                        ex);
                 if (ex instanceof InterruptedException) {
                     Thread.currentThread().interrupt();
                 }
             }
         }
 
-        ExecutorService toShutdown[] = { callTimeoutPool };
+        ExecutorService[] toShutdown = { callTimeoutPool };
         for (ExecutorService execService : toShutdown) {
             execService.shutdown();
             try {
@@ -214,7 +214,7 @@ public class HiveBolt extends BaseRichBolt {
     }
 
     /**
-     * Abort current Txn on all writers
+     * Abort current Txn on all writers.
      */
     private void abortAllWriters() throws InterruptedException, StreamingException, HiveWriter.TxnBatchFailure {
         for (Entry<HiveEndPoint, HiveWriter> entry : allWriters.entrySet()) {
@@ -227,7 +227,7 @@ public class HiveBolt extends BaseRichBolt {
     }
 
     /**
-     * Closes all writers and remove them from cache
+     * Closes all writers and remove them from cache.
      */
     private void closeAllWriters() {
         //1) Retire writers
@@ -269,7 +269,7 @@ public class HiveBolt extends BaseRichBolt {
     }
 
     /**
-     * Locate writer that has not been used for longest time and retire it
+     * Locate writer that has not been used for longest time and retire it.
      */
     private void retireEldestWriter() {
         LOG.info("Attempting close eldest writers");
@@ -295,7 +295,7 @@ public class HiveBolt extends BaseRichBolt {
     }
 
     /**
-     * Locate all writers past idle timeout and retire them
+     * Locate all writers past idle timeout and retire them.
      * @return number of writers retired
      */
     private int retireIdleWriters() {
diff --git a/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/DelimitedRecordHiveMapper.java b/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/DelimitedRecordHiveMapper.java
index f6e3612..fbbc4cc 100644
--- a/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/DelimitedRecordHiveMapper.java
+++ b/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/DelimitedRecordHiveMapper.java
@@ -12,7 +12,6 @@
 
 package org.apache.storm.hive.bolt.mapper;
 
-
 import com.google.common.annotations.VisibleForTesting;
 import java.io.IOException;
 import java.text.SimpleDateFormat;
@@ -94,18 +93,6 @@ public class DelimitedRecordHiveMapper implements HiveMapper {
     }
 
     @Override
-    public byte[] mapRecord(Tuple tuple) {
-        StringBuilder builder = new StringBuilder();
-        if (this.columnFields != null) {
-            for (String field : this.columnFields) {
-                builder.append(tuple.getValueByField(field));
-                builder.append(fieldDelimiter);
-            }
-        }
-        return builder.toString().getBytes();
-    }
-
-    @Override
     public List<String> mapPartitions(TridentTuple tuple) {
         List<String> partitionList = new ArrayList<String>();
         if (this.partitionFields != null) {
@@ -120,6 +107,18 @@ public class DelimitedRecordHiveMapper implements HiveMapper {
     }
 
     @Override
+    public byte[] mapRecord(Tuple tuple) {
+        StringBuilder builder = new StringBuilder();
+        if (this.columnFields != null) {
+            for (String field : this.columnFields) {
+                builder.append(tuple.getValueByField(field));
+                builder.append(fieldDelimiter);
+            }
+        }
+        return builder.toString().getBytes();
+    }
+
+    @Override
     public byte[] mapRecord(TridentTuple tuple) {
         StringBuilder builder = new StringBuilder();
         if (this.columnFields != null) {
diff --git a/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/HiveMapper.java b/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/HiveMapper.java
index cc29392..dd67941 100644
--- a/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/HiveMapper.java
+++ b/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/HiveMapper.java
@@ -12,7 +12,6 @@
 
 package org.apache.storm.hive.bolt.mapper;
 
-
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.List;
@@ -31,9 +30,6 @@ public interface HiveMapper extends Serializable {
 
     /**
      * Given a endPoint, returns a RecordWriter with columnNames.
-     *
-     * @param endPoint
-     * @return
      */
 
     RecordWriter createRecordWriter(HiveEndPoint endPoint)
@@ -44,31 +40,21 @@ public interface HiveMapper extends Serializable {
 
     /**
      * Given a tuple, return a hive partition values list.
-     *
-     * @param tuple
-     * @return List<String>
      */
     List<String> mapPartitions(Tuple tuple);
 
     /**
-     * Given a tuple, maps to a HiveRecord based on columnFields
-     * @Param Tuple
-     * @return byte[]
+     * Given a TridentTuple, return a hive partition values list.
      */
-    byte[] mapRecord(Tuple tuple);
+    List<String> mapPartitions(TridentTuple tuple);
 
     /**
-     * Given a TridetnTuple, return a hive partition values list.
-     *
-     * @param tuple
-     * @return List<String>
+     * Given a tuple, maps to a HiveRecord based on columnFields.
      */
-    List<String> mapPartitions(TridentTuple tuple);
+    byte[] mapRecord(Tuple tuple);
 
     /**
-     * Given a TridentTuple, maps to a HiveRecord based on columnFields
-     * @Param TridentTuple
-     * @return byte[]
+     * Given a TridentTuple, maps to a HiveRecord based on columnFields.
      */
     byte[] mapRecord(TridentTuple tuple);
 
diff --git a/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/JsonRecordHiveMapper.java b/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/JsonRecordHiveMapper.java
index 59c1bb7..9f70a70 100644
--- a/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/JsonRecordHiveMapper.java
+++ b/external/storm-hive/src/main/java/org/apache/storm/hive/bolt/mapper/JsonRecordHiveMapper.java
@@ -12,7 +12,6 @@
 
 package org.apache.storm.hive.bolt.mapper;
 
-
 import java.io.IOException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
@@ -83,17 +82,6 @@ public class JsonRecordHiveMapper implements HiveMapper {
     }
 
     @Override
-    public byte[] mapRecord(Tuple tuple) {
-        JSONObject obj = new JSONObject();
-        if (this.columnFields != null) {
-            for (String field : this.columnFields) {
-                obj.put(field, tuple.getValueByField(field));
-            }
-        }
-        return obj.toJSONString().getBytes();
-    }
-
-    @Override
     public List<String> mapPartitions(TridentTuple tuple) {
         List<String> partitionList = new ArrayList<String>();
         if (this.partitionFields != null) {
@@ -108,6 +96,17 @@ public class JsonRecordHiveMapper implements HiveMapper {
     }
 
     @Override
+    public byte[] mapRecord(Tuple tuple) {
+        JSONObject obj = new JSONObject();
+        if (this.columnFields != null) {
+            for (String field : this.columnFields) {
+                obj.put(field, tuple.getValueByField(field));
+            }
+        }
+        return obj.toJSONString().getBytes();
+    }
+
+    @Override
     public byte[] mapRecord(TridentTuple tuple) {
         JSONObject obj = new JSONObject();
         if (this.columnFields != null) {
diff --git a/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveOptions.java b/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveOptions.java
index db0fa69..ea97396 100644
--- a/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveOptions.java
+++ b/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveOptions.java
@@ -24,6 +24,7 @@ public class HiveOptions implements Serializable {
     protected HiveMapper mapper;
     protected String databaseName;
     protected String tableName;
+    @SuppressWarnings("checkstyle:AbbreviationAsWordInName")
     protected String metaStoreURI;
     protected Integer txnsPerBatch = 100;
     protected Integer maxOpenConnections = 10;
@@ -36,8 +37,8 @@ public class HiveOptions implements Serializable {
     protected String kerberosKeytab;
     protected Integer tickTupleInterval = DEFAULT_TICK_TUPLE_INTERVAL_SECS;
 
-    public HiveOptions(String metaStoreURI, String databaseName, String tableName, HiveMapper mapper) {
-        this.metaStoreURI = metaStoreURI;
+    public HiveOptions(String metaStoreUri, String databaseName, String tableName, HiveMapper mapper) {
+        this.metaStoreURI = metaStoreUri;
         this.databaseName = databaseName;
         this.tableName = tableName;
         this.mapper = mapper;
@@ -93,6 +94,7 @@ public class HiveOptions implements Serializable {
         return this;
     }
 
+    @SuppressWarnings("checkstyle:AbbreviationAsWordInName")
     public String getMetaStoreURI() {
         return metaStoreURI;
     }
diff --git a/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveUtils.java b/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveUtils.java
index 7681f91..328794f 100644
--- a/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveUtils.java
+++ b/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveUtils.java
@@ -12,6 +12,8 @@
 
 package org.apache.storm.hive.common;
 
+import static org.apache.storm.Config.TOPOLOGY_AUTO_CREDENTIALS;
+
 import java.io.File;
 import java.io.IOException;
 import java.util.List;
@@ -25,8 +27,6 @@ import org.apache.storm.hive.security.AutoHive;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.storm.Config.TOPOLOGY_AUTO_CREDENTIALS;
-
 public class HiveUtils {
     private static final Logger LOG = LoggerFactory.getLogger(HiveUtils.class);
 
@@ -94,8 +94,8 @@ public class HiveUtils {
     }
 
     public static boolean isTokenAuthEnabled(Map<String, Object> conf) {
-        return conf.get(TOPOLOGY_AUTO_CREDENTIALS) != null &&
-               (((List) conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoHive.class.getName()));
+        return conf.get(TOPOLOGY_AUTO_CREDENTIALS) != null
+                && (((List) conf.get(TOPOLOGY_AUTO_CREDENTIALS)).contains(AutoHive.class.getName()));
     }
 
     private static UserGroupInformation getCurrentUser(String principal) throws AuthenticationFailed {
diff --git a/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveWriter.java b/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveWriter.java
index bc0ee75..c0ea26f 100644
--- a/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveWriter.java
+++ b/external/storm-hive/src/main/java/org/apache/storm/hive/common/HiveWriter.java
@@ -84,7 +84,6 @@ public class HiveWriter {
     /**
      * If the current thread has been interrupted, then throws an
      * exception.
-     * @throws InterruptedException
      */
     private static void checkAndThrowInterruptedException()
         throws InterruptedException {
@@ -114,13 +113,13 @@ public class HiveWriter {
         }
     }
 
-    private HiveConf createHiveConf(String metaStoreURI, boolean tokenAuthEnabled) {
+    private HiveConf createHiveConf(String metaStoreUri, boolean tokenAuthEnabled) {
         if (!tokenAuthEnabled) {
             return null;
         }
 
         HiveConf hcatConf = new HiveConf();
-        hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
+        hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreUri);
         hcatConf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL, true);
         return hcatConf;
     }
@@ -133,16 +132,14 @@ public class HiveWriter {
     }
 
     /**
-     * Write data <br />
-     *
-     * @throws IOException
-     * @throws InterruptedException
+     * Write data.
      */
     public synchronized void write(final byte[] record)
         throws WriteFailure, SerializationError, InterruptedException {
         if (closed) {
-            throw new IllegalStateException("This hive streaming writer was closed " +
-                                            "and thus no longer able to write : " + endPoint);
+            throw new IllegalStateException("This hive streaming writer was closed "
+                    + "and thus no longer able to write : "
+                    + endPoint);
         }
         // write the tuple
         try {
@@ -172,7 +169,9 @@ public class HiveWriter {
     public void flush(boolean rollToNext)
         throws CommitFailure, TxnBatchFailure, TxnFailure, InterruptedException {
         // if there are no records do not call flush
-        if (totalRecords <= 0) return;
+        if (totalRecords <= 0) {
+            return;
+        }
         try {
             synchronized (txnBatchLock) {
                 commitTxn();
@@ -186,7 +185,7 @@ public class HiveWriter {
     }
 
     /** Queues up a heartbeat request on the current and remaining txns using the
-     *  heartbeatThdPool and returns immediately
+     *  heartbeatThdPool and returns immediately.
      */
     public void heartBeat() throws InterruptedException {
         // 1) schedule the heartbeat on one thread in pool
@@ -214,8 +213,7 @@ public class HiveWriter {
     }
 
     /**
-     * returns totalRecords written so far in a transaction
-     * @returns totalRecords
+     * returns totalRecords written so far in a transaction.
      */
     public int getTotalRecords() {
         return totalRecords;
@@ -231,9 +229,7 @@ public class HiveWriter {
     }
 
     /**
-     * Close the Transaction Batch and connection
-     * @throws IOException
-     * @throws InterruptedException
+     * Close the Transaction Batch and connection.
      */
     public void close() throws IOException, InterruptedException {
         closeTxnBatch();
@@ -369,7 +365,6 @@ public class HiveWriter {
     /**
      * if there are remainingTransactions in current txnBatch, begins nextTransactions
      * otherwise creates new txnBatch.
-     * @param rollToNext
      */
     private void nextTxn(boolean rollToNext) throws StreamingException, InterruptedException, TxnBatchFailure {
         if (txnBatch.remainingTransactions() == 0) {
@@ -403,9 +398,9 @@ public class HiveWriter {
             } else {
                 return future.get();
             }
-        } catch (TimeoutException eT) {
+        } catch (TimeoutException timeoutException) {
             future.cancel(true);
-            throw eT;
+            throw timeoutException;
         } catch (ExecutionException e1) {
             Throwable cause = e1.getCause();
             if (cause instanceof IOException) {
@@ -441,7 +436,6 @@ public class HiveWriter {
      * Simple interface whose <tt>call</tt> method is called by
      * {#callWithTimeout} in a new thread inside a
      * {@linkplain java.security.PrivilegedExceptionAction#run()} call.
-     * @param <T>
      */
     private interface CallRunner<T> {
         T call() throws Exception;
@@ -460,8 +454,8 @@ public class HiveWriter {
     }
 
     public static class CommitFailure extends Failure {
-        public CommitFailure(HiveEndPoint endPoint, Long txnID, Throwable cause) {
-            super("Commit of Txn " + txnID + " failed on EndPoint: " + endPoint, cause);
+        public CommitFailure(HiveEndPoint endPoint, Long txnId, Throwable cause) {
+            super("Commit of Txn " + txnId + " failed on EndPoint: " + endPoint, cause);
         }
     }
 
diff --git a/external/storm-hive/src/main/java/org/apache/storm/hive/trident/HiveState.java b/external/storm-hive/src/main/java/org/apache/storm/hive/trident/HiveState.java
index b9494da..a661447 100644
--- a/external/storm-hive/src/main/java/org/apache/storm/hive/trident/HiveState.java
+++ b/external/storm-hive/src/main/java/org/apache/storm/hive/trident/HiveState.java
@@ -130,7 +130,7 @@ public class HiveState implements State {
     }
 
     /**
-     * Abort current Txn on all writers
+     * Abort current Txn on all writers.
      */
     private void abortAllWriters() throws InterruptedException, StreamingException, HiveWriter.TxnBatchFailure {
         for (Entry<HiveEndPoint, HiveWriter> entry : allWriters.entrySet()) {
@@ -140,7 +140,7 @@ public class HiveState implements State {
 
 
     /**
-     * Closes all writers and remove them from cache
+     * Closes all writers and remove them from cache.
      * @return number of writers retired
      */
     private void closeAllWriters() throws InterruptedException, IOException {
@@ -209,7 +209,7 @@ public class HiveState implements State {
 
 
     /**
-     * Locate writer that has not been used for longest time and retire it
+     * Locate writer that has not been used for longest time and retire it.
      */
     private void retireEldestWriter() {
         long oldestTimeStamp = System.currentTimeMillis();
@@ -234,7 +234,7 @@ public class HiveState implements State {
     }
 
     /**
-     * Locate all writers past idle timeout and retire them
+     * Locate all writers past idle timeout and retire them.
      * @return number of writers retired
      */
     private int retireIdleWriters() {
@@ -276,15 +276,15 @@ public class HiveState implements State {
                 LOG.info("Closing writer to {}", w);
                 w.close();
             } catch (Exception ex) {
-                LOG.warn("Error while closing writer to " + entry.getKey() +
-                         ". Exception follows.", ex);
+                LOG.warn("Error while closing writer to " + entry.getKey() + ". Exception follows.",
+                        ex);
                 if (ex instanceof InterruptedException) {
                     Thread.currentThread().interrupt();
                 }
             }
         }
 
-        ExecutorService toShutdown[] = { callTimeoutPool };
+        ExecutorService[] toShutdown = { callTimeoutPool };
         for (ExecutorService execService : toShutdown) {
             execService.shutdown();
             try {
