shaofengshi closed pull request #262: KYLIN-3597 Improve code smell
URL: https://github.com/apache/kylin/pull/262
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:


diff --git a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java
index 6711664161..629c02563b 100644
--- a/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java
+++ b/core-storage/src/main/java/org/apache/kylin/storage/gtrecord/SegmentCubeTupleIterator.java
@@ -98,14 +98,23 @@ public GTInfo getInfo() {
                     return scanRequest.getInfo();
                 }
 
-                public void close() throws IOException {}
+                public void close() {
+                    // Underlying resource is held by scanner and it will be closed at
+                    // SegmentCubeTupleIterator#close, caller is SequentialCubeTupleIterator
+                }
 
                 public Iterator<GTRecord> iterator() {
                     return records;
                 }
             };
-            GTStreamAggregateScanner aggregator = new GTStreamAggregateScanner(inputScanner, scanRequest);
-            return aggregator.valuesIterator(gtDimsIdx, gtMetricsIdx);
+            Iterator<Object[]> result;
+            try (GTStreamAggregateScanner aggregator = new GTStreamAggregateScanner(inputScanner, scanRequest)) {
+                result = aggregator.valuesIterator(gtDimsIdx, gtMetricsIdx);
+            } catch (IOException ioe) {
+                // implementation of close method of anonymous IGTScanner is empty, no way to throw exception
+                throw new IllegalStateException("IOException is not expected here.", ioe);
+            }
+            return result;
         }
 
         // simply decode records
@@ -149,10 +158,10 @@ public boolean hasNext() {
         if (!gtValues.hasNext()) {
             return false;
         }
-        Object[] gtValues = this.gtValues.next();
+        Object[] values = this.gtValues.next();
 
         // translate into tuple
-        advMeasureFillers = cubeTupleConverter.translateResult(gtValues, tuple);
+        advMeasureFillers = cubeTupleConverter.translateResult(values, tuple);
 
         // the simple case
         if (advMeasureFillers == null) {
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConsumerProperties.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConsumerProperties.java
index cc32ed9592..a1b9ab253d 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConsumerProperties.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/config/KafkaConsumerProperties.java
@@ -20,6 +20,7 @@
 
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashSet;
@@ -28,7 +29,6 @@
 import java.util.Properties;
 import java.util.Set;
 
-import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -56,8 +56,8 @@ public static KafkaConsumerProperties getInstanceFromEnv() {
                 try {
                     KafkaConsumerProperties config = new KafkaConsumerProperties();
                     config.properties = config.loadKafkaConsumerProperties();
-
-                    logger.info("Initialized a new KafkaConsumerProperties from getInstanceFromEnv : " + System.identityHashCode(config));
+                    logger.info("Initialized a new KafkaConsumerProperties from getInstanceFromEnv : {}",
+                            System.identityHashCode(config));
                     ENV_INSTANCE = config;
                 } catch (IllegalArgumentException e) {
                     throw new IllegalStateException("Failed to find KafkaConsumerProperties ", e);
@@ -79,7 +79,7 @@ public static Properties extractKafkaConfigToProperties(Configuration configurat
         Set<String> configNames = new HashSet<String>();
         try {
             configNames = ConsumerConfig.configNames();
-        } catch (Error e) {
+        } catch (Exception e) {
             // the Kafka configNames api is supported on 0.10.1.0+, in case NoSuchMethodException which is an Error, not Exception
             String[] configNamesArray = ("metric.reporters, metadata.max.age.ms, partition.assignment.strategy, reconnect.backoff.ms," + "sasl.kerberos.ticket.renew.window.factor, max.partition.fetch.bytes, bootstrap.servers, ssl.keystore.type," + " enable.auto.commit, sasl.mechanism, interceptor.classes, exclude.internal.topics, ssl.truststore.password," + " client.id, ssl.endpoint.identification.algorithm, max.poll.records, check.crcs, request.timeout.ms, heartbeat.interval.ms," + " auto.commit.interval.ms, receive.buffer.bytes, ssl.truststore.type, ssl.truststore.location, ssl.keystore.password, fetch.min.bytes," + " fetch.max.bytes, send.buffer.bytes, max.poll.interval.ms, value.deserializer, group.id, retry.backoff.ms,"
                     + " ssl.secure.random.implementation, sasl.kerberos.kinit.cmd, sasl.kerberos.service.name, sasl.kerberos.ticket.renew.jitter, ssl.trustmanager.algorithm, ssl.key.password, fetch.max.wait.ms, sasl.kerberos.min.time.before.relogin, connections.max.idle.ms, session.timeout.ms, metrics.num.samples, key.deserializer, ssl.protocol, ssl.provider, ssl.enabled.protocols, ssl.keystore.location, ssl.cipher.suites, security.protocol, ssl.keymanager.algorithm, metrics.sample.window.ms, auto.offset.reset").split(",");
@@ -101,27 +101,27 @@ public static Properties extractKafkaConfigToProperties(Configuration configurat
     private Properties loadKafkaConsumerProperties() {
         File propFile = getKafkaConsumerFile();
         if (propFile == null || !propFile.exists()) {
-            logger.warn("fail to locate " + KAFKA_CONSUMER_FILE + ", use empty kafka consumer properties");
+            logger.warn("fail to locate {}, use empty kafka consumer properties", KAFKA_CONSUMER_FILE);
             return new Properties();
         }
         Properties properties = new Properties();
-        try {
-            FileInputStream is = new FileInputStream(propFile);
+        try (FileInputStream is = new FileInputStream(propFile)) {
             Configuration conf = new Configuration();
             conf.addResource(is);
             properties.putAll(extractKafkaConfigToProperties(conf));
-            IOUtils.closeQuietly(is);
 
             File propOverrideFile = new File(propFile.getParentFile(), propFile.getName() + ".override");
             if (propOverrideFile.exists()) {
-                FileInputStream ois = new FileInputStream(propOverrideFile);
-                Configuration oconf = new Configuration();
-                oconf.addResource(ois);
-                properties.putAll(extractKafkaConfigToProperties(oconf));
-                IOUtils.closeQuietly(ois);
+                try (FileInputStream ois = new FileInputStream(propOverrideFile)) {
+                    Configuration oconf = new Configuration();
+                    oconf.addResource(ois);
+                    properties.putAll(extractKafkaConfigToProperties(oconf));
+                }
             }
+        } catch (FileNotFoundException fne) {
+            throw new IllegalArgumentException(fne);
         } catch (IOException e) {
-            throw new RuntimeException(e);
+            // close inputStream quietly
         }
 
         return properties;
@@ -135,7 +135,7 @@ public String getKafkaConsumerHadoopJobConf() {
     private File getKafkaConsumerFile() {
         String kylinConfHome = System.getProperty(KylinConfig.KYLIN_CONF);
         if (!StringUtils.isEmpty(kylinConfHome)) {
-            logger.info("Use KYLIN_CONF=" + kylinConfHome);
+            logger.info("Use KYLIN_CONF={}", kylinConfHome);
             return getKafkaConsumerFile(kylinConfHome);
         }
 
diff --git a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
index 973f020ead..56e2dd5d1d 100644
--- a/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
+++ b/source-kafka/src/main/java/org/apache/kylin/source/kafka/util/KafkaSampleProducer.java
@@ -55,7 +55,7 @@
     private static final ObjectMapper mapper = new ObjectMapper();
 
     public static void main(String[] args) throws Exception {
-        logger.info("args: " + Arrays.toString(args));
+        logger.info("args: {}", Arrays.toString(args));
         OptionsHelper optionsHelper = new OptionsHelper();
         Options options = new Options();
         String topic, broker;
@@ -64,7 +64,7 @@ public static void main(String[] args) throws Exception {
         options.addOption(OPTION_INTERVAL);
         optionsHelper.parseOptions(options, args);
 
-        logger.info("options: '" + optionsHelper.getOptionsAsString() + "'");
+        logger.info("options: '{}'", optionsHelper.getOptionsAsString());
 
         topic = optionsHelper.getOptionValue(OPTION_TOPIC);
         broker = optionsHelper.getOptionValue(OPTION_BROKER);
@@ -75,7 +75,7 @@ public static void main(String[] args) throws Exception {
             interval = Long.parseLong(intervalString);
         }
 
-        List<String> countries = new ArrayList();
+        List<String> countries = new ArrayList<>();
         countries.add("AUSTRALIA");
         countries.add("CANADA");
         countries.add("CHINA");
@@ -84,19 +84,19 @@ public static void main(String[] args) throws Exception {
         countries.add("KOREA");
         countries.add("US");
         countries.add("Other");
-        List<String> category = new ArrayList();
+        List<String> category = new ArrayList<>();
         category.add("BOOK");
         category.add("TOY");
         category.add("CLOTH");
         category.add("ELECTRONIC");
         category.add("Other");
-        List<String> devices = new ArrayList();
+        List<String> devices = new ArrayList<>();
         devices.add("iOS");
         devices.add("Windows");
         devices.add("Andriod");
         devices.add("Other");
 
-        List<String> genders = new ArrayList();
+        List<String> genders = new ArrayList<>();
         genders.add("Male");
         genders.add("Female");
 
@@ -110,34 +110,32 @@ public static void main(String[] args) throws Exception {
         props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
         props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
 
-        Producer<String, String> producer = new KafkaProducer<>(props);
-
-        boolean alive = true;
-        Random rnd = new Random();
-        Map<String, Object> record = new HashMap();
-        while (alive == true) {
-            //add normal record
-            record.put("order_time", (new Date().getTime()));
-            record.put("country", countries.get(rnd.nextInt(countries.size())));
-            record.put("category", category.get(rnd.nextInt(category.size())));
-            record.put("device", devices.get(rnd.nextInt(devices.size())));
-            record.put("qty", rnd.nextInt(10));
-            record.put("currency", "USD");
-            record.put("amount", rnd.nextDouble() * 100);
-            //add embedded record
-            Map<String, Object> user = new HashMap();
-            user.put("id", RandomUtil.randomUUID().toString());
-            user.put("gender", genders.get(rnd.nextInt(2)));
-            user.put("age", rnd.nextInt(20) + 10);
-            user.put("first_name", "unknown");
-            record.put("user", user);
-            //send message
-            ProducerRecord<String, String> data = new ProducerRecord<>(topic, System.currentTimeMillis() + "", mapper.writeValueAsString(record));
-            System.out.println("Sending 1 message: " + JsonUtil.writeValueAsString(record));
-            producer.send(data);
-            Thread.sleep(interval);
+        try (Producer<String, String> producer = new KafkaProducer<>(props)) {
+            boolean alive = true;
+            Random rnd = new Random();
+            Map<String, Object> record = new HashMap<>();
+            while (alive == true) {
+                //add normal record
+                record.put("order_time", (new Date().getTime()));
+                record.put("country", countries.get(rnd.nextInt(countries.size())));
+                record.put("category", category.get(rnd.nextInt(category.size())));
+                record.put("device", devices.get(rnd.nextInt(devices.size())));
+                record.put("qty", rnd.nextInt(10));
+                record.put("currency", "USD");
+                record.put("amount", rnd.nextDouble() * 100);
+                //add embedded record
+                Map<String, Object> user = new HashMap<>();
+                user.put("id", RandomUtil.randomUUID().toString());
+                user.put("gender", genders.get(rnd.nextInt(2)));
+                user.put("age", rnd.nextInt(20) + 10);
+                user.put("first_name", "unknown");
+                record.put("user", user);
+                //send message
+                ProducerRecord<String, String> data = new ProducerRecord<>(topic, System.currentTimeMillis() + "", mapper.writeValueAsString(record));
+                System.out.println("Sending 1 message: " + JsonUtil.writeValueAsString(record));
+                producer.send(data);
+                Thread.sleep(interval);
+            }
         }
-        producer.close();
     }
-
 }
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
index db516bb8cd..d6367e5a09 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/util/RowCounterCLI.java
@@ -42,24 +42,26 @@
     public static void main(String[] args) throws IOException {
 
         if (args == null || args.length != 3) {
-            System.out.println("Usage: hbase org.apache.hadoop.util.RunJar kylin-job-latest.jar org.apache.kylin.job.tools.RowCounterCLI [HTABLE_NAME] [STARTKEY] [ENDKEY]");
+            logger.info(
+                    "Usage: hbase org.apache.hadoop.util.RunJar kylin-job-latest.jar org.apache.kylin.job.tools.RowCounterCLI [HTABLE_NAME] [STARTKEY] [ENDKEY]");
+            return; // if not enough arguments are provided, return with the above message
         }
 
-        System.out.println(args[0]);
+        logger.info(args[0]);
         String htableName = args[0];
-        System.out.println(args[1]);
+        logger.info(args[1]);
         byte[] startKey = BytesUtil.fromReadableText(args[1]);
-        System.out.println(args[2]);
+        logger.info(args[2]);
         byte[] endKey = BytesUtil.fromReadableText(args[2]);
 
         if (startKey == null) {
-            System.out.println("startkey is null ");
+            logger.info("startkey is null ");
         } else {
-            System.out.println("startkey lenght: " + startKey.length);
+            logger.info("startkey lenght: {}", startKey.length);
         }
 
-        System.out.println("start key in binary: " + Bytes.toStringBinary(startKey));
-        System.out.println("end key in binary: " + Bytes.toStringBinary(endKey));
+        logger.info("start key in binary: {}", Bytes.toStringBinary(startKey));
+        logger.info("end key in binary: {}", Bytes.toStringBinary(endKey));
 
         Configuration conf = HBaseConnection.getCurrentHBaseConfiguration();
 
@@ -69,22 +71,19 @@ public static void main(String[] args) throws IOException {
         scan.setStartRow(startKey);
         scan.setStopRow(endKey);
 
-        logger.info("My Scan " + scan.toString());
-
-        Connection conn = ConnectionFactory.createConnection(conf);
-        Table tableInterface = conn.getTable(TableName.valueOf(htableName));
-
-        Iterator<Result> iterator = tableInterface.getScanner(scan).iterator();
-        int counter = 0;
-        while (iterator.hasNext()) {
-            iterator.next();
-            counter++;
-            if (counter % 1000 == 1) {
-                System.out.println("number of rows: " + counter);
+        logger.info("My Scan {}", scan);
+        try (Connection conn = ConnectionFactory.createConnection(conf);
+                Table tableInterface = conn.getTable(TableName.valueOf(htableName))) {
+            Iterator<Result> iterator = tableInterface.getScanner(scan).iterator();
+            int counter = 0;
+            while (iterator.hasNext()) {
+                iterator.next();
+                counter++;
+                if (counter % 1000 == 1) {
+                    logger.info("number of rows: {}", counter);
+                }
             }
+            logger.info("number of rows: {}", counter);
         }
-        System.out.println("number of rows: " + counter);
-        tableInterface.close();
-        conn.close();
     }
 }

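Two patterns recur throughout this diff. The first is try-with-resources: input streams, the Kafka producer, and the HBase connection/table are now declared in the try header, so they are closed automatically even when an exception is thrown, replacing manual close() calls and IOUtils.closeQuietly(). The following standalone sketch only illustrates the idiom; the class name, file name, and property handling are illustrative and are not taken from the Kylin sources:

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class TryWithResourcesSketch {

    // Load properties from a file; the stream is closed automatically,
    // whether props.load() succeeds or throws.
    static Properties load(String path) throws IOException {
        Properties props = new Properties();
        try (FileInputStream in = new FileInputStream(path)) {
            props.load(in);
        } // in.close() is invoked here by the runtime, even on exception
        return props;
    }

    public static void main(String[] args) throws IOException {
        // Hypothetical file name, used only to exercise the sketch.
        Properties props = load("kafka-consumer.properties");
        System.out.println("loaded " + props.size() + " entries");
    }
}

Because close() on a resource may itself declare IOException, code inside a try-with-resources block either propagates that exception or, as in the SegmentCubeTupleIterator hunk above, wraps it in an unchecked exception when the close() implementation is known to be empty.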

 
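The second pattern is SLF4J parameterized logging: "{}" placeholders with arguments instead of string concatenation, so the message is only formatted when the target log level is enabled. Below is a minimal sketch, assuming slf4j-api (plus a logging binding) is on the classpath; the class name and the default path are illustrative and not taken from the PR:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSketch {

    private static final Logger logger = LoggerFactory.getLogger(LoggingSketch.class);

    public static void main(String[] args) {
        // Illustrative value; Kylin reads KYLIN_CONF from a system property.
        String confHome = System.getProperty("KYLIN_CONF", "/etc/kylin");

        // Concatenation builds the full message string even if INFO is disabled:
        // logger.info("Use KYLIN_CONF=" + confHome);

        // The parameterized form defers formatting until the level check passes:
        logger.info("Use KYLIN_CONF={}", confHome);
    }
}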

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services
