Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 91146fd26 -> bb718a094


CARBONDATA-330: Fix compiler warnings

CARBONDATA-330: Fix compiler warnings

CARBONDATA-330: Fix compiler warnings & Review comments

CARBONDATA-330: Fix Scala style checking violation


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/5b51d483
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/5b51d483
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/5b51d483

Branch: refs/heads/master
Commit: 5b51d4838f87da1e8e01d52a98f06d77e4a9fc61
Parents: 91146fd
Author: aniket <aniket.adn...@huawei.com>
Authored: Wed Oct 19 18:36:12 2016 -0700
Committer: jackylk <jacky.li...@huawei.com>
Committed: Wed Oct 26 18:22:33 2016 +0800

----------------------------------------------------------------------
 .../impl/AuditExtendedRollingFileAppenderTest_UT.java |  2 +-
 .../impl/ExtendedRollingFileAppenderTest_UT.java      |  2 +-
 .../org/apache/carbondata/core/util/DataTypeUtil.java |  2 +-
 .../carbondata/hadoop/ft/CarbonInputFormat_FT.java    |  4 ++--
 .../apache/spark/sql/CarbonDictionaryDecoder.scala    |  2 +-
 .../org/apache/spark/sql/hive/CarbonSQLDialect.scala  |  2 +-
 .../MajorCompactionIgnoreInMinorTest.scala            |  2 +-
 .../TestDataLoadWithColumnsMoreThanSchema.scala       | 12 ++++++------
 ...tLoadDataWithMalformedCarbonCommandException.scala | 14 +++++++-------
 .../dataretention/DataRetentionTestCase.scala         | 12 ++++++------
 .../ColumnPropertyValidationTestCase.scala            |  4 ++--
 .../spark/util/ExternalColumnDictionaryTestCase.scala |  4 ++--
 12 files changed, 31 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/common/src/test/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java b/common/src/test/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java
index 4032ddb..24cdfcb 100644
--- a/common/src/test/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java
+++ b/common/src/test/java/org/apache/carbondata/common/logging/impl/AuditExtendedRollingFileAppenderTest_UT.java
@@ -19,7 +19,7 @@
 
 package org.apache.carbondata.common.logging.impl;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import mockit.Deencapsulation;
 import org.apache.log4j.Logger;
 import org.apache.log4j.spi.LoggingEvent;

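Note on the two logging test changes: junit.framework.Assert is the JUnit 3 assertion class and is marked deprecated in JUnit 4, so switching the import to org.junit.Assert clears the warning while keeping the same static assert* methods. A minimal sketch of the replacement API (the actual tests are Java; Scala is used here only to keep all sketches in this mail in one language, and the class name and values are illustrative):

    import org.junit.{Assert, Test}

    class AssertMigrationSketch {
      @Test def assertsStillWork(): Unit = {
        // org.junit.Assert exposes the same static assert* methods, without the
        // deprecation warning attached to the JUnit 3 junit.framework.Assert class.
        Assert.assertEquals("expected", "expected")
        Assert.assertTrue(1 + 1 == 2)
      }
    }
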
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/common/src/test/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java b/common/src/test/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java
index 006db9c..372d805 100644
--- a/common/src/test/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java
+++ b/common/src/test/java/org/apache/carbondata/common/logging/impl/ExtendedRollingFileAppenderTest_UT.java
@@ -19,7 +19,7 @@
 
 package org.apache.carbondata.common.logging.impl;
 
-import junit.framework.Assert;
+import org.junit.Assert;
 import mockit.Deencapsulation;
 import org.apache.log4j.Logger;
 import org.apache.log4j.spi.LoggingEvent;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
index d00fc39..ae8775f 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
@@ -35,7 +35,7 @@ import org.apache.carbondata.core.carbon.metadata.schema.table.column.CarbonDime
 import org.apache.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 
-import org.apache.commons.lang.NumberUtils;
+import org.apache.commons.lang.math.NumberUtils;
 import org.apache.spark.unsafe.types.UTF8String;
 
 public final class DataTypeUtil {

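Note on the DataTypeUtil change: in commons-lang 2.x the top-level org.apache.commons.lang.NumberUtils is deprecated in favour of org.apache.commons.lang.math.NumberUtils, which carries the maintained number helpers, so only the import needs to move. A minimal sketch, assuming commons-lang 2.x on the classpath (names and values illustrative):

    import org.apache.commons.lang.math.NumberUtils

    object NumberUtilsSketch {
      // Same style of helpers as the deprecated top-level class.
      val looksNumeric: Boolean = NumberUtils.isNumber("12.5") // true
      val parsed: Int = NumberUtils.toInt("42", 0)             // 42; returns 0 for unparsable input
    }
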
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputFormat_FT.java
----------------------------------------------------------------------
diff --git a/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputFormat_FT.java b/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputFormat_FT.java
index aed7d79..94663b2 100644
--- a/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputFormat_FT.java
+++ b/hadoop/src/test/java/org/apache/carbondata/hadoop/ft/CarbonInputFormat_FT.java
@@ -51,7 +51,7 @@ public class CarbonInputFormat_FT extends TestCase {
   @Test public void testGetSplits() throws Exception {
     CarbonInputFormat carbonInputFormat = new CarbonInputFormat();
     JobConf jobConf = new JobConf(new Configuration());
-    Job job = new Job(jobConf);
+    Job job = Job.getInstance(jobConf);
     FileInputFormat.addInputPath(job, new Path("/opt/carbonstore/db/table1"));
     job.getConfiguration().set(CarbonInputFormat.INPUT_SEGMENT_NUMBERS, "1,2");
     List splits = carbonInputFormat.getSplits(job);
@@ -63,7 +63,7 @@ public class CarbonInputFormat_FT extends TestCase {
   @Test public void testGetFilteredSplits() throws Exception {
     CarbonInputFormat carbonInputFormat = new CarbonInputFormat();
     JobConf jobConf = new JobConf(new Configuration());
-    Job job = new Job(jobConf);
+    Job job = Job.getInstance(jobConf);
     FileInputFormat.addInputPath(job, new Path("/opt/carbonstore/db/table1"));
     job.getConfiguration().set(CarbonInputFormat.INPUT_SEGMENT_NUMBERS, "1,2");
     Expression expression = new EqualToExpression(new ColumnExpression("c1", DataType.STRING),

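Note on the two CarbonInputFormat_FT changes: the org.apache.hadoop.mapreduce.Job constructors are deprecated in Hadoop 2.x, and the Job.getInstance(...) factory methods are the supported replacement, which is all the test needed to silence the warning. A minimal sketch of the pattern (the configuration key below is hypothetical, for illustration only):

    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.mapreduce.Job

    object JobFactorySketch {
      def newJob(): Job = {
        val conf = new Configuration()
        // Job.getInstance copies the Configuration; it replaces the deprecated `new Job(conf)`.
        val job = Job.getInstance(conf)
        job.getConfiguration.set("example.segment.numbers", "1,2") // hypothetical key, illustration only
        job
      }
    }
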
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
index 697cb1b..12cdf9d 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
@@ -223,7 +223,7 @@ case class CarbonDictionaryDecoder(
             atiMap.get(f._1).get.getCarbonTableIdentifier,
             f._2, f._3))
         } catch {
-          case _ => null
+          case _: Throwable => null
         }
       } else {
         null

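Note on the catch-block change above, which is the same fix repeated in the remaining Scala diffs below: a bare "case _ =>" (or "case e =>") in a catch clause matches every Throwable, and scalac (2.10 and later) warns about it and suggests spelling the type out. Writing "case _: Throwable =>" keeps the old behaviour while clearing the warning; where swallowing fatal errors would be undesirable, scala.util.control.NonFatal is a common alternative. A minimal sketch (illustrative methods, not from this codebase):

    import scala.util.control.NonFatal

    object CatchAllSketch {
      // Equivalent to the old bare `case _ =>`, with the Throwable made explicit,
      // which is what clears the scalac warning.
      def parseOrZero(s: String): Int =
        try s.trim.toInt
        catch {
          case _: Throwable => 0
        }

      // Often the safer choice: NonFatal does not match OutOfMemoryError, InterruptedException,
      // ControlThrowable and friends, so genuinely fatal errors still propagate.
      def parseOrZeroNonFatal(s: String): Int =
        try s.trim.toInt
        catch {
          case NonFatal(_) => 0
        }
    }
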
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSQLDialect.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSQLDialect.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSQLDialect.scala
index ef4053a..82c5f7f 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSQLDialect.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSQLDialect.scala
@@ -37,7 +37,7 @@ private[spark] class CarbonSQLDialect(hiveContext: HiveContext) extends ParserDi
       // because hive can no parse carbon command
       case ce: MalformedCarbonCommandException =>
         throw ce
-      case _ =>
+      case _: Throwable =>
         HiveQl.parseSql(sqlText)
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
index 7edb000..99e3d56 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
@@ -154,7 +154,7 @@ class MajorCompactionIgnoreInMinorTest extends QueryTest with BeforeAndAfterAll
       assert(false)
     }
     catch {
-      case _ => assert(true)
+      case _:Throwable => assert(true)
     }
     val segmentStatusManager: SegmentStatusManager = new SegmentStatusManager(new
         AbsoluteTableIdentifier(

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
index da53143..931e4e0 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
@@ -54,7 +54,7 @@ class TestDataLoadWithColumnsMoreThanSchema extends QueryTest with BeforeAndAfte
       sql("LOAD DATA LOCAL INPATH './src/test/resources/character_carbon.csv' into table max_columns_test options('MAXCOLUMNS'='avfgd')")
       assert(false)
     } catch {
-      case _ => assert(true)
+      case _: Throwable => assert(true)
     }
   }
 
@@ -66,7 +66,7 @@ class TestDataLoadWithColumnsMoreThanSchema extends QueryTest with BeforeAndAfte
       checkAnswer(sql("select count(*) from valid_max_columns_test"),
         sql("select count(*) from hive_char_test"))
     } catch {
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
@@ -83,7 +83,7 @@ class TestDataLoadWithColumnsMoreThanSchema extends QueryTest with BeforeAndAfte
     } catch {
       case me: MalformedCarbonCommandException =>
         assert(false)
-      case _ => assert(true)
+      case _: Throwable => assert(true)
     }
   }
 
@@ -95,7 +95,7 @@ class TestDataLoadWithColumnsMoreThanSchema extends QueryTest with BeforeAndAfte
       checkAnswer(sql("select count(*) from valid_max_columns_test"),
         sql("select count(*) from hive_char_test"))
     } catch {
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
@@ -111,7 +111,7 @@ class TestDataLoadWithColumnsMoreThanSchema extends QueryTest with BeforeAndAfte
       sql("LOAD DATA LOCAL INPATH './src/test/resources/data.csv' into table boundary_max_columns_test options('MAXCOLUMNS'='14')")
       assert(true)
     } catch {
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
@@ -127,7 +127,7 @@ class TestDataLoadWithColumnsMoreThanSchema extends QueryTest with BeforeAndAfte
       sql("LOAD DATA LOCAL INPATH './src/test/resources/data.csv' into table boundary_max_columns_test options('MAXCOLUMNS'='13')")
       assert(true)
     } catch {
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala
index 88cbf48..74e98f0 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithMalformedCarbonCommandException.scala
@@ -89,7 +89,7 @@ class TestLoadDataWithMalformedCarbonCommandException extends QueryTest with Bef
       case e: MalformedCarbonCommandException =>
         assert(e.getMessage.equals("DICTIONARY_EXCLUDE column: ccc does not exist in table. " +
           "Please check create table statement."))
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
@@ -100,7 +100,7 @@ class TestLoadDataWithMalformedCarbonCommandException extends QueryTest with Bef
       case e: MalformedCarbonCommandException =>
         assert(e.getMessage.equals("DICTIONARY_INCLUDE column: aaa does not exist in table. " +
           "Please check create table statement."))
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
@@ -111,7 +111,7 @@ class TestLoadDataWithMalformedCarbonCommandException extends QueryTest with Bef
       case e: MalformedCarbonCommandException =>
         assert(e.getMessage.equals("DICTIONARY_EXCLUDE can not contain the same column: country " +
           "with DICTIONARY_INCLUDE. Please check create table statement."))
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
@@ -123,7 +123,7 @@ class TestLoadDataWithMalformedCarbonCommandException extends QueryTest with Bef
     } catch {
       case e: MalformedCarbonCommandException =>
         assert(e.getMessage.equals("Error: Invalid option(s): delimiterrr"))
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
@@ -135,7 +135,7 @@ class TestLoadDataWithMalformedCarbonCommandException extends QueryTest with Bef
     } catch {
       case e: MalformedCarbonCommandException =>
         assert(e.getMessage.equals("Error: Duplicate option(s): delimiter"))
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
@@ -146,7 +146,7 @@ class TestLoadDataWithMalformedCarbonCommandException extends QueryTest with Bef
           "TestLoadTableOptions options('DeLIMITEr'=',', 'qUOtECHAR'='\"')"
       )
     } catch {
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
@@ -157,7 +157,7 @@ class TestLoadDataWithMalformedCarbonCommandException extends QueryTest with Bef
       case e: MalformedCarbonCommandException =>
         assert(e.getMessage.equals("DICTIONARY_EXCLUDE can not contain the same column: country " +
           "with DICTIONARY_INCLUDE. Please check create table statement."))
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
index 0b4267d..bed6428 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
@@ -183,7 +183,7 @@ class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll {
     } catch {
       case e: MalformedCarbonCommandException =>
         assert(e.getMessage.contains("should not be empty"))
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
@@ -221,7 +221,7 @@ class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll {
     } catch {
       case e: MalformedCarbonCommandException =>
         assert(e.getMessage.contains("Invalid load start time format"))
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
 
     try {
@@ -232,7 +232,7 @@ class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll {
     } catch {
       case e: MalformedCarbonCommandException =>
         assert(e.getMessage.contains("Invalid load start time format"))
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
 
     checkAnswer(
@@ -258,7 +258,7 @@ class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll {
     } catch {
       case e: MalformedCarbonCommandException =>
         assert(!e.getMessage.equalsIgnoreCase("Invalid query"))
-      case _ => assert(true)
+      case _: Throwable => assert(true)
     }
 
     try {
@@ -267,7 +267,7 @@ class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll {
     } catch {
       case e: MalformedCarbonCommandException =>
         assert(!e.getMessage.equalsIgnoreCase("Invalid query"))
-      case _ => assert(true)
+      case _: Throwable => assert(true)
     }
 
     try {
@@ -276,7 +276,7 @@ class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll {
     } catch {
       case e: MalformedCarbonCommandException =>
         assert(!e.getMessage.equalsIgnoreCase("Invalid query"))
-      case _ => assert(true)
+      case _: Throwable => assert(true)
     }
 
   }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ColumnPropertyValidationTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ColumnPropertyValidationTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ColumnPropertyValidationTestCase.scala
index 6baeaca..59f4a87 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ColumnPropertyValidationTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/ColumnPropertyValidationTestCase.scala
@@ -30,7 +30,7 @@ class ColumnPropertyValidationTestCase extends QueryTest with BeforeAndAfterAll
        assert(true)
        sql("drop table employee")
      } catch {
-       case e =>assert(false)
+       case e: Throwable =>assert(false)
      }
   }
   test("Validate Dictionary include _ invalid key") {
@@ -39,7 +39,7 @@ class ColumnPropertyValidationTestCase extends QueryTest with BeforeAndAfterAll
        assert(false)
        sql("drop table employee")
      } catch {
-       case e =>assert(true)
+       case e: Throwable =>assert(true)
      }
   }
   

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b51d483/integration/spark/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
index dcf257a..97bcb64 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/util/ExternalColumnDictionaryTestCase.scala
@@ -221,7 +221,7 @@ class ExternalColumnDictionaryTestCase extends QueryTest with BeforeAndAfterAll
       case ex: MalformedCarbonCommandException =>
         assertResult(ex.getMessage)("Error: COLUMNDICT and ALL_DICTIONARY_PATH can not be used together " +
           "in options")
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
@@ -236,7 +236,7 @@ class ExternalColumnDictionaryTestCase extends QueryTest with BeforeAndAfterAll
       case ex: DataLoadingException =>
         assertResult(ex.getMessage)("Column gamePointId is not a key column. Only key column can be part " +
           "of dictionary and used in COLUMNDICT option.")
-      case _ => assert(false)
+      case _: Throwable => assert(false)
     }
   }
 
