[CARBONDATA-2122] Add validation for empty bad record path

A data load using the bad-record REDIRECT action with an empty bad-record
location should throw an Invalid Path exception.

This closes #1914


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/4a2a2d1b
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/4a2a2d1b
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/4a2a2d1b

Branch: refs/heads/branch-1.3
Commit: 4a2a2d1b74901f96efc4ecf9cc16e9804884b929
Parents: 50e2f2c
Author: Jatin <jatin.de...@knoldus.in>
Authored: Fri Feb 2 19:55:16 2018 +0530
Committer: kunal642 <kunalkapoor...@gmail.com>
Committed: Sun Feb 4 00:23:19 2018 +0530

----------------------------------------------------------------------
 .../apache/carbondata/core/util/CarbonUtil.java |  7 +-
 .../sdv/generated/AlterTableTestCase.scala      |  2 -
 .../sdv/generated/DataLoadingTestCase.scala     |  5 +-
 .../badrecordloger/BadRecordActionTest.scala    | 71 +++++++++++++++++++-
 .../badrecordloger/BadRecordEmptyDataTest.scala |  5 --
 .../badrecordloger/BadRecordLoggerTest.scala    |  5 --
 .../StandardPartitionBadRecordLoggerTest.scala  |  5 --
 .../carbondata/spark/util/DataLoadingUtil.scala |  2 +-
 .../spark/sql/test/TestQueryExecutor.scala      | 16 ++---
 .../BadRecordPathLoadOptionTest.scala           | 11 ++-
 .../DataLoadFailAllTypeSortTest.scala           | 28 +-------
 .../NumericDimensionBadRecordTest.scala         |  6 +-
 .../AlterTableValidationTestCase.scala          |  3 -
 .../carbon/datastore/BlockIndexStoreTest.java   |  2 -
 14 files changed, 93 insertions(+), 75 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java 
b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index b62b77d..c208154 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -98,6 +98,7 @@ import com.google.gson.GsonBuilder;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -1891,7 +1892,11 @@ public final class CarbonUtil {
    * @return
    */
   public static boolean isValidBadStorePath(String badRecordsLocation) {
-    return !(null == badRecordsLocation || badRecordsLocation.length() == 0);
+    if (StringUtils.isEmpty(badRecordsLocation)) {
+      return false;
+    } else {
+      return isFileExists(checkAndAppendHDFSUrl(badRecordsLocation));
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala
 
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala
index 8899f5c..4e53ea3 100644
--- 
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala
+++ 
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala
@@ -1016,8 +1016,6 @@ class AlterTableTestCase extends QueryTest with 
BeforeAndAfterAll {
     prop.addProperty("carbon.compaction.level.threshold", "2,1")
     prop.addProperty("carbon.enable.auto.load.merge", "false")
     prop.addProperty("carbon.bad.records.action", "FORCE")
-    prop.addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-      TestQueryExecutor.warehouse+"/baaaaaaadrecords")
   }
 
   override def afterAll: Unit = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
 
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
index 52396ee..24a5aa4 100644
--- 
a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
+++ 
b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingTestCase.scala
@@ -27,6 +27,7 @@ import org.apache.spark.sql.test.TestQueryExecutor
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.util.CarbonProperties
 
 /**
@@ -1469,7 +1470,5 @@ class DataLoadingTestCase extends QueryTest with 
BeforeAndAfterAll {
 
   override protected def beforeAll(): Unit = {
     sql(s"""drop table if exists uniqdata""").collect
-    
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-      TestQueryExecutor.warehouse + "/baaaaaaadrecords")
-  }
+     }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
index 0249ddf..d85ee49 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordActionTest.scala
@@ -1,21 +1,29 @@
 package org.apache.carbondata.spark.testsuite.badrecordloger
 
+import java.io.File
+
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.common.constants.LoggerAction
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 
-class BadRecordActionTest extends QueryTest with BeforeAndAfterAll  {
+class BadRecordActionTest extends QueryTest with BeforeAndAfterAll {
 
 
   val csvFilePath = s"$resourcesPath/badrecords/datasample.csv"
+  def currentPath: String = new File(this.getClass.getResource("/").getPath + 
"../../")
+    .getCanonicalPath
+  val badRecordFilePath: File =new File(currentPath + 
"/target/test/badRecords")
 
   override def beforeAll = {
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
-
+    CarbonProperties.getInstance().addProperty(
+      CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, 
LoggerAction.FORCE.name())
+        badRecordFilePath.mkdirs()
     sql("drop table if exists sales")
   }
 
@@ -92,6 +100,65 @@ class BadRecordActionTest extends QueryTest with 
BeforeAndAfterAll  {
       Seq(Row(2)))
   }
 
+  test("test bad record REDIRECT but not having location should throw 
exception") {
+    sql("drop table if exists sales")
+    sql(
+      """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country 
String,
+          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED 
BY 'carbondata'""")
+    val exMessage = intercept[Exception] {
+      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales 
OPTIONS" +
+          "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
+          " ',', 'QUOTECHAR'= '\"', 'BAD_RECORD_PATH'='')")
+    }
+    assert(exMessage.getMessage.contains("Invalid bad records location."))
+  }
+
+  test("test bad record REDIRECT but not having empty location in option 
should throw exception") {
+    val badRecordLocation = CarbonProperties.getInstance()
+      .getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC)
+    
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+      CarbonCommonConstants.CARBON_BADRECORDS_LOC_DEFAULT_VAL)
+    sql("drop table if exists sales")
+    try {
+      sql(
+        """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country 
String,
+          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED 
BY 'carbondata'""")
+      val exMessage = intercept[Exception] {
+        sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales 
OPTIONS" +
+            "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
+            " ',', 'QUOTECHAR'= '\"')")
+      }
+      assert(exMessage.getMessage.contains("Invalid bad records location."))
+    }
+    finally {
+      
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
+        badRecordLocation)
+    }
+  }
+
+  test("test bad record is REDIRECT with location in carbon properties should 
pass") {
+    sql("drop table if exists sales")
+      sql(
+        """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country 
String,
+          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED 
BY 'carbondata'""")
+      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales 
OPTIONS" +
+          "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
+          " ',', 'QUOTECHAR'= '\"')")
+  }
+
+  test("test bad record is redirect with location in option while data loading 
should pass") {
+    sql("drop table if exists sales")
+         sql(
+        """CREATE TABLE IF NOT EXISTS sales(ID BigInt, date Timestamp, country 
String,
+          actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED 
BY 'carbondata'""")
+      sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales 
OPTIONS" +
+          "('bad_records_action'='REDIRECT', 'DELIMITER'=" +
+          " ',', 'QUOTECHAR'= '\"', 'BAD_RECORD_PATH'='" + 
{badRecordFilePath.getCanonicalPath} +
+          "')")
+      checkAnswer(sql("select count(*) from sales"),
+        Seq(Row(2)))
+  }
+
   override def afterAll() = {
     sql("drop table if exists sales")
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
index 4c6cc21..999fb6a 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordEmptyDataTest.scala
@@ -49,11 +49,6 @@ class BadRecordEmptyDataTest extends QueryTest with 
BeforeAndAfterAll {
       sql("drop table IF EXISTS bigtab")
       CarbonProperties.getInstance().addProperty(
         CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, 
LoggerAction.FORCE.name())
-      CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-          new File("./target/test/badRecords")
-            .getCanonicalPath)
-      CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
"yyyy/MM/dd")
       var csvFilePath = ""
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
index 797a972..694d25b 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/badrecordloger/BadRecordLoggerTest.scala
@@ -56,11 +56,6 @@ class BadRecordLoggerTest extends QueryTest with 
BeforeAndAfterAll {
           actual_price Double, Quantity int, sold_price Decimal(19,2)) STORED 
BY 'carbondata'""")
 
       CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-          new File("./target/test/badRecords")
-            .getCanonicalPath)
-
-      CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
"yyyy/MM/dd")
       var csvFilePath = s"$resourcesPath/badrecords/datasample.csv"
       sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE sales 
OPTIONS"

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionBadRecordLoggerTest.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionBadRecordLoggerTest.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionBadRecordLoggerTest.scala
index f916c5e..e44ccd6 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionBadRecordLoggerTest.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionBadRecordLoggerTest.scala
@@ -38,11 +38,6 @@ class StandardPartitionBadRecordLoggerTest extends QueryTest 
with BeforeAndAfter
   override def beforeAll {
     drop()
     CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-        new File("./target/test/badRecords")
-          .getCanonicalPath)
-
-    CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/DataLoadingUtil.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/DataLoadingUtil.scala
 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/DataLoadingUtil.scala
index 8b4c232..3696e23 100644
--- 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/DataLoadingUtil.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/util/DataLoadingUtil.scala
@@ -248,10 +248,10 @@ object DataLoadingUtil {
 
     if (bad_records_logger_enable.toBoolean ||
         LoggerAction.REDIRECT.name().equalsIgnoreCase(bad_records_action)) {
-      bad_record_path = CarbonUtil.checkAndAppendHDFSUrl(bad_record_path)
       if (!CarbonUtil.isValidBadStorePath(bad_record_path)) {
         CarbonException.analysisException("Invalid bad records location.")
       }
+      bad_record_path = CarbonUtil.checkAndAppendHDFSUrl(bad_record_path)
     }
     carbonLoadModel.setBadRecordsLocation(bad_record_path)
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
 
b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
index 78214ae..f582145 100644
--- 
a/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/spark/sql/test/TestQueryExecutor.scala
@@ -52,8 +52,6 @@ object TestQueryExecutor {
   val integrationPath = s"$projectPath/integration"
   val metastoredb = s"$integrationPath/spark-common/target"
   val location = s"$integrationPath/spark-common/target/dbpath"
-  val badStoreLocation = s"$integrationPath/spark-common/target/bad_store"
-  createDirectory(badStoreLocation)
   val masterUrl = {
     val property = System.getProperty("spark.master.url")
     if (property == null) {
@@ -62,13 +60,6 @@ object TestQueryExecutor {
       property
     }
   }
-  val badStorePath = s"$integrationPath/spark-common-test/target/badrecord";
-  try {
-    FileFactory.mkdirs(badStorePath, FileFactory.getFileType(badStorePath))
-  } catch {
-    case e : Exception =>
-      throw e;
-  }
   val hdfsUrl = {
     val property = System.getProperty("hdfs.url")
     if (property == null) {
@@ -106,6 +97,13 @@ object TestQueryExecutor {
     s"$integrationPath/spark-common/target/warehouse"
   }
 
+  val badStoreLocation = if (hdfsUrl.startsWith("hdfs://")) {
+       s"$hdfsUrl/bad_store_" + System.nanoTime()
+      } else {
+        s"$integrationPath/spark-common/target/bad_store"
+      }
+    createDirectory(badStoreLocation)
+
   val hiveresultpath = if (hdfsUrl.startsWith("hdfs://")) {
     val p = s"$hdfsUrl/hiveresultpath"
     FileFactory.mkdirs(p, FileFactory.getFileType(p))

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark2/src/test/scala/org/apache/spark/carbondata/BadRecordPathLoadOptionTest.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/BadRecordPathLoadOptionTest.scala
 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/BadRecordPathLoadOptionTest.scala
index 8bec6f6..a59ae67 100644
--- 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/BadRecordPathLoadOptionTest.scala
+++ 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/BadRecordPathLoadOptionTest.scala
@@ -35,12 +35,10 @@ import org.apache.carbondata.core.util.CarbonProperties
  */
 class BadRecordPathLoadOptionTest extends Spark2QueryTest with 
BeforeAndAfterAll {
   var hiveContext: HiveContext = _
-  var badRecordPath: String = null
+
   override def beforeAll {
     try {
-       badRecordPath = new File("./target/test/badRecords")
-        .getCanonicalPath.replaceAll("\\\\","/")
-      sql("drop table IF EXISTS salestest")
+            sql("drop table IF EXISTS salestest")
     }
   }
 
@@ -51,7 +49,6 @@ class BadRecordPathLoadOptionTest extends Spark2QueryTest 
with BeforeAndAfterAll
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
     val csvFilePath = s"$resourcesPath/badrecords/datasample.csv"
-    sql(s"set 
${CarbonLoadOptionConstants.CARBON_OPTIONS_BAD_RECORD_PATH}=${badRecordPath}")
     sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE salestest 
OPTIONS" +
         "('bad_records_logger_enable'='true','bad_records_action'='redirect', 
'DELIMITER'=" +
         " ',', 'QUOTECHAR'= '\"')")
@@ -66,7 +63,9 @@ class BadRecordPathLoadOptionTest extends Spark2QueryTest 
with BeforeAndAfterAll
   }
 
   def isFilesWrittenAtBadStoreLocation: Boolean = {
-    val badStorePath = badRecordPath + "/default/salestest/0/0"
+    val badStorePath = CarbonProperties.getInstance()
+                         
.getProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC) +
+                       "/default/salestest/0/0"
     val carbonFile: CarbonFile = FileFactory
       .getCarbonFile(badStorePath, FileFactory.getFileType(badStorePath))
     var exists: Boolean = carbonFile.exists()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
index 48519fd..121150c 100644
--- 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
+++ 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/DataLoadFailAllTypeSortTest.scala
@@ -46,10 +46,6 @@ class DataLoadFailAllTypeSortTest extends Spark2QueryTest 
with BeforeAndAfterAll
   test("dataload with parallel merge with bad_records_action='FAIL'") {
     try {
       CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-          new File("./target/test/badRecords")
-            .getCanonicalPath)
-      CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
"yyyy/MM/dd")
       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FAIL");
@@ -76,10 +72,6 @@ class DataLoadFailAllTypeSortTest extends Spark2QueryTest 
with BeforeAndAfterAll
   test("dataload with ENABLE_UNSAFE_SORT='true' with 
bad_records_action='FAIL'") {
     try {
       CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-          new File("./target/test/badRecords")
-            .getCanonicalPath)
-      CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
"yyyy/MM/dd")
       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_SORT, "true");
@@ -109,11 +101,7 @@ class DataLoadFailAllTypeSortTest extends Spark2QueryTest 
with BeforeAndAfterAll
 
   test("dataload with LOAD_USE_BATCH_SORT='true' with 
bad_records_action='FAIL'") {
     try {
-      CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-          new File("./target/test/badRecords")
-            .getCanonicalPath)
-      CarbonProperties.getInstance()
+        CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
"yyyy/MM/dd")
       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "batch_sort")
@@ -143,10 +131,6 @@ class DataLoadFailAllTypeSortTest extends Spark2QueryTest 
with BeforeAndAfterAll
   test("dataload with LOAD_USE_BATCH_SORT='true' with 
bad_records_action='FORCE'") {
     try {
       CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-          new File("./target/test/badRecords")
-            .getCanonicalPath)
-      CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
"yyyy/MM/dd")
       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "BATCH_SORT")
@@ -177,11 +161,7 @@ class DataLoadFailAllTypeSortTest extends Spark2QueryTest 
with BeforeAndAfterAll
 
   test("dataload with LOAD_USE_BATCH_SORT='true' with 
bad_records_action='REDIRECT'") {
     try {
-      CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-          new File("./target/test/badRecords")
-            .getCanonicalPath)
-      CarbonProperties.getInstance()
+        CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
"yyyy/MM/dd")
       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "BATCH_SORT")
@@ -211,10 +191,6 @@ class DataLoadFailAllTypeSortTest extends Spark2QueryTest 
with BeforeAndAfterAll
   test("dataload with table bucketing with bad_records_action='FAIL'") {
     try {
       CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-          new File("./target/test/badRecords")
-            .getCanonicalPath)
-      CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
"yyyy/MM/dd")
       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FAIL")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark2/src/test/scala/org/apache/spark/carbondata/datatype/NumericDimensionBadRecordTest.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/datatype/NumericDimensionBadRecordTest.scala
 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/datatype/NumericDimensionBadRecordTest.scala
index b1e0bde..44fea03 100644
--- 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/datatype/NumericDimensionBadRecordTest.scala
+++ 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/datatype/NumericDimensionBadRecordTest.scala
@@ -43,11 +43,7 @@ class NumericDimensionBadRecordTest extends Spark2QueryTest 
with BeforeAndAfterA
       sql("drop table IF EXISTS floatDataType")
       sql("drop table IF EXISTS bigDecimalDataType")
       sql("drop table IF EXISTS stringDataType")
-      CarbonProperties.getInstance()
-        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-          new File("./target/test/badRecords")
-            .getCanonicalPath)
-      CarbonProperties.getInstance()
+       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
"yyyy/MM/dd")
       var csvFilePath = ""
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
index c88302d..b62e3c9 100644
--- 
a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
+++ 
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
@@ -32,9 +32,6 @@ import 
org.apache.carbondata.spark.exception.ProcessMetaDataException
 class AlterTableValidationTestCase extends Spark2QueryTest with 
BeforeAndAfterAll {
 
   override def beforeAll {
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
-        new File("./target/test/badRecords").getCanonicalPath)
 
     sql("drop table if exists restructure")
     sql("drop table if exists table1")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/4a2a2d1b/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
----------------------------------------------------------------------
diff --git 
a/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
 
b/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
index 7925b35..63320ef 100644
--- 
a/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
+++ 
b/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
@@ -49,8 +49,6 @@ public class BlockIndexStoreTest extends TestCase {
           LogServiceFactory.getLogService(BlockIndexStoreTest.class.getName());
 
   @BeforeClass public void setUp() {
-    CarbonProperties.getInstance().
-        addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, 
"/tmp/carbon/badrecords");
     StoreCreator.createCarbonStore();
     CarbonProperties.getInstance().
         addProperty(CarbonCommonConstants.CARBON_MAX_DRIVER_LRU_CACHE_SIZE, 
"10");

Reply via email to