Repository: carbondata
Updated Branches:
  refs/heads/master f82254021 -> cabafe56d


[CARBONDATA-3162][CARBONDATA-3163][CARBONDATA-3164] 'no_sort' as default 
sort_scope

What are the changes proposed in this PR?
This PR is based on the discussion in the mailing-list thread below:

http://apache-carbondata-dev-mailing-list-archive.1130556.n5.nabble.com/Discussion-Make-no-sort-as-default-sort-scope-and-keep-sort-columns-as-empty-by-default-td70233.html

Changes are made to set 'no_sort' as the default sort_scope and to keep
sort_columns empty by default.
A few issues in the 'no_sort' and empty-sort_columns flows are also fixed.
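For illustration, a minimal sketch of the new defaults in a Spark session
(table and column names are hypothetical):

    // No SORT_COLUMNS given: sort_columns stays empty and loads use NO_SORT.
    sql("CREATE TABLE t_default (id INT, name STRING) STORED BY 'carbondata'")

    // SORT_COLUMNS given but no SORT_SCOPE: LOCAL_SORT is inferred, unless
    // the user already configured the carbon.load.sort.scope property.
    sql("""CREATE TABLE t_sorted (id INT, name STRING)
           STORED BY 'carbondata'
           TBLPROPERTIES ('SORT_COLUMNS'='name')""")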

This closes #2966


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/cabafe56
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/cabafe56
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/cabafe56

Branch: refs/heads/master
Commit: cabafe56da23500128fdc895d0dde9a06b59c267
Parents: f822540
Author: ajantha-bhat <ajanthab...@gmail.com>
Authored: Tue Dec 4 22:30:09 2018 +0530
Committer: ravipesala <ravi.pes...@gmail.com>
Committed: Tue Dec 18 16:39:58 2018 +0530

----------------------------------------------------------------------
 .../core/constants/CarbonCommonConstants.java   |  2 +-
 .../executor/impl/AbstractQueryExecutor.java    |  1 +
 .../carbondata/core/scan/filter/FilterUtil.java |  3 +-
 .../carbondata/core/util/DataTypeUtil.java      |  8 ++
 docs/ddl-of-carbondata.md                       |  8 +-
 docs/sdk-guide.md                               |  5 +-
 .../hadoop/api/CarbonTableOutputFormat.java     |  9 +-
 .../hadoop/testutil/StoreCreator.java           |  2 +
 .../TestLoadDataWithSortColumnBounds.scala      | 12 ++-
 .../preaggregate/TestPreAggCreateCommand.scala  |  8 +-
 .../TestCreateTableWithSortScope.scala          | 12 +--
 .../TestNonTransactionalCarbonTable.scala       |  6 +-
 ...ompactionSupportGlobalSortFunctionTest.scala |  1 +
 ...mpactionSupportGlobalSortParameterTest.scala |  1 +
 .../dataload/TestBatchSortDataLoad.scala        |  9 +-
 .../dataload/TestGlobalSortDataLoad.scala       | 18 ++--
 .../testsuite/dataload/TestLoadDataFrame.scala  |  3 +-
 ...estLoadDataWithHiveSyntaxDefaultFormat.scala |  6 +-
 .../testsuite/datamap/TestDataMapStatus.scala   | 12 +--
 .../TestInsertAndOtherCommandConcurrent.scala   |  4 +-
 .../iud/UpdateCarbonTableTestCase.scala         |  3 +-
 .../testsuite/sortcolumns/TestSortColumns.scala |  3 +
 .../sortcolumns/TestSortColumnsWithUnsafe.scala |  4 +
 .../rdd/InsertTaskCompletionListener.scala      |  7 +-
 .../spark/rdd/NewCarbonDataLoadRDD.scala        |  4 +-
 .../spark/rdd/QueryTaskCompletionListener.scala |  3 +-
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala | 12 +--
 .../command/carbonTableSchemaCommon.scala       | 14 ++-
 .../datasources/CarbonSparkDataSourceUtil.scala |  2 +-
 .../CarbonTaskCompletionListener.scala          |  4 +-
 .../AlterTableValidationTestCase.scala          |  3 +-
 .../vectorreader/AddColumnTestCases.scala       |  4 +-
 .../CarbonGetTableDetailComandTestCase.scala    |  0
 .../processing/loading/model/LoadOption.java    |  2 +-
 .../CarbonRowDataWriterProcessorStepImpl.java   |  2 +-
 .../sdk/file/CarbonWriterBuilder.java           | 16 +++-
 .../carbondata/sdk/file/CarbonReaderTest.java   | 98 ++++----------------
 37 files changed, 165 insertions(+), 146 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
 
b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index a61d86f..387bf3b 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -960,7 +960,7 @@ public final class CarbonCommonConstants {
    * If set to GLOBAL_SORT, the sorting scope is bigger and one index tree per 
task will be
    * created, thus loading is slower but query is faster.
    */
-  public static final String LOAD_SORT_SCOPE_DEFAULT = "LOCAL_SORT";
+  public static final String LOAD_SORT_SCOPE_DEFAULT = "NO_SORT";
 
   /**
    * Size of batch data to keep in memory, as a thumb rule it supposed
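Deployments that relied on the old LOCAL_SORT default can restore it
process-wide through CarbonProperties, as several tests updated in this
commit do (a sketch; table-level TBLPROPERTIES still take priority):

    import org.apache.carbondata.core.constants.CarbonCommonConstants
    import org.apache.carbondata.core.util.CarbonProperties

    // Re-enable local sort as the system-wide default load sort scope.
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE, "local_sort")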

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
 
b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
index 34de4b7..fa4d120 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
@@ -808,6 +808,7 @@ public abstract class AbstractQueryExecutor<E> implements 
QueryExecutor<E> {
     if (null != exceptionOccurred) {
       throw new QueryExecutionException(exceptionOccurred);
     }
+    DataTypeUtil.clearFormatter();
   }
 
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java 
b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
index 286f68f..f382f0b 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterUtil.java
@@ -2265,7 +2265,8 @@ public final class FilterUtil {
       defaultValue = FilterUtil
           .getMaskKey(key, currentBlockDimension, 
segmentProperties.getSortColumnsGenerator());
     } else {
-      defaultValue = ByteUtil.toXorBytes(key);
+      defaultValue = FilterUtil
+          .getMaskKey(key, currentBlockDimension, 
segmentProperties.getDimensionKeyGenerator());
     }
     return defaultValue;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java 
b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
index 8f05f39..995f80d 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/DataTypeUtil.java
@@ -959,6 +959,14 @@ public final class DataTypeUtil {
     }
   }
 
+  /**
+   * Each load can have its own time format, so reset the thread locals for each load.
+   */
+  public static void clearFormatter() {
+    timeStampformatter.remove();
+    dateformatter.remove();
+  }
+
   public static DataTypeConverter getDataTypeConverter() {
     if (converter == null) {
       converter = new DataTypeConverterImpl();
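A condensed sketch of why clearFormatter() is needed (names hypothetical):
the format pattern is captured when the ThreadLocal initialises, so a pooled
executor thread would keep the first load's format until remove() is called.

    import java.text.SimpleDateFormat

    object FormatterCache {
      @volatile var configuredPattern = "yyyy-MM-dd HH:mm:ss" // per-load setting
      private val formatter = new ThreadLocal[SimpleDateFormat] {
        override def initialValue(): SimpleDateFormat =
          new SimpleDateFormat(configuredPattern)
      }
      def get(): SimpleDateFormat = formatter.get()
      // Force re-initialisation with the current pattern on the next get().
      def clear(): Unit = formatter.remove()
    }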

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/docs/ddl-of-carbondata.md
----------------------------------------------------------------------
diff --git a/docs/ddl-of-carbondata.md b/docs/ddl-of-carbondata.md
index 965f11c..90153b7 100644
--- a/docs/ddl-of-carbondata.md
+++ b/docs/ddl-of-carbondata.md
@@ -134,15 +134,13 @@ CarbonData DDL statements are documented here,which 
includes:
    - ##### Sort Columns Configuration
 
      This property is for users to specify which columns belong to the 
MDK(Multi-Dimensions-Key) index.
-     * If users don't specify "SORT_COLUMN" property, by default MDK index be 
built by using all dimension columns except complex data type column. 
+     * If users don't specify the "SORT_COLUMNS" property, no columns are sorted by default.
      * If this property is specified but with empty argument, then the table 
will be loaded without sort.
      * This supports only string, date, timestamp, short, int, long, byte and 
boolean data types.
      Suggested use cases : Only build MDK index for required columns,it might 
help to improve the data loading performance.
 
      ```
      TBLPROPERTIES ('SORT_COLUMNS'='column1, column3')
-     OR
-     TBLPROPERTIES ('SORT_COLUMNS'='')
      ```
 
      **NOTE**: Sort_Columns for Complex datatype columns is not supported.
@@ -151,8 +149,8 @@ CarbonData DDL statements are documented here,which 
includes:
    
      This property is for users to specify the scope of the sort during data 
load, following are the types of sort scope.
      
-     * LOCAL_SORT: It is the default sort scope.             
-     * NO_SORT: It will load the data in unsorted manner, it will 
significantly increase load performance.       
+     * LOCAL_SORT: data is sorted locally (task-level sorting).
+     * NO_SORT: the default scope. It loads the data in an unsorted manner and significantly increases load performance.
      * BATCH_SORT: It increases the load performance but decreases the query 
performance if identified blocks > parallelism.
      * GLOBAL_SORT: It increases the query performance, especially high 
concurrent point query.
        And if you care about loading resources isolation strictly, because the 
system uses the spark GroupBy to sort data, the resource can be controlled by 
spark. 
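Given the new NO_SORT default, loads that should be sorted must opt in
explicitly; a hypothetical example combining the two properties:

    sql("""CREATE TABLE sales_sorted (id INT, city STRING, amount INT)
           STORED BY 'carbondata'
           TBLPROPERTIES ('SORT_COLUMNS'='city', 'SORT_SCOPE'='LOCAL_SORT')""")

    sql("""CREATE TABLE sales_global (id INT, city STRING, amount INT)
           STORED BY 'carbondata'
           TBLPROPERTIES ('SORT_COLUMNS'='city', 'SORT_SCOPE'='GLOBAL_SORT')""")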

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index 8abc3b1..b157e51 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -370,9 +370,8 @@ public CarbonWriterBuilder withLoadOptions(Map<String, 
String> options);
 * b. table_blocklet_size -- values in MB. Default value is 64 MB
 * c. local_dictionary_threshold -- positive value, default is 10000
 * d. local_dictionary_enable -- true / false. Default is false
-* e. sort_columns -- comma separated column. "c1,c2". Default all dimensions 
are sorted.
-                     If empty string "" is passed. No columns are sorted
-* j. sort_scope -- "local_sort", "no_sort", "batch_sort". default value is 
"local_sort"
+* e. sort_columns -- comma separated columns, e.g. "c1,c2". By default no columns are sorted.
+* j. sort_scope -- "local_sort", "no_sort", "batch_sort". Default value is "no_sort".
 * k. long_string_columns -- comma separated string columns which are more than 
32k length. 
 *                           default value is null.
 * l. inverted_index -- comma separated string columns for which inverted index 
needs to be
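A hypothetical SDK sketch of the same defaults: nothing is sorted unless sort
columns are passed explicitly (the output path is made up; schema and build
steps are omitted):

    import org.apache.carbondata.sdk.file.CarbonWriter

    val builder = CarbonWriter.builder
      .outputPath("/tmp/carbon_sdk_out") // hypothetical path
      .sortBy(Array("c1", "c2"))         // opt in; omit to keep the no_sort default
      .withBlockSize(8)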

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableOutputFormat.java
----------------------------------------------------------------------
diff --git 
a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableOutputFormat.java
 
b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableOutputFormat.java
index 16486d0..b109f30 100644
--- 
a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableOutputFormat.java
+++ 
b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableOutputFormat.java
@@ -34,6 +34,7 @@ import 
org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.TableInfo;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonThreadFactory;
+import org.apache.carbondata.core.util.DataTypeUtil;
 import org.apache.carbondata.core.util.ObjectSerializationUtil;
 import org.apache.carbondata.core.util.ThreadLocalSessionInfo;
 import org.apache.carbondata.hadoop.internal.ObjectArrayWritable;
@@ -257,6 +258,7 @@ public class CarbonTableOutputFormat extends 
FileOutputFormat<NullWritable, Obje
     loadModel.setDataWritePath(
         
taskAttemptContext.getConfiguration().get("carbon.outputformat.writepath"));
     final String[] tempStoreLocations = 
getTempStoreLocations(taskAttemptContext);
+    DataTypeUtil.clearFormatter();
     final DataLoadExecutor dataLoadExecutor = new DataLoadExecutor();
     final ExecutorService executorService = Executors.newFixedThreadPool(1,
         new CarbonThreadFactory("CarbonRecordWriter:" + 
loadModel.getTableName()));
@@ -273,7 +275,12 @@ public class CarbonTableOutputFormat extends 
FileOutputFormat<NullWritable, Obje
           for (CarbonOutputIteratorWrapper iterator : iterators) {
             iterator.closeWriter(true);
           }
-          dataLoadExecutor.close();
+          try {
+            dataLoadExecutor.close();
+          } catch (Exception ex) {
+            // An exception already occurred before close(); propagate that original exception.
+            throw new RuntimeException(e);
+          }
           throw new RuntimeException(e);
         } finally {
           ThreadLocalSessionInfo.unsetAll();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
----------------------------------------------------------------------
diff --git 
a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java 
b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
index 7fd9235..5c6653b 100644
--- 
a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
+++ 
b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
@@ -66,6 +66,7 @@ import 
org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
 import org.apache.carbondata.core.statusmanager.SegmentStatus;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.DataTypeUtil;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 import org.apache.carbondata.core.writer.CarbonDictionaryWriter;
 import org.apache.carbondata.core.writer.CarbonDictionaryWriterImpl;
@@ -495,6 +496,7 @@ public class StoreCreator {
 
     CSVRecordReaderIterator readerIterator =
         new CSVRecordReaderIterator(recordReader, blockDetails, 
hadoopAttemptContext);
+    DataTypeUtil.clearFormatter();
     new DataLoadExecutor().execute(loadModel,
         new String[] {storeLocation + "/" + databaseName + "/" + tableName},
         new CarbonIterator[]{readerIterator});

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithSortColumnBounds.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithSortColumnBounds.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithSortColumnBounds.scala
index 1f171b8..9b6780d 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithSortColumnBounds.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithSortColumnBounds.scala
@@ -23,7 +23,7 @@ import scala.util.Random
 
 import org.apache.spark.sql.test.util.QueryTest
 import org.apache.spark.sql.{DataFrame, Row, SaveMode}
-import org.scalatest.BeforeAndAfterAll
+import org.scalatest.{BeforeAndAfterAll, Ignore}
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
@@ -60,7 +60,8 @@ class TestLoadDataWithSortColumnBounds extends QueryTest with 
BeforeAndAfterAll
     CarbonProperties.getInstance().addProperty(
       CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, dateFormatStr)
     sql(s"DROP TABLE IF EXISTS $tableName")
-
+    // sort column bounds work only with local_sort
+    
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
 "local_sort")
     prepareDataFile()
     prepareDataFrame()
   }
@@ -71,6 +72,10 @@ class TestLoadDataWithSortColumnBounds extends QueryTest 
with BeforeAndAfterAll
     sql(s"DROP TABLE IF EXISTS $tableName")
     new File(filePath).delete()
     df = null
+    // restore the default sort scope
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
+        CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
   }
 
   /**
@@ -247,7 +252,8 @@ class TestLoadDataWithSortColumnBounds extends QueryTest 
with BeforeAndAfterAll
     sql(s"DROP TABLE IF EXISTS $tableName")
   }
 
-  test("load data with sort column bounds: no sort columns explicitly 
specified" +
+  // now the default sort scope is no_sort, hence dimensions are not sorted by default
+  ignore("load data with sort column bounds: no sort columns explicitly 
specified" +
        " means all dimension columns will be sort columns, so bounds should be 
set correctly") {
     sql(s"DROP TABLE IF EXISTS $tableName")
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
index f07c417..c039c0f 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
@@ -210,8 +210,8 @@ class TestPreAggCreateCommand extends QueryTest with 
BeforeAndAfterAll {
     sql("create datamap agg0 on table mainTable using 'preaggregate' as select 
column3, sum(column3),column5, sum(column5) from maintable group by 
column3,column5")
     val df = sql("select * from maintable_agg0")
     val carbontable = getCarbonTable(df.queryExecution.analyzed)
-    assert(carbontable.getAllMeasures.size()==2)
-    assert(carbontable.getAllDimensions.size()==2)
+    assert(carbontable.getAllMeasures.size()==3)
+    assert(carbontable.getAllDimensions.size()==1)
     carbontable.getAllDimensions.asScala.foreach{ f =>
       assert(!f.getEncoder.contains(Encoding.DICTIONARY))
     }
@@ -222,8 +222,8 @@ class TestPreAggCreateCommand extends QueryTest with 
BeforeAndAfterAll {
     sql("create datamap agg0 on table mainTable using 'preaggregate' as select 
column3, sum(column3),column5, sum(column5) from maintable group by 
column3,column5,column2")
     val df = sql("select * from maintable_agg0")
     val carbontable = getCarbonTable(df.queryExecution.analyzed)
-    assert(carbontable.getAllMeasures.size()==2)
-    assert(carbontable.getAllDimensions.size()==3)
+    assert(carbontable.getAllMeasures.size()==3)
+    assert(carbontable.getAllDimensions.size()==2)
     carbontable.getAllDimensions.asScala.foreach{ f =>
       assert(!f.getEncoder.contains(Encoding.DICTIONARY))
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithSortScope.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithSortScope.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithSortScope.scala
index 3cd8243..890475d 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithSortScope.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestCreateTableWithSortScope.scala
@@ -43,7 +43,7 @@ class TestCreateTableWithSortScope extends QueryTest with 
BeforeAndAfterAll {
        | stringField STRING
        | )
        | STORED BY 'carbondata'
-       | TBLPROPERTIES('SORT_COLUMN'='stringField')
+       | TBLPROPERTIES('SORT_COLUMNS'='stringField')
        """.stripMargin)
 
     val exception_loaddata_sortscope: Exception = intercept[Exception] {
@@ -61,7 +61,7 @@ class TestCreateTableWithSortScope extends QueryTest with 
BeforeAndAfterAll {
          | stringField STRING
          | )
          | STORED BY 'carbondata'
-         | TBLPROPERTIES('SORT_COLUMN'='stringField', 
'SORT_SCOPE'='GLOBAL_SORT')
+         | TBLPROPERTIES('SORT_COLUMNS'='stringField', 
'SORT_SCOPE'='GLOBAL_SORT')
        """.stripMargin)
 
     checkExistence(sql("DESCRIBE FORMATTED tableWithGlobalSort"), true, 
"global_sort")
@@ -73,7 +73,7 @@ class TestCreateTableWithSortScope extends QueryTest with 
BeforeAndAfterAll {
          | stringField STRING
          | )
          | STORED BY 'carbondata'
-         | TBLPROPERTIES('SORT_COLUMN'='stringField', 
'SORT_SCOPE'='LOCAL_SORT')
+         | TBLPROPERTIES('SORT_COLUMNS'='stringField', 
'SORT_SCOPE'='LOCAL_SORT')
        """.stripMargin)
 
     sql("DESCRIBE FORMATTED tableWithLocalSort")
@@ -87,7 +87,7 @@ class TestCreateTableWithSortScope extends QueryTest with 
BeforeAndAfterAll {
          | stringField STRING
          | )
          | STORED BY 'carbondata'
-         | TBLPROPERTIES('SORT_COLUMN'='stringField', 
'SORT_SCOPE'='BATCH_SORT')
+         | TBLPROPERTIES('SORT_COLUMNS'='stringField', 
'SORT_SCOPE'='BATCH_SORT')
        """.stripMargin)
 
     checkExistence(sql("DESCRIBE FORMATTED tableWithBatchSort"), true, 
"batch_sort")
@@ -99,7 +99,7 @@ class TestCreateTableWithSortScope extends QueryTest with 
BeforeAndAfterAll {
          | stringField STRING
          | )
          | STORED BY 'carbondata'
-         | TBLPROPERTIES('SORT_COLUMN'='stringField', 'SORT_SCOPE'='NO_SORT')
+         | TBLPROPERTIES('SORT_COLUMNS'='stringField', 'SORT_SCOPE'='NO_SORT')
        """.stripMargin)
 
     checkExistence(sql("DESCRIBE FORMATTED tableWithNoSort"), true, "no_sort")
@@ -114,7 +114,7 @@ class TestCreateTableWithSortScope extends QueryTest with 
BeforeAndAfterAll {
            | stringField STRING
            | )
            | STORED BY 'carbondata'
-           | TBLPROPERTIES('SORT_COLUMN'='stringField', 'SORT_SCOPE'='abc')
+           | TBLPROPERTIES('SORT_COLUMNS'='stringField', 'SORT_SCOPE'='abc')
        """.stripMargin)
     }
     assert(exception_unsupported_sortscope.getMessage.contains(

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
index 942ad48..a166789 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/createTable/TestNonTransactionalCarbonTable.scala
@@ -2426,7 +2426,6 @@ class TestNonTransactionalCarbonTable extends QueryTest 
with BeforeAndAfterAll {
          |'$writerPath' """.stripMargin)
     val df = sql("describe formatted sdkTable")
     checkExistence(df, true, "Local Dictionary Enabled true")
-    checkExistence(df, true, "Inverted Index Columns name")
     FileUtils.deleteDirectory(new File(writerPath))
   }
 
@@ -2471,7 +2470,10 @@ class TestNonTransactionalCarbonTable extends QueryTest 
with BeforeAndAfterAll {
     FileUtils.deleteDirectory(new File(writerPath))
   }
 
-  test("test inverted index column by API") {
+  // Inverted index display is based on sort_scope, and the default sort_scope
+  // is now no_sort. Hence the inverted index will not be displayed for an
+  // external table, as we don't support inferring table properties.
+  ignore("test inverted index column by API") {
     FileUtils.deleteDirectory(new File(writerPath))
     val builder = CarbonWriter.builder
       
.sortBy(Array[String]("name")).withBlockSize(12).enableLocalDictionary(true)

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortFunctionTest.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortFunctionTest.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortFunctionTest.scala
index d49b962..bad8bdc 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortFunctionTest.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortFunctionTest.scala
@@ -56,6 +56,7 @@ class CompactionSupportGlobalSortFunctionTest extends 
QueryTest with BeforeAndAf
   override def afterEach {
     sql("DROP TABLE IF EXISTS compaction_globalsort")
     sql("DROP TABLE IF EXISTS carbon_localsort")
+    resetConf
   }
 
   test("Compaction type: major") {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortParameterTest.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortParameterTest.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortParameterTest.scala
index 54c19c2..4b027b9 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortParameterTest.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/CompactionSupportGlobalSortParameterTest.scala
@@ -57,6 +57,7 @@ class CompactionSupportGlobalSortParameterTest extends 
QueryTest with BeforeAndA
   override def afterEach {
     sql("DROP TABLE IF EXISTS compaction_globalsort")
     sql("DROP TABLE IF EXISTS carbon_localsort")
+    resetConf()
   }
 
   test("MINOR, ENABLE_AUTO_LOAD_MERGE: false") {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala
index c695b05..412fd37 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestBatchSortDataLoad.scala
@@ -82,7 +82,7 @@ class TestBatchSortDataLoad extends QueryTest with 
BeforeAndAfterAll {
         | c6 string, c7 int, c8 int, c9 int, c10 int)
         | STORED BY 'org.apache.carbondata.format'
         | TBLPROPERTIES('dictionary_include'='c1,c2,c3,c4,c5,c6',
-        | 'sort_scope'='batch_sort')
+        | 'sort_scope'='batch_sort', 'sort_columns'='c1,c2,c3,c4,c5,c6')
       """.stripMargin)
 
     sql(s"LOAD DATA LOCAL INPATH '$filePath' into table carbon_load1 " +
@@ -133,7 +133,7 @@ class TestBatchSortDataLoad extends QueryTest with 
BeforeAndAfterAll {
         | c6 string, c7 int, c8 int, c9 int, c10 int)
         | STORED BY 'org.apache.carbondata.format'
         | TBLPROPERTIES('dictionary_include'='c1,c2,c3,c4,c5,c6',
-        | 'sort_scope'='batch_sort')
+        | 'sort_scope'='batch_sort', 'sort_columns'='c1,c2,c3,c4,c5,c6')
       """.stripMargin)
 
     sql(s"LOAD DATA LOCAL INPATH '$filePath' into table carbon_load3 " +
@@ -156,7 +156,7 @@ class TestBatchSortDataLoad extends QueryTest with 
BeforeAndAfterAll {
         | CREATE TABLE carbon_load4(c1 string, c2 string, c3 string, c4 
string, c5 string,
         | c6 string, c7 int, c8 int, c9 int, c10 int)
         | STORED BY 'org.apache.carbondata.format'
-        | TBLPROPERTIES('dictionary_include'='c1,c2,c3,c4,c5,c6')
+        | TBLPROPERTIES('dictionary_include'='c1,c2,c3,c4,c5,c6', 
'sort_columns'='c1,c2,c3,c4,c5,c6')
       """.stripMargin)
 
     sql(s"LOAD DATA LOCAL INPATH '$filePath' into table carbon_load4 " )
@@ -204,6 +204,9 @@ class TestBatchSortDataLoad extends QueryTest with 
BeforeAndAfterAll {
   override def afterAll {
     dropTable
     new File(filePath).delete()
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
+        CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
   }
 }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestGlobalSortDataLoad.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestGlobalSortDataLoad.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestGlobalSortDataLoad.scala
index 44bc243..5eeee78 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestGlobalSortDataLoad.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestGlobalSortDataLoad.scala
@@ -50,7 +50,7 @@ class TestGlobalSortDataLoad extends QueryTest with 
BeforeAndAfterEach with Befo
     sql(
       """
         | CREATE TABLE carbon_globalsort(id INT, name STRING, city STRING, age 
INT)
-        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')
+        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT', 'sort_columns' = 'name, city')
       """.stripMargin)
   }
 
@@ -71,6 +71,9 @@ class TestGlobalSortDataLoad extends QueryTest with 
BeforeAndAfterEach with Befo
   }
 
   override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
+        CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
     sql("DROP TABLE IF EXISTS carbon_localsort_once")
     sql("DROP TABLE IF EXISTS carbon_localsort_twice")
     sql("DROP TABLE IF EXISTS carbon_localsort_triple")
@@ -91,7 +94,7 @@ class TestGlobalSortDataLoad extends QueryTest with 
BeforeAndAfterEach with Befo
       """
         | CREATE TABLE carbon_globalsort1(id INT, name STRING, city STRING, 
age INT)
         | STORED BY 'org.apache.carbondata.format'
-        | TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')
+        | TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT', 'sort_columns' = 'name, 
city')
       """.stripMargin)
 
     sql(s"LOAD DATA LOCAL INPATH '$filePath' INTO TABLE carbon_globalsort1 " +
@@ -139,7 +142,7 @@ class TestGlobalSortDataLoad extends QueryTest with 
BeforeAndAfterEach with Befo
         | CREATE TABLE carbon_globalsort_partitioned(name STRING, city STRING, 
age INT)
         | PARTITIONED BY (id INT)
         | STORED BY 'org.apache.carbondata.format'
-        | TBLPROPERTIES('PARTITION_TYPE'='HASH','NUM_PARTITIONS'='3', 
'SORT_SCOPE'='GLOBAL_SORT')
+        | TBLPROPERTIES('PARTITION_TYPE'='HASH','NUM_PARTITIONS'='3', 
'SORT_SCOPE'='GLOBAL_SORT', 'sort_columns' = 'name, city')
       """.stripMargin)
 
     intercept[MalformedCarbonCommandException] {
@@ -165,7 +168,7 @@ class TestGlobalSortDataLoad extends QueryTest with 
BeforeAndAfterEach with Befo
     sql(
       """
         | CREATE TABLE carbon_localsort_twice(id INT, name STRING, city 
STRING, age INT)
-        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')
+        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT', 'sort_columns' = 'name, city')
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$filePath' INTO TABLE 
carbon_localsort_twice")
     sql(s"LOAD DATA LOCAL INPATH '$filePath' INTO TABLE 
carbon_localsort_twice")
@@ -225,7 +228,7 @@ class TestGlobalSortDataLoad extends QueryTest with 
BeforeAndAfterEach with Befo
     sql(
       """
         | CREATE TABLE carbon_localsort_delete(id INT, name STRING, city 
STRING, age INT)
-        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')
+        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT', 'sort_columns' = 'name, city')
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$filePath' INTO TABLE 
carbon_localsort_delete")
     sql("DELETE FROM carbon_localsort_delete WHERE id = 1").show
@@ -244,7 +247,7 @@ class TestGlobalSortDataLoad extends QueryTest with 
BeforeAndAfterEach with Befo
     sql(
       """
         | CREATE TABLE carbon_localsort_update(id INT, name STRING, city 
STRING, age INT)
-        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')
+        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT', 'sort_columns' = 'name, city')
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$filePath' INTO TABLE 
carbon_localsort_update")
 
@@ -341,6 +344,7 @@ class TestGlobalSortDataLoad extends QueryTest with 
BeforeAndAfterEach with Befo
          | floatField FLOAT
          | )
          | STORED BY 'org.apache.carbondata.format'
+         | 
TBLPROPERTIES('sort_scope'='local_sort','sort_columns'='stringField')
        """.stripMargin)
     sql(
       s"""
@@ -363,7 +367,7 @@ class TestGlobalSortDataLoad extends QueryTest with 
BeforeAndAfterEach with Befo
          | charField CHAR(5),
          | floatField FLOAT
          | )
-         | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT')
+         | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('SORT_SCOPE'='GLOBAL_SORT', 'sort_columns' = 'stringField')
        """.stripMargin)
     sql(
       s"""

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala
index 3186ccd..e7494d6 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala
@@ -305,7 +305,8 @@ class TestLoadDataFrame extends QueryTest with 
BeforeAndAfterAll {
       .mode(SaveMode.Overwrite)
   }
 
-  test("test load dataframe with sort_columns not specified," +
+  // now, by default, no dimensions are selected for sorting (no_sort scope)
+  ignore("test load dataframe with sort_columns not specified," +
        " by default all string columns will be sort_columns") {
     // all string column will be sort_columns by default
     getDefaultWriter("df_write_sort_column_not_specified").save()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
index 61271e1..121a455 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
@@ -701,7 +701,7 @@ class TestLoadDataWithHiveSyntaxDefaultFormat extends 
QueryTest with BeforeAndAf
     sql(
       s"load data local inpath '$resourcesPath/double.csv' into table 
double_test options" +
       "('FILEHEADER'='empno,salary')")
-    checkAnswer(sql("select salary from double_test limit 
1"),Row(7.756787654567891E23))
+    checkAnswer(sql("select salary from double_test where empno =\"'abc'\" 
limit 1"),Row(7.756787654567891E23))
   }
 
   test("test table with specified table path") {
@@ -715,7 +715,7 @@ class TestLoadDataWithHiveSyntaxDefaultFormat extends 
QueryTest with BeforeAndAf
       s"load data local inpath '$resourcesPath/double.csv' into table 
table_path_test options" +
       "('FILEHEADER'='empno,salary')")
     assert(new File(path).exists())
-    checkAnswer(sql("select salary from table_path_test limit 
1"),Row(7.756787654567891E23))
+    checkAnswer(sql("select salary from table_path_test where empno =\"'abc'\" 
limit 1"),Row(7.756787654567891E23))
     sql("drop table table_path_test")
     assert(! new File(path).exists())
     assert(intercept[AnalysisException](
@@ -735,7 +735,7 @@ class TestLoadDataWithHiveSyntaxDefaultFormat extends 
QueryTest with BeforeAndAf
       s"load data local inpath '$resourcesPath/double.csv' into table 
test.table_path_test options" +
       "('FILEHEADER'='empno,salary')")
     assert(new File(path).exists())
-    checkAnswer(sql("select salary from test.table_path_test limit 
1"),Row(7.756787654567891E23))
+    checkAnswer(sql("select salary from test.table_path_test where empno 
=\"'abc'\" limit 1"),Row(7.756787654567891E23))
     sql("drop table test.table_path_test")
     assert(! new File(path).exists())
     assert(intercept[AnalysisException](

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapStatus.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapStatus.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapStatus.scala
index fec2279..683098e 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapStatus.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/TestDataMapStatus.scala
@@ -53,7 +53,7 @@ class TestDataMapStatus extends QueryTest with 
BeforeAndAfterAll {
     sql(
       """
         | CREATE TABLE datamapstatustest(id int, name string, city string, age 
int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('sort_scope'='local_sort','sort_columns'='name,city')
       """.stripMargin)
     sql(
       s"""create datamap statusdatamap on table datamapstatustest
@@ -74,7 +74,7 @@ class TestDataMapStatus extends QueryTest with 
BeforeAndAfterAll {
     sql(
       """
         | CREATE TABLE datamapstatustest(id int, name string, city string, age 
int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('sort_scope'='local_sort','sort_columns'='name,city')
       """.stripMargin)
     sql(
       s"""create datamap statusdatamap on table datamapstatustest
@@ -96,7 +96,7 @@ class TestDataMapStatus extends QueryTest with 
BeforeAndAfterAll {
     sql(
       """
         | CREATE TABLE datamapstatustest1(id int, name string, city string, 
age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('sort_scope'='local_sort','sort_columns'='name,city')
       """.stripMargin)
     sql(
       s"""create datamap statusdatamap1 on table datamapstatustest1
@@ -123,7 +123,7 @@ class TestDataMapStatus extends QueryTest with 
BeforeAndAfterAll {
     sql(
       """
         | CREATE TABLE datamapstatustest2(id int, name string, city string, 
age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('sort_scope'='local_sort','sort_columns'='name,city')
       """.stripMargin)
     sql(
       s"""create datamap statusdatamap2 on table datamapstatustest2
@@ -157,7 +157,7 @@ class TestDataMapStatus extends QueryTest with 
BeforeAndAfterAll {
     sql(
       """
         | CREATE TABLE datamapstatustest3(id int, name string, city string, 
age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('sort_scope'='local_sort','sort_columns'='name,city')
       """.stripMargin)
     intercept[MalformedDataMapCommandException] {
       sql(
@@ -175,7 +175,7 @@ class TestDataMapStatus extends QueryTest with 
BeforeAndAfterAll {
     sql(
       """
         | CREATE TABLE datamapstatustest3(id int, name string, city string, 
age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED BY 'org.apache.carbondata.format' 
TBLPROPERTIES('sort_scope'='local_sort','sort_columns'='name,city')
       """.stripMargin)
     sql(
       s"""create datamap statusdatamap3 on table datamapstatustest3

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/TestInsertAndOtherCommandConcurrent.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/TestInsertAndOtherCommandConcurrent.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/TestInsertAndOtherCommandConcurrent.scala
index c079529..bbe1368 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/TestInsertAndOtherCommandConcurrent.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/TestInsertAndOtherCommandConcurrent.scala
@@ -90,7 +90,9 @@ class TestInsertAndOtherCommandConcurrent extends QueryTest 
with BeforeAndAfterA
 
   private def createTable(tableName: String, schema: StructType): Unit = {
     val schemaString = schema.fields.map(x => x.name + " " + 
x.dataType.typeName).mkString(", ")
-    sql(s"CREATE TABLE $tableName ($schemaString) stored as carbondata")
+    sql(s"CREATE TABLE $tableName ($schemaString) stored as carbondata 
tblproperties" +
+        
s"('sort_scope'='local_sort','sort_columns'='o_country,o_name,o_phonetype,o_serialname,"
 +
+        s"o_comment')")
   }
 
   override def afterAll {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
index 744a310..9e7106c 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/iud/UpdateCarbonTableTestCase.scala
@@ -423,7 +423,8 @@ class UpdateCarbonTableTestCase extends QueryTest with 
BeforeAndAfterAll {
         sql("update update_with_bad_record set (item)=(3.45)").show()
         sql("drop table if exists update_with_bad_record")
       }
-      assert(errorMessage.getMessage.contains("Data load failed due to bad 
record"))
+      assert(errorMessage.getMessage
+        .contains("Update operation failed"))
     } finally {
       CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FORCE")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
index 2fabeb3..df97d0f 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumns.scala
@@ -389,6 +389,9 @@ class TestSortColumns extends QueryTest with 
BeforeAndAfterAll {
     dropTestTables
     CarbonProperties.getInstance().addProperty(
       CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
+        CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
   }
 
   def dropTestTables = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumnsWithUnsafe.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumnsWithUnsafe.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumnsWithUnsafe.scala
index 16e434b..136474e 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumnsWithUnsafe.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/sortcolumns/TestSortColumnsWithUnsafe.scala
@@ -252,6 +252,10 @@ class TestSortColumnsWithUnsafe extends QueryTest with 
BeforeAndAfterAll with Be
         CarbonCommonConstants.ENABLE_UNSAFE_IN_QUERY_EXECUTION_DEFAULTVALUE)
 
     dropTable
+
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.LOAD_SORT_SCOPE,
+        CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)
   }
 
   def dropTable = {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/InsertTaskCompletionListener.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/InsertTaskCompletionListener.scala
 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/InsertTaskCompletionListener.scala
index 7246645..b6a2fa7 100644
--- 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/InsertTaskCompletionListener.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/InsertTaskCompletionListener.scala
@@ -21,7 +21,7 @@ import org.apache.spark.TaskContext
 import 
org.apache.spark.sql.carbondata.execution.datasources.tasklisteners.CarbonLoadTaskCompletionListener
 import org.apache.spark.sql.execution.command.ExecutionErrors
 
-import org.apache.carbondata.core.util.ThreadLocalTaskInfo
+import org.apache.carbondata.core.util.{DataTypeUtil, ThreadLocalTaskInfo}
 import org.apache.carbondata.processing.loading.{DataLoadExecutor, 
FailureCauses}
 import org.apache.carbondata.spark.util.CommonUtil
 
@@ -34,12 +34,15 @@ class InsertTaskCompletionListener(dataLoadExecutor: 
DataLoadExecutor,
     }
     catch {
       case e: Exception =>
-        if (executorErrors.failureCauses != FailureCauses.BAD_RECORDS) {
+        if (executorErrors.failureCauses == FailureCauses.NONE) {
+          // If an error already happened before task completion, that original
+          // error needs to be thrown, not the new one. Hence skip rethrowing here.
           throw e
         }
     }
     finally {
       
CommonUtil.clearUnsafeMemory(ThreadLocalTaskInfo.getCarbonTaskInfo.getTaskId)
+      DataTypeUtil.clearFormatter()
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
index f7249b8..f76a8d9 100644
--- 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
@@ -39,7 +39,7 @@ import 
org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.metadata.datatype.DataTypes
 import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, 
SegmentStatus}
-import org.apache.carbondata.core.util.{CarbonProperties, 
CarbonTimeStatisticsFactory, ThreadLocalTaskInfo}
+import org.apache.carbondata.core.util.{CarbonProperties, 
CarbonTimeStatisticsFactory, DataTypeUtil, ThreadLocalTaskInfo}
 import org.apache.carbondata.core.util.path.CarbonTablePath
 import org.apache.carbondata.processing.loading.{DataLoadExecutor, 
FailureCauses, TableProcessingOperations}
 import org.apache.carbondata.processing.loading.csvinput.{BlockDetails, 
CSVInputFormat, CSVRecordReaderIterator}
@@ -141,6 +141,8 @@ class NewCarbonDataLoadRDD[K, V](
           loadMetadataDetails)
         // Initialize to set carbon properties
         loader.initialize()
+        // need to clear the thread locals before every load.
+        DataTypeUtil.clearFormatter()
         val executor = new DataLoadExecutor()
         // in case of success, failure or cancelation clear memory and stop 
execution
         context

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala
 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala
index 97449c5..3c2e2c7 100644
--- 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala
@@ -27,7 +27,7 @@ import org.apache.spark.sql.profiler.{Profiler, QueryTaskEnd}
 import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.memory.UnsafeMemoryManager
 import org.apache.carbondata.core.stats.{QueryStatistic, 
QueryStatisticsConstants, QueryStatisticsRecorder}
-import org.apache.carbondata.core.util.{TaskMetricsMap, ThreadLocalTaskInfo}
+import org.apache.carbondata.core.util.{DataTypeUtil, TaskMetricsMap, 
ThreadLocalTaskInfo}
 import org.apache.carbondata.spark.InitInputMetrics
 
 class QueryTaskCompletionListener(freeMemory: Boolean,
@@ -51,6 +51,7 @@ class QueryTaskCompletionListener(freeMemory: Boolean,
     if (freeMemory) {
       UnsafeMemoryManager.INSTANCE
         .freeMemoryAll(ThreadLocalTaskInfo.getCarbonTaskInfo.getTaskId)
+      DataTypeUtil.clearFormatter()
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
 
b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
index c971573..b21aee1 100644
--- 
a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
@@ -734,13 +734,13 @@ abstract class CarbonDDLSqlParser extends 
AbstractCarbonSparkSQLParser {
     }
 
     // All columns in sortkey should be there in create table cols
-    val sortKeyOption = tableProperties.get(CarbonCommonConstants.SORT_COLUMNS)
-    var sortKeyDimsTmp: Seq[String] = Seq[String]()
-    val sortKeyString: String = if (sortKeyOption.isDefined) {
-      CarbonUtil.unquoteChar(sortKeyOption.get) trim
-    } else {
-      ""
+    var sortKeyOption = tableProperties.get(CarbonCommonConstants.SORT_COLUMNS)
+    if (!sortKeyOption.isDefined) {
+      // by default, no columns are selected for sorting in the no_sort scope
+      sortKeyOption = Some("")
     }
+    val sortKeyString: String = CarbonUtil.unquoteChar(sortKeyOption.get) trim
+    var sortKeyDimsTmp: Seq[String] = Seq[String]()
     if (!sortKeyString.isEmpty) {
       val sortKey = sortKeyString.split(',').map(_.trim)
       if (sortKey.diff(sortKey.distinct).length > 0 ||

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
----------------------------------------------------------------------
diff --git 
a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
 
b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
index 745eee7..14efabb 100644
--- 
a/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchemaCommon.scala
@@ -40,7 +40,7 @@ import 
org.apache.carbondata.core.metadata.schema.table.{CarbonTable, RelationId
 import org.apache.carbondata.core.metadata.schema.table.column.{ColumnSchema, 
ParentColumnTableRelation}
 import org.apache.carbondata.core.service.impl.ColumnUniqueIdGenerator
 import org.apache.carbondata.core.statusmanager.{LoadMetadataDetails, 
SegmentUpdateStatusManager}
-import org.apache.carbondata.core.util.{CarbonUtil, DataTypeUtil}
+import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil, 
DataTypeUtil}
 import org.apache.carbondata.processing.loading.FailureCauses
 import org.apache.carbondata.processing.loading.model.CarbonLoadModel
 import org.apache.carbondata.processing.merger.CompactionType
@@ -848,6 +848,18 @@ class TableNewProcessor(cm: TableModel) {
       tableSchema.getTableId,
       cm.databaseNameOp.getOrElse("default"))
     tablePropertiesMap.put("bad_record_path", badRecordsPath)
+    if (tablePropertiesMap.get("sort_columns") != null) {
+      val sortCol = tablePropertiesMap.get("sort_columns")
+      if ((!sortCol.trim.isEmpty) && tablePropertiesMap.get("sort_scope") == null) {
+        // If sort_columns are present but sort_scope is not specified, default sort_scope to
+        // local_sort. It cannot simply be written into the table properties: table properties
+        // take higher priority, so a sort scope the user sets through carbon properties would
+        // never be reflected. Hence it is only added when the carbon property is unset.
+        if (CarbonProperties.getInstance().getProperty(CarbonCommonConstants.LOAD_SORT_SCOPE) ==
+            null) {
+          tablePropertiesMap.put("sort_scope", "LOCAL_SORT")
+        }
+      }
+    }
     tableSchema.setTableProperties(tablePropertiesMap)
     if (cm.bucketFields.isDefined) {
       val bucketCols = cm.bucketFields.get.bucketColumns.map { b =>
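The block added above only kicks in when the user asks for sorting but does not say how: non-empty sort_columns with no sort_scope fall back to LOCAL_SORT instead of the new NO_SORT default, and the fallback is skipped when the sort-scope carbon property is already set. Sketched as DDL (names illustrative):

    // no sort_scope given, but SORT_COLUMNS is non-empty: assuming the
    // CarbonCommonConstants.LOAD_SORT_SCOPE carbon property is unset, the
    // table property sort_scope is filled in as LOCAL_SORT
    spark.sql(
      "CREATE TABLE t3 (id INT, name STRING) " +
        "STORED BY 'org.apache.carbondata.format' " +
        "TBLPROPERTIES ('SORT_COLUMNS'='name')")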

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonSparkDataSourceUtil.scala
----------------------------------------------------------------------
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonSparkDataSourceUtil.scala b/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonSparkDataSourceUtil.scala
index 0d7ddae..1649afd 100644
--- a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonSparkDataSourceUtil.scala
+++ b/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/CarbonSparkDataSourceUtil.scala
@@ -241,7 +241,7 @@ object CarbonSparkDataSourceUtil {
         } else {
           cols.split(",").map(_.trim)
         }
-      case _ => null
+      case _ => Array[String]()
     }
     builder.sortBy(sortCols)
     val invertedIdxCols = options.get(CarbonCommonConstants.INVERTED_INDEX) match {
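Returning an empty array instead of null keeps the sort-columns option uniform downstream: the writer builder now always receives an array, and an empty one simply means "no columns to sort" under the no_sort default, with no null checks needed. A hypothetical helper (not from the patch) illustrating the convention:

    // hypothetical sketch of the option resolution after this change
    def resolveSortCols(option: Option[String]): Array[String] = option match {
      case Some(cols) if cols.trim.nonEmpty => cols.split(",").map(_.trim)
      case _ => Array.empty[String] // previously null, which callers had to special-case
    }

    resolveSortCols(None)            // Array() -> no_sort
    resolveSortCols(Some("name,id")) // Array("name", "id")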

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/tasklisteners/CarbonTaskCompletionListener.scala
----------------------------------------------------------------------
diff --git a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/tasklisteners/CarbonTaskCompletionListener.scala b/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/tasklisteners/CarbonTaskCompletionListener.scala
index 9d889d4..f4bbcf4 100644
--- a/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/tasklisteners/CarbonTaskCompletionListener.scala
+++ b/integration/spark-datasource/src/main/scala/org/apache/spark/sql/carbondata/execution/datasources/tasklisteners/CarbonTaskCompletionListener.scala
@@ -26,7 +26,7 @@ import org.apache.spark.util.TaskCompletionListener
 
 import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.memory.UnsafeMemoryManager
-import org.apache.carbondata.core.util.ThreadLocalTaskInfo
+import org.apache.carbondata.core.util.{DataTypeUtil, ThreadLocalTaskInfo}
 import org.apache.carbondata.hadoop.internal.ObjectArrayWritable
 
 /**
@@ -54,6 +54,7 @@ case class CarbonQueryTaskCompletionListenerImpl(iter: RecordReaderIterator[Inte
       UnsafeMemoryManager.INSTANCE
         .freeMemoryAll(ThreadLocalTaskInfo.getCarbonTaskInfo.getTaskId)
     }
+    DataTypeUtil.clearFormatter()
   }
 }
 
@@ -67,6 +68,7 @@ case class CarbonLoadTaskCompletionListenerImpl(recordWriter: RecordWriter[NullW
     } finally {
       UnsafeMemoryManager.INSTANCE
         .freeMemoryAll(ThreadLocalTaskInfo.getCarbonTaskInfo.getTaskId)
+      DataTypeUtil.clearFormatter()
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
index dce2953..b77fdc8 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/AlterTableValidationTestCase.scala
@@ -523,7 +523,8 @@ class AlterTableValidationTestCase extends Spark2QueryTest with BeforeAndAfterAl
     }
   }
 
-  test("describe formatted for default sort_columns pre and post alter") {
+  // after changing the default sort_scope to no_sort, dimensions are no longer selected
+  // for sorting by default
+  ignore("describe formatted for default sort_columns pre and post alter") {
     sql("CREATE TABLE defaultSortColumnsWithAlter (empno int, empname String, designation String,role String, doj Timestamp) STORED BY 'org.apache.carbondata.format' " +
         "tblproperties('DICTIONARY_INCLUDE'='empno','DICTIONARY_EXCLUDE'='role')")
     sql("alter table defaultSortColumnsWithAlter drop columns (designation)")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
index e0aa012..5ea79e3 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
@@ -645,7 +645,9 @@ class AddColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
     checkExistence(sql("desc formatted NO_INVERTED_CARBON"),false,"Inverted Index Columns name, col1")
   }
 
-  test("inverted index after alter command") {
+  // sort_columns cannot be specified for a newly added column, so the inverted index
+  // is not displayed for a column that is not in sort_columns
+  ignore("inverted index after alter command") {
     sql("drop table if exists NO_INVERTED_CARBON")
     sql(
       """

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/integration/spark2/src/test/scala/org/apache/spark/sql/CarbonGetTableDetailComandTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/sql/CarbonGetTableDetailComandTestCase.scala b/integration/spark2/src/test/scala/org/apache/spark/sql/CarbonGetTableDetailComandTestCase.scala
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java b/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java
index 759cf04..78049a4 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/model/LoadOption.java
@@ -188,7 +188,7 @@ public class LoadOption {
     }
 
     optionsFinal.put("single_pass", String.valueOf(singlePass));
-    optionsFinal.put("sort_scope", "local_sort");
+    optionsFinal.put("sort_scope", CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT);
     optionsFinal.put("sort_column_bounds", Maps.getOrDefault(options, "sort_column_bounds", ""));
     optionsFinal.put(CarbonCommonConstants.CARBON_LOAD_MIN_SIZE_INMB,
         Maps.getOrDefault(options, CarbonCommonConstants.CARBON_LOAD_MIN_SIZE_INMB,
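LOAD no longer hard-codes local_sort as its fallback; it follows CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT (now no_sort), and a load can still override it explicitly. A hedged example (table name and path are illustrative):

    // per-load override of the new no_sort default
    spark.sql(
      "LOAD DATA INPATH '/tmp/data.csv' INTO TABLE t3 " +
        "OPTIONS ('SORT_SCOPE'='GLOBAL_SORT')")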

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
index 0d38f5c..4c25ce3 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/steps/CarbonRowDataWriterProcessorStepImpl.java
@@ -152,7 +152,7 @@ public class CarbonRowDataWriterProcessorStepImpl extends AbstractDataLoadProces
       if (e instanceof BadRecordFoundException) {
         throw new BadRecordFoundException(e.getMessage(), e);
       }
-      throw new CarbonDataLoadingException("There is an unexpected error: " + e.getMessage(), e);
+      throw new CarbonDataLoadingException(e.getMessage(), e);
     }
     return null;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index 5c8f701..2fe7de4 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -57,7 +57,8 @@ import org.apache.hadoop.conf.Configuration;
 public class CarbonWriterBuilder {
   private Schema schema;
   private String path;
-  private String[] sortColumns;
+  // initialize with an empty array, as no columns should be selected for sorting in NO_SORT
+  private String[] sortColumns = new String[0];
   private int blockletSize;
   private int blockSize;
   private long timestamp;
@@ -540,6 +541,19 @@ public class CarbonWriterBuilder {
     }
     // for the longstring field, change the datatype from string to varchar
     this.schema = updateSchemaFields(carbonSchema, longStringColumns);
+    if (sortColumns != null && sortColumns.length != 0) {
+      if (options == null || options.get("sort_scope") == null) {
+        // If sort_columns are specified but sort_scope is not, change the sort scope to
+        // local_sort, since the default sort scope is now no_sort.
+        if (CarbonProperties.getInstance().getProperty(CarbonCommonConstants.LOAD_SORT_SCOPE)
+            == null) {
+          if (options == null) {
+            options = new HashMap<>();
+          }
+          options.put("sort_scope", "local_sort");
+        }
+      }
+    }
     // build CarbonTable using schema
     CarbonTable table = buildCarbonTable();
     // build LoadModel
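For the SDK the same promotion happens at build time: a writer configured with non-empty sort columns but no explicit sort_scope now gets local_sort, while a writer left with the default empty sortColumns stays on no_sort. A minimal sketch, assuming the 1.5.x CSV writer API (builder method names have shifted between SDK releases):

    import org.apache.carbondata.core.metadata.datatype.DataTypes
    import org.apache.carbondata.sdk.file.{CarbonWriter, Field, Schema}

    val fields = Array(new Field("name", DataTypes.STRING), new Field("age", DataTypes.INT))
    val writer = CarbonWriter.builder()
      .outputPath("/tmp/sdk_out")   // illustrative path
      .sortBy(Array("name"))        // non-empty sort columns, no sort_scope option ...
      .buildWriterForCSVInput(new Schema(fields))
    // ... so the builder injects sort_scope=local_sort before building the table

    writer.write(Array("robot0", "10"))
    writer.close()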

http://git-wip-us.apache.org/repos/asf/carbondata/blob/cabafe56/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
index a3624b0..72510e9 100644
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
+++ b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/CarbonReaderTest.java
@@ -75,20 +75,11 @@ public class CarbonReaderTest extends TestCase {
     CarbonReader reader = CarbonReader.builder(path, "_temp")
         .projection(new String[]{"name", "age"}).build();
 
-    // expected output after sorting
-    String[] name = new String[200];
-    Integer[] age = new Integer[200];
-    for (int i = 0; i < 200; i++) {
-      name[i] = "robot" + (i / 10);
-      age[i] = i;
-    }
-
     int i = 0;
     while (reader.hasNext()) {
       Object[] row = (Object[]) reader.readNextRow();
-      // Default sort column is applied for dimensions. So, need  to validate accordingly
-      assert(Arrays.asList(name).contains(row[0]));
-      assert(Arrays.asList(age).contains(row[1]));
+      Assert.assertEquals(("robot" + (i % 10)), row[0]);
+      Assert.assertEquals(i, row[1]);
       i++;
     }
     Assert.assertEquals(i, 200);
@@ -102,9 +93,8 @@ public class CarbonReaderTest extends TestCase {
     i = 0;
     while (reader2.hasNext()) {
       Object[] row = (Object[]) reader2.readNextRow();
-      // Default sort column is applied for dimensions. So, need  to validate accordingly
-      assert(Arrays.asList(name).contains(row[0]));
-      assert(Arrays.asList(age).contains(row[1]));
+      Assert.assertEquals(("robot" + (i % 10)), row[0]);
+      Assert.assertEquals(i, row[1]);
       i++;
     }
     Assert.assertEquals(i, 200);
@@ -578,22 +568,14 @@ public class CarbonReaderTest extends TestCase {
         .projection(new String[]{"name", "age", "age", "name"})
         .build();
 
-    // expected output after sorting
-    String[] name = new String[100];
-    int[] age = new int[100];
-    for (int i = 0; i < 100; i++) {
-      name[i] = "robot" + (i / 10);
-      age[i] = (i % 10) * 10 + i / 10;
-    }
-
     int i = 0;
     while (reader.hasNext()) {
       Object[] row = (Object[]) reader.readNextRow();
       // Default sort column is applied for dimensions. So, need  to validate accordingly
-      Assert.assertEquals(name[i], row[0]);
-      Assert.assertEquals(age[i], row[1]);
-      Assert.assertEquals(age[i], row[2]);
-      Assert.assertEquals(name[i], row[3]);
+      Assert.assertEquals(("robot" + (i % 10)), row[0]);
+      Assert.assertEquals(i, row[1]);
+      Assert.assertEquals(i, row[2]);
+      Assert.assertEquals("robot" + (i % 10), row[3]);
       i++;
     }
     Assert.assertEquals(i, 100);
@@ -700,20 +682,11 @@ public class CarbonReaderTest extends TestCase {
         .projection(new String[]{"name", "age"})
         .build();
 
-    // expected output after sorting
-    String[] name = new String[100];
-    int[] age = new int[100];
-    for (int i = 0; i < 100; i++) {
-      name[i] = "robot" + (i / 10);
-      age[i] = (i % 10) * 10 + i / 10;
-    }
-
     int i = 0;
     while (reader.hasNext()) {
       Object[] row = (Object[]) reader.readNextRow();
-      // Default sort column is applied for dimensions. So, need  to validate accordingly
-      Assert.assertEquals(name[i], row[0]);
-      Assert.assertEquals(age[i], row[1]);
+      Assert.assertEquals(("robot" + (i % 10)), row[0]);
+      Assert.assertEquals(i, row[1]);
       i++;
     }
     Assert.assertEquals(i, 100);
@@ -736,20 +709,11 @@ public class CarbonReaderTest extends TestCase {
 
     CarbonReader reader = CarbonReader.builder(path).build();
 
-    // expected output after sorting
-    String[] name = new String[100];
-    int[] age = new int[100];
-    for (int i = 0; i < 100; i++) {
-      name[i] = "robot" + (i / 10);
-      age[i] = (i % 10) * 10 + i / 10;
-    }
-
     int i = 0;
     while (reader.hasNext()) {
       Object[] row = (Object[]) reader.readNextRow();
-      // Default sort column is applied for dimensions. So, need  to validate accordingly
-      Assert.assertEquals(name[i], row[0]);
-      Assert.assertEquals(age[i], row[1]);
+      Assert.assertEquals(("robot" + (i % 10)), row[0]);
+      Assert.assertEquals(i, row[1]);
       i++;
     }
     Assert.assertEquals(i, 100);
@@ -802,20 +766,11 @@ public class CarbonReaderTest extends TestCase {
         .projection(new String[]{"name", "age"})
         .build();
 
-    // expected output after sorting
-    String[] name = new String[100];
-    int[] age = new int[100];
-    for (int i = 0; i < 100; i++) {
-      name[i] = "robot" + (i / 10);
-      age[i] = (i % 10) * 10 + i / 10;
-    }
-
     int i = 0;
     while (reader.hasNext()) {
       Object[] row = (Object[]) reader.readNextRow();
-      // Default sort column is applied for dimensions. So, need  to validate accordingly
-      Assert.assertEquals(name[i], row[0]);
-      Assert.assertEquals(age[i], row[1]);
+      Assert.assertEquals(("robot" + (i % 10)), row[0]);
+      Assert.assertEquals(i, row[1]);
       i++;
     }
     Assert.assertEquals(i, 100);
@@ -1192,20 +1147,11 @@ public class CarbonReaderTest extends TestCase {
 
     CarbonReader reader = CarbonReader.builder(path, "_temp").build();
 
-    // expected output after sorting
-    String[] name = new String[100];
-    int[] age = new int[100];
-    for (int i = 0; i < 100; i++) {
-      name[i] = "robot" + (i / 10);
-      age[i] = (i % 10) * 10 + i / 10;
-    }
-
     int i = 0;
     while (reader.hasNext()) {
       Object[] row = (Object[]) reader.readNextRow();
-      // Default sort column is applied for dimensions. So, need  to validate accordingly
-      Assert.assertEquals(name[i], row[0]);
-      Assert.assertEquals(age[i], row[1]);
+      Assert.assertEquals(("robot" + (i % 10)), row[0]);
+      Assert.assertEquals(i, row[1]);
       i++;
     }
     Assert.assertEquals(i, 100);
@@ -1227,19 +1173,11 @@ public class CarbonReaderTest extends TestCase {
 
     CarbonReader reader = CarbonReader.builder(path, "_temp").build();
 
-    // expected output after sorting
-    String[] name = new String[100];
-    int[] age = new int[100];
-    for (int i = 0; i < 100; i++) {
-      name[i] = "robot" + (i / 10);
-      age[i] = (i % 10) * 10 + i / 10;
-    }
-
     int i = 0;
     while (reader.hasNext()) {
       Object[] row = (Object[]) reader.readNextRow();
-      Assert.assertEquals(name[i], row[0]);
-      Assert.assertEquals(age[i], row[1]);
+      Assert.assertEquals(("robot" + (i % 10)), row[0]);
+      Assert.assertEquals(i, row[1]);
       i++;
     }
     reader.close();
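The rewritten assertions above encode the new default directly: under no_sort the reader hands rows back in write order, so the tests can check exact positions ("robot" + (i % 10), age i) instead of the membership checks that sorted output used to require. A read loop in the same shape, using only the reader API shown in the diff (path is illustrative):

    // write order == read order under the no_sort default
    val reader = CarbonReader.builder("/tmp/sdk_out", "_temp")
      .projection(Array("name", "age"))
      .build()
    var i = 0
    while (reader.hasNext) {
      val row = reader.readNextRow().asInstanceOf[Array[AnyRef]]
      assert(row(0) == "robot" + (i % 10)) // no regrouping by sort columns any more
      assert(row(1) == i)
      i += 1
    }
    reader.close()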
