[2/3] incubator-carbondata git commit: WIP provide dictionary server/client framework

2016-12-29 Thread jackylk
WIP provide dictionary server/client framework

complete the netty communication

netty

implement methods, and use new dictionarygenerator

trigger msg: initial, generate, size

add msg: write dictionary

fix writing multiple dictionary files

ObjectDecoder

fix error

fix query error

remove no used files

shutdown dictionary server and client

fixDictServerCloseBug and format code

testOnePass

fix second load

fix complex

fix multi-level complex

optimize DictionaryKey

fix testcase

fix testcase and get dict cache

 fix checkstyle

remove some useless judgement conditions

fix complex test case

fix testcase

fix multi-task

fix print

use json for serialize, and fix write dictionary issues

fix duplicate dictionary

write dictionary once

multi thread write dictionary

add testcase

RemoveUselessCode

fix comments

fix nullpointerexception

optimize

support predef column dictionary

use kryo


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/05b26549
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/05b26549
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/05b26549

Branch: refs/heads/master
Commit: 05b26549e929c2a461b0d18e5b58d3cd61bec1d5
Parents: 241f45f
Author: ravipesala 
Authored: Tue Nov 15 19:15:17 2016 +0530
Committer: jackylk 
Committed: Thu Dec 29 20:49:05 2016 +0800

--
 .../AbstractColumnDictionaryInfo.java   |   2 +-
 .../dictionary/AbstractDictionaryCache.java |  25 --
 .../cache/dictionary/ColumnDictionaryInfo.java  |   2 +-
 .../dictionary/ColumnReverseDictionaryInfo.java |   2 +-
 .../cache/dictionary/ForwardDictionary.java |   4 +-
 .../cache/dictionary/ReverseDictionary.java |   4 +-
 .../metadata/schema/table/CarbonTable.java  |  62 +++--
 .../core/constants/CarbonCommonConstants.java   |  10 +
 .../dictionary/client/DictionaryClient.java |  92 +++
 .../client/DictionaryClientHandler.java | 109 +
 .../dictionary/generator/DictionaryWriter.java  |  29 +++
 .../IncrementalColumnDictionaryGenerator.java   | 241 +++
 .../generator/ServerDictionaryGenerator.java|  74 ++
 .../generator/TableDictionaryGenerator.java | 116 +
 .../dictionary/generator/key/DictionaryKey.java |  92 +++
 .../dictionary/generator/key/KryoRegister.java  |  68 ++
 .../dictionary/server/DictionaryServer.java |  93 +++
 .../server/DictionaryServerHandler.java | 108 +
 .../apache/carbondata/core/util/CarbonUtil.java |  27 +++
 .../spark/util/GlobalDictionaryUtil.scala   |   4 +-
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala |   3 +-
 .../spark/rdd/CarbonDataRDDFactory.scala|  15 ++
 .../execution/command/carbonTableSchema.scala   |  79 +-
 .../test/resources/columndictionary/country.csv |   5 +
 .../test/resources/columndictionary/name.csv|  10 +
 .../spark/src/test/resources/dataIncrement.csv  |  21 ++
 .../complexType/TestCreateTableWithDouble.scala |   2 +
 .../dataload/TestLoadDataWithSinglePass.scala   | 114 +
 .../filterexpr/FilterProcessorTestCase.scala|   2 +-
 .../processing/datatypes/PrimitiveDataType.java |  41 +++-
 .../processing/model/CarbonLoadModel.java   |  45 
 .../newflow/CarbonDataLoadConfiguration.java|  42 
 .../newflow/DataLoadProcessBuilder.java |  11 +
 .../impl/DictionaryFieldConverterImpl.java  |  52 +++-
 .../converter/impl/FieldEncoderFactory.java |  23 +-
 .../converter/impl/RowConverterImpl.java|  45 +++-
 .../DictionaryServerClientDictionary.java   |  95 
 37 files changed, 1692 insertions(+), 77 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/05b26549/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
 
b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
index ad766d7..ab400f0 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
@@ -282,7 +282,7 @@ public abstract class AbstractColumnDictionaryInfo 
implements DictionaryInfo {
* 2. Filter scenarios where from value surrogate key has to be found.
*
* @param value dictionary value
-   * @return if found returns key else 0
+   * @return if found returns key else INVALID_SURROGATE_KEY
*/
   @Override public int getSurrogateKey(String value) {
 byte[] keyData = 
value.getBytes

incubator-carbondata git commit: [CARBONDATA-581] Wrong number of executors requested when preferred locations are not obtained This closes #487

2017-01-01 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 7508cba29 -> f9971909b


[CARBONDATA-581] Wrong number of executors requested when preferred locations 
are not obtained This closes #487


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/f9971909
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/f9971909
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/f9971909

Branch: refs/heads/master
Commit: f9971909b45fd7938ed4b912b5c5ea1d20197355
Parents: 20a0b9e 7508cba
Author: jackylk 
Authored: Sun Jan 1 19:40:12 2017 +0800
Committer: jackylk 
Committed: Sun Jan 1 19:40:12 2017 +0800

--
 .../main/scala/org/apache/spark/sql/hive/DistributionUtil.scala  | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--




[2/2] incubator-carbondata git commit: [CARBONDATA-218]Using CSVInputFormat instead of spark-csv during dictionary generation This closes #494

2017-01-04 Thread jackylk
[CARBONDATA-218]Using CSVInputFormat instead of spark-csv during dictionary 
generation This closes #494


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/82072ee1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/82072ee1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/82072ee1

Branch: refs/heads/master
Commit: 82072ee194ee3e103ad2699548695d60874f6657
Parents: af956f5 e909375
Author: jackylk 
Authored: Wed Jan 4 17:43:39 2017 +0800
Committer: jackylk 
Committed: Wed Jan 4 17:43:39 2017 +0800

--
 integration/spark-common/pom.xml|   5 -
 .../carbondata/spark/csv/CarbonCsvReader.scala  | 182 --
 .../spark/csv/CarbonCsvRelation.scala   | 249 ---
 .../carbondata/spark/csv/CarbonTextFile.scala   |  91 ---
 .../carbondata/spark/csv/DefaultSource.scala| 183 --
 .../spark/rdd/CarbonGlobalDictionaryRDD.scala   |  30 +++
 .../spark/rdd/NewCarbonDataLoadRDD.scala|  16 +-
 .../carbondata/spark/util/CommonUtil.scala  |  46 
 .../spark/util/GlobalDictionaryUtil.scala   |  75 +++---
 integration/spark/pom.xml   |   5 -
 .../spark/CarbonDataFrameWriter.scala   |  40 ++-
 .../spark/rdd/CarbonDataRDDFactory.scala|   3 +-
 .../spark/sql/CarbonDatasourceRelation.scala|   2 -
 .../dataload/DefaultSourceTestCase.scala| 105 
 .../TestLoadDataWithNotProperInputFile.scala|   1 +
 .../spark/rdd/CarbonDataRDDFactory.scala|   3 +-
 pom.xml |   1 -
 17 files changed, 158 insertions(+), 879 deletions(-)
--




[1/2] incubator-carbondata git commit: unify csv reader

2017-01-04 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master af956f533 -> 82072ee19


unify csv reader


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/e9093755
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/e9093755
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/e9093755

Branch: refs/heads/master
Commit: e90937555ea443bea77c75079e9babb3b703fe0f
Parents: af956f5
Author: QiangCai 
Authored: Tue Jan 3 16:28:06 2017 +0800
Committer: jackylk 
Committed: Wed Jan 4 17:42:46 2017 +0800

--
 integration/spark-common/pom.xml|   5 -
 .../carbondata/spark/csv/CarbonCsvReader.scala  | 182 --
 .../spark/csv/CarbonCsvRelation.scala   | 249 ---
 .../carbondata/spark/csv/CarbonTextFile.scala   |  91 ---
 .../carbondata/spark/csv/DefaultSource.scala| 183 --
 .../spark/rdd/CarbonGlobalDictionaryRDD.scala   |  30 +++
 .../spark/rdd/NewCarbonDataLoadRDD.scala|  16 +-
 .../carbondata/spark/util/CommonUtil.scala  |  46 
 .../spark/util/GlobalDictionaryUtil.scala   |  75 +++---
 integration/spark/pom.xml   |   5 -
 .../spark/CarbonDataFrameWriter.scala   |  40 ++-
 .../spark/rdd/CarbonDataRDDFactory.scala|   3 +-
 .../spark/sql/CarbonDatasourceRelation.scala|   2 -
 .../dataload/DefaultSourceTestCase.scala| 105 
 .../TestLoadDataWithNotProperInputFile.scala|   1 +
 .../spark/rdd/CarbonDataRDDFactory.scala|   3 +-
 pom.xml |   1 -
 17 files changed, 158 insertions(+), 879 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e9093755/integration/spark-common/pom.xml
--
diff --git a/integration/spark-common/pom.xml b/integration/spark-common/pom.xml
index 8934c89..4bb4e0a 100644
--- a/integration/spark-common/pom.xml
+++ b/integration/spark-common/pom.xml
@@ -75,11 +75,6 @@
   junit
 
 
-  com.databricks
-  spark-csv_${scala.binary.version}
-  1.2.0
-
-
   org.scalatest
   scalatest_${scala.binary.version}
   2.2.1

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e9093755/integration/spark-common/src/main/scala/org/apache/carbondata/spark/csv/CarbonCsvReader.scala
--
diff --git 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/csv/CarbonCsvReader.scala
 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/csv/CarbonCsvReader.scala
deleted file mode 100644
index 551fc9c..000
--- 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/csv/CarbonCsvReader.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.databricks.spark.sql.readers
-
-/**
- * Parser for parsing lines in bulk. Use this when efficiency is desired.
- *
- * @param iter iterator over lines in the file
- * @param fieldSep the delimiter used to separate fields in a line
- * @param lineSep the delimiter used to separate lines
- * @param quote character used to quote fields
- * @param escape character used to escape the quote character
- * @param ignoreLeadingSpace ignore white space before a field
- * @param ignoreTrailingSpace ignore white space after a field
- * @param headers headers for the columns
- * @param inputBufSize size of buffer to use for parsing input, tune for 
performance
- * @param maxCols maximum number of columns allowed, for safety against bad 
inputs
- */
-class CarbonBulkCsvReader (iter: Iterator[String],
-split: Int,
-fieldSep: Char = ',',
-lineSep: String = "\n",
-quote: Char = '"',
-escape: Char = '\\',
-commentMarker: Char = '#',
-ignoreLeadingSpace: Bool

[1/2] incubator-carbondata git commit: [CARBONDATA-568][Follow-Up] clean up code for carbon-core module

2017-01-04 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 2ad623dcb -> f8ef805e9


[CARBONDATA-568][Follow-Up] clean up code for carbon-core module

using "new java.util.LinkedHashSet" instead of "new util.LinkedHashSet"


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/109d3833
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/109d3833
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/109d3833

Branch: refs/heads/master
Commit: 109d3833b5e9fd4ed9c2f231145c149be71903a5
Parents: 2ad623d
Author: Zhang Zhichao <441586...@qq.com>
Authored: Thu Jan 5 00:03:41 2017 +0800
Committer: Zhang Zhichao <441586...@qq.com>
Committed: Thu Jan 5 00:03:41 2017 +0800

--
 .../scala/org/apache/spark/sql/CarbonDatasourceRelation.scala | 7 ++-
 1 file changed, 2 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/109d3833/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
--
diff --git 
a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
 
b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
index 2091598..6cbc517 100644
--- 
a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
+++ 
b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
@@ -17,9 +17,6 @@
 
 package org.apache.spark.sql
 
-import java.util
-import java.util.LinkedHashSet
-
 import scala.collection.JavaConverters._
 import scala.language.implicitConversions
 
@@ -204,7 +201,7 @@ case class CarbonRelation(
   }
 
   val dimensionsAttr = {
-val sett = new util.LinkedHashSet(
+val sett = new java.util.LinkedHashSet(
   
tableMeta.carbonTable.getDimensionByTableName(tableMeta.carbonTableIdentifier.getTableName)
   .asScala.asJava)
 sett.asScala.toSeq.filter(!_.getColumnSchema.isInvisible).map(dim => {
@@ -230,7 +227,7 @@ case class CarbonRelation(
 
   val measureAttr = {
 val factTable = tableMeta.carbonTable.getFactTableName
-new util.LinkedHashSet(
+new java.util.LinkedHashSet(
   tableMeta.carbonTable.
   getMeasureByTableName(tableMeta.carbonTable.getFactTableName).
   asScala.asJava).asScala.toSeq.filter(!_.getColumnSchema.isInvisible)



[2/2] incubator-carbondata git commit: [CARBONDATA-568][Minor][Follow-Up] clean up code for carbon-core module This closes #498

2017-01-04 Thread jackylk
[CARBONDATA-568][Minor][Follow-Up] clean up code for carbon-core module This 
closes #498


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/f8ef805e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/f8ef805e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/f8ef805e

Branch: refs/heads/master
Commit: f8ef805e9254c35fafcbc907a14d84c0219eb10a
Parents: 2ad623d 109d383
Author: jackylk 
Authored: Thu Jan 5 00:27:23 2017 +0800
Committer: jackylk 
Committed: Thu Jan 5 00:27:23 2017 +0800

--
 .../scala/org/apache/spark/sql/CarbonDatasourceRelation.scala | 7 ++-
 1 file changed, 2 insertions(+), 5 deletions(-)
--




[01/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master b0750c192 -> 49727a273


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
--
diff --git 
a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
 
b/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
deleted file mode 100644
index adb7a1c..000
--- 
a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.spark.testsuite.allqueries
-
-import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
-class InsertIntoCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
-  var timeStampPropOrig: String = _
-  override def beforeAll {
-dropTableIfExists
-timeStampPropOrig = 
CarbonProperties.getInstance().getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT)
-
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
-sql("create table THive (imei string,deviceInformationId int,MAC 
string,deviceColor string,device_backColor string,modelId string,marketName 
string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series 
string,productionDate timestamp,bomCode string,internalModels string, 
deliveryTime string, channelsId string, channelsName string , deliveryAreaId 
string, deliveryCountry string, deliveryProvince string, deliveryCity 
string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, 
ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, 
ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet 
string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion 
string, Active_operaSysVersion string, Active_BacVerNumber string, 
Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer 
string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, 
Active_phonePADPartitionedVersions st
 ring, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), 
Latest_HOUR string, Latest_areaId string, Latest_country string, 
Latest_province string, Latest_city string, Latest_district string, 
Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, 
Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer 
string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, 
Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, 
Latest_phonePADPartitionedVersions string, Latest_operatorId string, 
gamePointDescription string,gamePointId double,contractNumber BigInt) ROW 
FORMAT DELIMITED FIELDS TERMINATED BY ','")
-sql("LOAD DATA local INPATH '../spark/src/test/resources/100_olap.csv' 
INTO TABLE THive")
-  }
-  test("insert from hive") {
-sql("create table TCarbon1 (imei string,deviceInformationId int,MAC 
string,deviceColor string,device_backColor string,modelId string,marketName 
string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series 
string,productionDate timestamp,bomCode string,internalModels string, 
deliveryTime string, channelsId string, channelsName string , deliveryAreaId 
string, deliveryCountry string, deliveryProvince string, deliveryCity 
string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, 
ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, 
ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet 
string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion 
string, 

[21/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
new file mode 100644
index 000..9912ec4
--- /dev/null
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
@@ -0,0 +1,159 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.common.util
+
+import java.util.{Locale, TimeZone}
+
+import org.apache.carbondata.common.logging.LogServiceFactory
+import scala.collection.JavaConversions._
+
+import org.apache.spark.sql.catalyst.plans._
+import org.apache.spark.sql.catalyst.util._
+import org.apache.spark.sql.test.TestQueryExecutor
+import org.apache.spark.sql.{DataFrame, Row, SQLContext}
+
+class QueryTest extends PlanTest {
+
+  val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+
+  // Timezone is fixed to America/Los_Angeles for those timezone sensitive 
tests (timestamp_*)
+  TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
+  // Add Locale setting
+  Locale.setDefault(Locale.US)
+
+  /**
+   * Runs the plan and makes sure the answer contains all of the keywords, or 
the
+   * none of keywords are listed in the answer
+   * @param df the [[DataFrame]] to be executed
+   * @param exists true for make sure the keywords are listed in the output, 
otherwise
+   *   to make sure none of the keyword are not listed in the 
output
+   * @param keywords keyword in string array
+   */
+  def checkExistence(df: DataFrame, exists: Boolean, keywords: String*) {
+val outputs = df.collect().map(_.mkString).mkString
+for (key <- keywords) {
+  if (exists) {
+assert(outputs.contains(key), s"Failed for $df ($key doesn't exist in 
result)")
+  } else {
+assert(!outputs.contains(key), s"Failed for $df ($key existed in the 
result)")
+  }
+}
+  }
+
+  def sqlTest(sqlString: String, expectedAnswer: Seq[Row])(implicit 
sqlContext: SQLContext) {
+test(sqlString) {
+  checkAnswer(sqlContext.sql(sqlString), expectedAnswer)
+}
+  }
+
+  /**
+   * Runs the plan and makes sure the answer matches the expected result.
+   * @param df the [[DataFrame]] to be executed
+   * @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
+   */
+  protected def checkAnswer(df: DataFrame, expectedAnswer: Seq[Row]): Unit = {
+QueryTest.checkAnswer(df, expectedAnswer) match {
+  case Some(errorMessage) => fail(errorMessage)
+  case None =>
+}
+  }
+
+  protected def checkAnswer(df: DataFrame, expectedAnswer: Row): Unit = {
+checkAnswer(df, Seq(expectedAnswer))
+  }
+
+  protected def checkAnswer(df: DataFrame, expectedAnswer: DataFrame): Unit = {
+checkAnswer(df, expectedAnswer.collect())
+  }
+
+  def sql(sqlText: String): DataFrame = TestQueryExecutor.INSTANCE.sql(sqlText)
+
+  val sqlContext: SQLContext = TestQueryExecutor.INSTANCE.sqlContext
+
+  val storeLocation = TestQueryExecutor.storeLocation
+  val resourcesPath = TestQueryExecutor.resourcesPath
+  val integrationPath = TestQueryExecutor.integrationPath
+}
+
+object QueryTest {
+
+  def checkAnswer(df: DataFrame, expectedAnswer: java.util.List[Row]): String 
= {
+checkAnswer(df, expectedAnswer.toSeq) match {
+  case Some(errorMessage) => errorMessage
+  case None => null
+}
+  }
+
+  /**
+   * Runs the plan and makes sure the answer matches the expected result.
+   * If there was exception during the execution or the contents of the 
DataFrame does not
+   * match the expected result, an error message will be returned. Otherwise, 
a [[None]] will
+   * be returned.
+   * @param df the [[DataFrame]] to be executed
+   * @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
+   */
+  def checkAnswer(df: DataFrame, expectedAnswer: Seq[Row]): Option[String] = {
+val isSorted = df.logicalPlan.collect { ca

[03/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark2/src/test/resources/dataDiff.csv
--
diff --git a/integration/spark2/src/test/resources/dataDiff.csv 
b/integration/spark2/src/test/resources/dataDiff.csv
deleted file mode 100644
index 6407b10..000
--- a/integration/spark2/src/test/resources/dataDiff.csv
+++ /dev/null
@@ -1,1001 +0,0 @@
-ID,date,country,name,phonetype,serialname,salary
-1,2015/7/23,china,aaa1,phone197,ASD69643,15000
-2,2015/7/24,china,aaa2,phone756,ASD42892,15001
-3,2015/7/25,china,aaa3,phone1904,ASD37014,15002
-4,2015/7/26,china,aaa4,phone2435,ASD66902,15003
-5,2015/7/27,china,aaa5,phone2441,ASD90633,15004
-6,2015/7/28,china,aaa6,phone294,ASD59961,15005
-7,2015/7/29,china,aaa7,phone610,ASD14875,15006
-8,2015/7/30,china,aaa8,phone1848,ASD57308,15007
-9,2015/7/18,china,aaa9,phone706,ASD86717,15008
-10,2015/7/19,usa,aaa10,phone685,ASD30505,15009
-11,2015/7/18,china,aaa11,phone1554,ASD26101,15010
-12,2015/7/19,china,aaa12,phone1781,ASD85711,15011
-13,2015/7/20,china,aaa13,phone943,ASD39200,15012
-14,2015/7/21,china,aaa14,phone1954,ASD80468,15013
-15,2015/7/22,china,aaa15,phone451,ASD1954,15014
-16,2015/7/23,china,aaa16,phone390,ASD38513,15015
-17,2015/7/24,china,aaa17,phone1929,ASD86213,15016
-18,2015/7/25,usa,aaa18,phone910,ASD88812,15017
-19,2015/7/26,china,aaa19,phone2151,ASD9316,15018
-20,2015/7/27,china,aaa20,phone2625,ASD62597,15019
-21,2015/7/28,china,aaa21,phone1371,ASD27896,15020
-22,2015/7/29,china,aaa22,phone945,ASD79760,15021
-23,2015/7/30,china,aaa23,phone2177,ASD45410,15022
-24,2015/7/31,china,aaa24,phone1586,ASD80645,15023
-25,2015/8/1,china,aaa25,phone1310,ASD36408,15024
-26,2015/8/2,china,aaa26,phone1579,ASD14571,15025
-27,2015/8/3,china,aaa27,phone2123,ASD36243,15026
-28,2015/8/4,china,aaa28,phone2334,ASD57825,15027
-29,2015/8/5,china,aaa29,phone1166,ASD26161,15028
-30,2015/8/6,china,aaa30,phone2248,ASD47899,15029
-31,2015/8/7,china,aaa31,phone475,ASD89811,15030
-32,2015/8/8,china,aaa32,phone2499,ASD87974,15031
-33,2015/8/9,china,aaa33,phone2333,ASD62408,15032
-34,2015/8/10,china,aaa34,phone1128,ASD73138,15033
-35,2015/8/11,china,aaa35,phone1063,ASD29573,15034
-36,2015/8/12,china,aaa36,phone1633,ASD82574,15035
-37,2015/8/13,china,aaa37,phone775,ASD47938,15036
-38,2015/8/14,china,aaa38,phone817,ASD40947,15037
-39,2015/8/15,china,aaa39,phone2221,ASD6379,15038
-40,2015/8/16,china,aaa40,phone2289,ASD48374,15039
-41,2015/8/17,china,aaa41,phone599,ASD44560,15040
-42,2015/8/18,china,aaa42,phone384,ASD613,15041
-43,2015/8/19,china,aaa43,phone731,ASD66050,15042
-44,2015/8/20,china,aaa44,phone2128,ASD39759,15043
-45,2015/8/21,china,aaa45,phone1503,ASD31200,15044
-46,2015/8/22,china,aaa46,phone1833,ASD22945,15045
-47,2015/8/23,china,aaa47,phone2346,ASD80162,15046
-48,2015/8/24,china,aaa48,phone2714,ASD27822,15047
-49,2015/8/25,china,aaa49,phone1582,ASD21279,15048
-50,2015/8/26,china,aaa50,phone83,ASD17242,15049
-51,2015/8/27,china,aaa51,phone54,ASD29131,15050
-52,2015/8/28,china,aaa52,phone526,ASD73647,15051
-53,2015/8/29,china,aaa53,phone1308,ASD80493,15052
-54,2015/8/30,china,aaa54,phone2785,ASD30573,15053
-55,2015/8/31,china,aaa55,phone2133,ASD49757,15054
-56,2015/9/1,china,aaa56,phone871,ASD54753,15055
-57,2015/9/2,china,aaa57,phone1570,ASD25758,15056
-58,2015/9/3,china,aaa58,phone434,ASD30291,15057
-59,2015/9/4,china,aaa59,phone2023,ASD60739,15058
-60,2015/9/5,china,aaa60,phone1755,ASD4955,15059
-61,2015/9/6,china,aaa61,phone1120,ASD41678,15060
-62,2015/9/7,china,aaa62,phone526,ASD73647,15061
-63,2015/9/8,china,aaa63,phone111,ASD20917,15062
-64,2015/9/9,china,aaa64,phone2477,ASD78171,15063
-65,2015/9/10,china,aaa65,phone1458,ASD3023,15064
-66,2015/9/11,china,aaa66,phone33,ASD54379,15065
-67,2015/9/12,china,aaa67,phone1710,ASD65296,15066
-68,2015/9/13,china,aaa68,phone118,ASD4568,15067
-69,2015/9/14,china,aaa69,phone2772,ASD42161,15068
-70,2015/9/15,china,aaa70,phone1013,ASD88261,15069
-71,2015/9/16,china,aaa71,phone1606,ASD33903,15070
-72,2015/9/17,china,aaa72,phone2800,ASD60308,15071
-73,2015/9/18,china,aaa73,phone2461,ASD14645,15072
-74,2015/9/19,china,aaa74,phone1038,ASD66620,15073
-75,2015/9/20,china,aaa75,phone2882,ASD23220,15074
-76,2015/9/21,china,aaa76,phone1665,ASD31618,15075
-77,2015/9/22,china,aaa77,phone2991,ASD37964,15076
-78,2015/9/23,china,aaa78,phone620,ASD7257,15077
-79,2015/9/24,china,aaa79,phone1097,ASD12510,15078
-80,2015/9/25,usa,aaa80,phone1668,ASD41149,15079
-81,2015/9/26,china,aaa81,phone2869,ASD95862,15080
-82,2015/9/27,china,aaa82,phone2506,ASD77011,15081
-83,2015/9/28,china,aaa83,phone2897,ASD6674,15082
-84,2015/9/29,china,aaa84,phone954,ASD72595,15083
-85,2015/9/30,china,aaa85,phone1382,ASD86617,15084
-86,2015/10/1,china,aaa86,phone284,ASD31454,15085
-87,2015/10/2,china,aaa87,phone1000,ASD1404,15086
-88,2015/10/3,china,aaa88,phone1813,ASD6955,15087
-89,2015/10/4,china,aaa89,phone2301,ASD14198,15088
-90,2015/10/5,france,aaa90,phone791,ASD13426

[04/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/sortexpr/AllDataTypesTestCaseSort.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/sortexpr/AllDataTypesTestCaseSort.scala
 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/sortexpr/AllDataTypesTestCaseSort.scala
deleted file mode 100644
index 5cbe93d..000
--- 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/sortexpr/AllDataTypesTestCaseSort.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.sortexpr
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-/**
- * Test Class for sort expression query on multiple datatypes
- * @author N00902756
- *
- */
-
-class AllDataTypesTestCaseSort extends QueryTest with BeforeAndAfterAll {
-
-  override def beforeAll {
-sql("CREATE TABLE alldatatypestablesort (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) 
STORED BY 'org.apache.carbondata.format'")
-sql("LOAD DATA local inpath './src/test/resources/data.csv' INTO TABLE 
alldatatypestablesort OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-
-sql("CREATE TABLE alldatatypestablesort_hive (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary 
int)row format delimited fields terminated by ','")
-sql("LOAD DATA local inpath './src/test/resources/datawithoutheader.csv' 
INTO TABLE alldatatypestablesort_hive");
-
-  }
-
-  test("select empno,empname,utilization,count(salary),sum(empno) from 
alldatatypestablesort where empname in ('arvind','ayushi') group by 
empno,empname,utilization order by empno") {
-checkAnswer(
-  sql("select empno,empname,utilization,count(salary),sum(empno) from 
alldatatypestablesort where empname in ('arvind','ayushi') group by 
empno,empname,utilization order by empno"),
-  sql("select empno,empname,utilization,count(salary),sum(empno) from 
alldatatypestablesort_hive where empname in ('arvind','ayushi') group by 
empno,empname,utilization order by empno"))
-  }
-
-  override def afterAll {
-sql("drop table alldatatypestablesort")
-sql("drop table alldatatypestablesort_hive")
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/sortexpr/IntegerDataTypeTestCase.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/sortexpr/IntegerDataTypeTestCase.scala
 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/sortexpr/IntegerDataTypeTestCase.scala
deleted file mode 100644
index 3a97884..000
--- 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/sortexpr/IntegerDataTypeTestCase.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License i

[27/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
new file mode 100644
index 000..2f865fd
--- /dev/null
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataGeneral.scala
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.integration.spark.testsuite.dataload
+
+import java.math.BigDecimal
+
+import scala.collection.mutable.ArrayBuffer
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.common.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.carbon.path.{CarbonStorePath, 
CarbonTablePath}
+import org.apache.carbondata.core.datastorage.store.impl.FileFactory
+
+class TestLoadDataGeneral extends QueryTest with BeforeAndAfterAll {
+
+  override def beforeAll {
+sql("DROP TABLE IF EXISTS loadtest")
+sql(
+  """
+| CREATE TABLE loadtest(id int, name string, city string, age int)
+| STORED BY 'org.apache.carbondata.format'
+  """.stripMargin)
+
+  }
+
+  private def checkSegmentExists(
+  segmentId: String,
+  datbaseName: String,
+  tableName: String): Boolean = {
+val carbonTable = 
org.apache.carbondata.core.carbon.metadata.CarbonMetadata.getInstance()
+  .getCarbonTable(datbaseName + "_" + tableName)
+val partitionPath = CarbonStorePath.getCarbonTablePath(storeLocation,
+  carbonTable.getCarbonTableIdentifier).getPartitionDir("0")
+val fileType: FileFactory.FileType = FileFactory.getFileType(partitionPath)
+val carbonFile = FileFactory.getCarbonFile(partitionPath, fileType)
+val segments: ArrayBuffer[String] = ArrayBuffer()
+carbonFile.listFiles.foreach { file =>
+  segments += 
CarbonTablePath.DataPathUtil.getSegmentId(file.getAbsolutePath + "/dummy")
+}
+segments.contains(segmentId)
+  }
+
+  test("test data loading CSV file") {
+val testData = s"$resourcesPath/sample.csv"
+sql(s"LOAD DATA LOCAL INPATH '$testData' into table loadtest")
+checkAnswer(
+  sql("SELECT COUNT(*) FROM loadtest"),
+  Seq(Row(4))
+)
+  }
+
+  test("test data loading CSV file without extension name") {
+val testData = s"$resourcesPath/sample"
+sql(s"LOAD DATA LOCAL INPATH '$testData' into table loadtest")
+checkAnswer(
+  sql("SELECT COUNT(*) FROM loadtest"),
+  Seq(Row(8))
+)
+  }
+
+  test("test data loading GZIP compressed CSV file") {
+val testData = s"$resourcesPath/sample.csv.gz"
+sql(s"LOAD DATA LOCAL INPATH '$testData' into table loadtest")
+checkAnswer(
+  sql("SELECT COUNT(*) FROM loadtest"),
+  Seq(Row(12))
+)
+  }
+
+  test("test data loading BZIP2 compressed CSV file") {
+val testData = s"$resourcesPath/sample.csv.bz2"
+sql(s"LOAD DATA LOCAL INPATH '$testData' into table loadtest")
+checkAnswer(
+  sql("SELECT COUNT(*) FROM loadtest"),
+  Seq(Row(16))
+)
+  }
+
+  test("test data loading CSV file with delimiter char \\017") {
+val testData = s"$resourcesPath/sample_withDelimiter017.csv"
+sql(s"LOAD DATA LOCAL INPATH '$testData' into table loadtest options 
('delimiter'='\\017')")
+checkAnswer(
+  sql("SELECT COUNT(*) FROM loadtest"),
+  Seq(Row(20))
+)
+  }
+
+  test("test data loading with invalid values for mesasures") {
+val testData = s"$resourcesPath/invalidMeasures.csv"
+sql("drop table if exists invalidMeasures")
+sql("CREATE TABLE invalidMeasures (country String, salary double, age 
decimal(10,2)) STORED BY 'carbondata'")
+sql(s"LOAD DATA LOCAL INPATH '$testData' into table invalidMeasures 
options('Fileheader'='country,salary,age')")
+checkAnswer(
+  sql

[35/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/10dim_4msr.csv
--
diff --git a/integration/spark-common-test/src/test/resources/10dim_4msr.csv 
b/integration/spark-common-test/src/test/resources/10dim_4msr.csv
new file mode 100644
index 000..5727288
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/10dim_4msr.csv
@@ -0,0 +1,1000 @@
+column1338,column233,column383,column45,column549,column639,column727,column817,column94,column106,908,316,489,573
+column1814,column2342,column333,column429,column50,column611,column716,column816,column910,column1043,922,409,948,330
+column1355,column2217,column315,column439,column536,column643,column749,column823,column927,column1039,109,776,613,925
+column1771,column2363,column318,column434,column544,column625,column78,column825,column924,column1034,66,378,438,123
+column1610,column2313,column388,column429,column537,column613,column742,column828,column927,column109,548,786,927,385
+column1577,column2191,column323,column414,column529,column613,column78,column845,column93,column1035,380,910,261,442
+column1801,column2196,column382,column440,column50,column632,column717,column82,column936,column1019,844,681,293,312
+column1127,column235,column374,column416,column55,column617,column730,column85,column928,column1015,624,825,172,481
+column1905,column2137,column334,column444,column525,column619,column716,column813,column94,column1022,709,953,85,735
+column1129,column2375,column374,column447,column55,column66,column725,column812,column932,column105,879,974,351,844
+column1666,column2268,column395,column426,column55,column642,column75,column836,column90,column1042,438,959,430,809
+column1259,column231,column311,column42,column516,column646,column74,column80,column916,column1033,393,552,916,770
+column1614,column2197,column316,column447,column542,column619,column746,column827,column931,column1024,794,149,513,836
+column1417,column20,column326,column434,column521,column631,column740,column88,column90,column1047,785,444,478,764
+column1192,column2467,column360,column427,column55,column632,column711,column814,column944,column1010,185,70,317,389
+column1105,column2448,column34,column47,column517,column614,column737,column88,column927,column1025,18,545,466,651
+column1447,column2177,column326,column431,column55,column639,column741,column819,column924,column105,701,777,313,270
+column1512,column2116,column349,column41,column57,column615,column722,column836,column944,column1016,785,948,834,830
+column1485,column227,column390,column441,column520,column614,column728,column837,column914,column1047,378,629,253,473
+column1932,column260,column342,column412,column549,column61,column723,column88,column921,column1040,640,711,467,1041
+column1522,column2349,column364,column441,column529,column64,column74,column817,column939,column1031,732,995,991,55
+column1541,column2355,column377,column420,column532,column63,column72,column820,column917,column1019,378,553,913,953
+column1608,column2437,column350,column439,column56,column641,column716,column813,column934,column1040,693,600,952,545
+column1479,column2248,column316,column425,column534,column624,column732,column848,column927,column107,631,361,383,332
+column1717,column225,column348,column42,column537,column622,column719,column843,column921,column1010,219,67,544,666
+column1961,column2350,column314,column45,column526,column635,column716,column832,column920,column100,1008,580,457,846
+column1899,column262,column398,column413,column55,column618,column713,column829,column920,column1019,359,839,651,297
+column1886,column263,column380,column433,column524,column69,column75,column83,column92,column1011,147,271,67,760
+column1752,column2491,column359,column447,column513,column614,column737,column816,column914,column100,98,544,330,1021
+column1913,column2430,column372,column420,column512,column64,column749,column833,column922,column1049,885,55,843,176
+column1112,column2248,column370,column47,column57,column68,column78,column841,column933,column1012,148,298,805,788
+column1157,column2462,column354,column413,column515,column65,column74,column833,column940,column1019,674,449,723,396
+column1104,column2454,column319,column440,column538,column68,column710,column815,column96,column1049,903,399,18,234
+column1391,column2335,column390,column413,column52,column636,column73,column88,column925,column1026,840,752,267,237
+column1994,column2358,column376,column44,column54,column627,column741,column839,column93,column1032,237,543,20,70
+column1364,column2334,column374,column418,column516,column644,column746,column812,column942,column1043,956,135,892,203
+column1495,column2151,column319,column410,column533,column69,column715,column834,column933,column1030,423,561,856,868
+column1684,column27,column324,column444,column541,column637,column726,column820,column931,column1016,458,913,500,746
+column1975,column2314,co

[25/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithOldCarbonDataFile.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithOldCarbonDataFile.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithOldCarbonDataFile.scala
new file mode 100644
index 000..0deb14e
--- /dev/null
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithOldCarbonDataFile.scala
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.spark.testsuite.allqueries
+
+import org.apache.spark.sql.common.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+/*
+ * Test Class for query without data load
+ *
+ */
+class TestQueryWithOldCarbonDataFile extends QueryTest with BeforeAndAfterAll {
+  override def beforeAll {
+ 
CarbonProperties.getInstance.addProperty(CarbonCommonConstants.CARBON_DATA_FILE_VERSION,
 "V1");
+sql("drop table if exists OldFormatTable")
+sql("drop table if exists OldFormatTableHIVE")
+ sql("""
+   CREATE TABLE IF NOT EXISTS OldFormatTable
+   (country String,
+   name String, phonetype String, serialname String, salary Int)
+   STORED BY 'carbondata'
+   """)
+  sql("""
+   CREATE TABLE IF NOT EXISTS OldFormatTableHIVE
+   (country String,
+   name String, phonetype String, serialname String, salary Int)
+  row format delimited fields terminated by ','
+   """)  
+sql(s"LOAD DATA local inpath '$resourcesPath/OLDFORMATTABLE.csv' INTO 
table OldFormatTable")
+   sql(s"""
+   LOAD DATA LOCAL INPATH '$resourcesPath/OLDFORMATTABLEHIVE.csv' into 
table OldFormatTableHIVE
+   """)
+
+  }
+
+  
CarbonProperties.getInstance.addProperty(CarbonCommonConstants.CARBON_DATA_FILE_VERSION,
 "V2")
+  test("Test select * query") {
+checkAnswer(
+  sql("select * from OldFormatTable"), sql("select * from 
OldFormatTableHIVE")
+)
+  }
+
+  override def afterAll {
+ 
CarbonProperties.getInstance.addProperty(CarbonCommonConstants.CARBON_DATA_FILE_VERSION,
 "V1")
+sql("drop table if exists OldFormatTable")
+sql("drop table if exists OldFormatTableHIVE")
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala
new file mode 100644
index 000..4c95633
--- /dev/null
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/TestQueryWithoutDataLoad.scala
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and lim

[17/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/complexdata.csv
--
diff --git a/integration/spark/src/test/resources/complexdata.csv 
b/integration/spark/src/test/resources/complexdata.csv
deleted file mode 100644
index c089e93..000
--- a/integration/spark/src/test/resources/complexdata.csv
+++ /dev/null
@@ -1,100 +0,0 @@
-1,109,4ROM size,29-11-2015,1AA1$2BB1,MAC1$MAC2$MAC3,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,29-11-2015$29-11-2015:29-11-2015,109,2738.562
-10,93,1ROM size,29-11-2015,1AA10$2BB10,MAC4$MAC5$MAC6,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,30-11-2015$30-11-2015:30-11-2015,93,1714.635
-100,2591,2ROM size,29-11-2015,1AA100$2BB100,MAC7$MAC8$MAC9,4:Chinese:Hunan 
Province:xiangtan:xiangtan:jianshelu$4:India:Hunan 
Province:xiangtan:xiangtan:jianshelu,01-12-2015$01-12-2015:01-12-2015,2591,1271
-1000,2531,2ROM size,29-11-2015,1AA1000$2BB1000,MAC10$$MAC12,6:Chinese:Hubei 
Province:wuhan:hongshan:hongshan$6:India:New 
Delhi:wuhan:hongshan:hongshan,02-12-2015$02-12-2015:02-12-2015,2531,692
-1,2408,0ROM 
size,29-11-2015,1AA1$2BB1,MAC13$$MAC15,2:Chinese:Guangdong 
Province:guangzhou:longhua:mingzhi$2:India:Guangdong 
Province:guangzhou:longhua:mingzhi,03-12-2015$03-12-2015:03-12-2015,2408,2175
-10,1815,0ROM 
size,29-11-2015,1AA10$2BB10,MAC16$$MAC18,6:Chinese:Hubei 
Province:wuhan:hongshan:hongshan$6:India:New 
Delhi:wuhan:hongshan:hongshan,04-12-2015$04-12-2015:04-12-2015,1815,136
-100,2479,4ROM 
size,29-11-2015,1AA100$2BB100,MAC19$$MAC21,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,05-12-2015$05-12-2015:05-12-2015,2479,1600
-11,1845,7ROM size,29-11-2015,1AA11$,MAC22$$MAC24,4:Chinese:Hunan 
Province:xiangtan:xiangtan:jianshelu$4:India:Hunan 
Province:xiangtan:xiangtan:jianshelu,06-12-2015$06-12-2015:06-12-2015,1845,505
-12,2008,1ROM size,29-11-2015,1AA12$,MAC25$$MAC27,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,07-12-2015$07-12-2015:07-12-2015,2008,1341
-13,1121,5ROM size,29-11-2015,1AA13$,MAC28$$MAC30,5:Chinese:Hunan 
Province:zhuzhou:tianyuan:tianyua$5:India:Hunan 
Province:zhuzhou:tianyuan:tianyua,08-12-2015$08-12-2015:08-12-2015,1121,2239
-14,1511,8ROM size,29-11-2015,1AA14$,MAC31$$MAC33,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,09-12-2015$09-12-2015:09-12-2015,1511,2970
-15,2759,0ROM size,29-11-2015,1AA15$,MAC34$$MAC36,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,10-12-2015$10-12-2015:10-12-2015,2759,2593
-16,2069,7ROM size,29-11-2015,1AA16$,MAC37$$MAC39,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,11-12-2015$11-12-2015:11-12-2015,2069,2572
-17,396,7ROM size,29-11-2015,1AA17$,MAC40$$MAC42,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,12-12-2015$12-12-2015:12-12-2015,396,1991
-18,104,2ROM size,29-11-2015,1AA18$,MAC43$$MAC45,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,13-12-2015$13-12-2015:13-12-2015,104,1442
-19,477,3ROM size,29-11-2015,1AA19$,MAC46$$MAC48,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,14-12-2015$14-12-2015:14-12-2015,477,1841
-10001,546,8ROM size,29-11-2015,1AA10001$2,MAC49$$MAC51,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,15-12-2015$15-12-2015:15-12-2015,546,298
-100010,2696,3ROM 
size,29-11-2015,1AA100010$2BB100010,MAC52$$MAC54,5:Chinese:Hunan 
Province:zhuzhou:tianyuan:tianyua$5:India:Hunan 
Province:zhuzhou:tianyuan:tianyua,16-12-2015$16-12-2015:16-12-2015,2696,79
-100011,466,2ROM 
size,29-11-2015,1AA100011$2BB100011,MAC55$$MAC57,2:Chinese:Guangdong 
Province:guangzhou:longhua:mingzhi$2:India:Guangdong 
Province:guangzhou:longhua:mingzhi,17-12-2015$17-12-2015:17-12-2015,466,202
-100012,2644,2ROM 
size,29-11-2015,1AA100012$2BB100012,MAC58$$MAC60,4:Chinese:Hunan 
Province:xiangtan:xiangtan:jianshelu$4:India:Hunan 
Province:xiangtan:xiangtan:jianshelu,18-12-2015$18-12-2015:18-12-2015,2644,568
-100013,2167,3ROM 
size,29-11-2015,1AA100013$2BB100013,MAC61$MAC62,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,19-12-2015$19-12-2015:19-12-2015,2167,355
-100014,1069,7ROM 
size,29-11-2015,1AA100014$2BB100014,MAC64$MAC65,5:Chinese:Hunan 
Province:zhuzhou:tianyuan:tianyua$5:India:Hunan 
Province:zhuzhou:tianyuan:tianyua,20-12-2015$20-12-2015:20-12-2015,1069,151
-100015,1447,9ROM 
size,29-11-2015,1AA100015$2BB100015,MAC67$MAC68,4:Chinese:Hunan 
Province:xiangtan:xiangtan:ji

[14/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/example-data.csv
--
diff --git a/integration/spark/src/test/resources/example-data.csv 
b/integration/spark/src/test/resources/example-data.csv
deleted file mode 100644
index 72d1063..000
--- a/integration/spark/src/test/resources/example-data.csv
+++ /dev/null
@@ -1,1001 +0,0 @@
-ID,date,country,name,phonetype,serialname,salary
-1,2015/7/23,china,aaa1,phone197,ASD69643,15000
-2,2015/7/24,china,aaa2,phone756,ASD42892,15001
-3,2015/7/25,china,aaa3,phone1904,ASD37014,15002
-4,2015/7/26,china,aaa4,phone2435,ASD66902,15003
-5,2015/7/27,china,aaa5,phone2441,ASD90633,15004
-6,2015/7/28,china,aaa6,phone294,ASD59961,15005
-7,2015/7/29,china,aaa7,phone610,ASD14875,15006
-8,2015/7/30,china,aaa8,phone1848,ASD57308,15007
-9,2015/7/18,china,aaa9,phone706,ASD86717,15008
-10,2015/7/19,usa,aaa10,phone685,ASD30505,15009
-11,2015/7/18,china,aaa11,phone1554,ASD26101,15010
-12,2015/7/19,china,aaa12,phone1781,ASD85711,15011
-13,2015/7/20,china,aaa13,phone943,ASD39200,15012
-14,2015/7/21,china,aaa14,phone1954,ASD80468,15013
-15,2015/7/22,china,aaa15,phone451,ASD1954,15014
-16,2015/7/23,china,aaa16,phone390,ASD38513,15015
-17,2015/7/24,china,aaa17,phone1929,ASD86213,15016
-18,2015/7/25,usa,aaa18,phone910,ASD88812,15017
-19,2015/7/26,china,aaa19,phone2151,ASD9316,15018
-20,2015/7/27,china,aaa20,phone2625,ASD62597,15019
-21,2015/7/28,china,aaa21,phone1371,ASD27896,15020
-22,2015/7/29,china,aaa22,phone945,ASD79760,15021
-23,2015/7/30,china,aaa23,phone2177,ASD45410,15022
-24,2015/7/31,china,aaa24,phone1586,ASD80645,15023
-25,2015/8/1,china,aaa25,phone1310,ASD36408,15024
-26,2015/8/2,china,aaa26,phone1579,ASD14571,15025
-27,2015/8/3,china,aaa27,phone2123,ASD36243,15026
-28,2015/8/4,china,aaa28,phone2334,ASD57825,15027
-29,2015/8/5,china,aaa29,phone1166,ASD26161,15028
-30,2015/8/6,china,aaa30,phone2248,ASD47899,15029
-31,2015/8/7,china,aaa31,phone475,ASD89811,15030
-32,2015/8/8,china,aaa32,phone2499,ASD87974,15031
-33,2015/8/9,china,aaa33,phone2333,ASD62408,15032
-34,2015/8/10,china,aaa34,phone1128,ASD73138,15033
-35,2015/8/11,china,aaa35,phone1063,ASD29573,15034
-36,2015/8/12,china,aaa36,phone1633,ASD82574,15035
-37,2015/8/13,china,aaa37,phone775,ASD47938,15036
-38,2015/8/14,china,aaa38,phone817,ASD40947,15037
-39,2015/8/15,china,aaa39,phone2221,ASD6379,15038
-40,2015/8/16,china,aaa40,phone2289,ASD48374,15039
-41,2015/8/17,china,aaa41,phone599,ASD44560,15040
-42,2015/8/18,china,aaa42,phone384,ASD613,15041
-43,2015/8/19,china,aaa43,phone731,ASD66050,15042
-44,2015/8/20,china,aaa44,phone2128,ASD39759,15043
-45,2015/8/21,china,aaa45,phone1503,ASD31200,15044
-46,2015/8/22,china,aaa46,phone1833,ASD22945,15045
-47,2015/8/23,china,aaa47,phone2346,ASD80162,15046
-48,2015/8/24,china,aaa48,phone2714,ASD27822,15047
-49,2015/8/25,china,aaa49,phone1582,ASD21279,15048
-50,2015/8/26,china,aaa50,phone83,ASD17242,15049
-51,2015/8/27,china,aaa51,phone54,ASD29131,15050
-52,2015/8/28,china,aaa52,phone526,ASD73647,15051
-53,2015/8/29,china,aaa53,phone1308,ASD80493,15052
-54,2015/8/30,china,aaa54,phone2785,ASD30573,15053
-55,2015/8/31,china,aaa55,phone2133,ASD49757,15054
-56,2015/9/1,china,aaa56,phone871,ASD54753,15055
-57,2015/9/2,china,aaa57,phone1570,ASD25758,15056
-58,2015/9/3,china,aaa58,phone434,ASD30291,15057
-59,2015/9/4,china,aaa59,phone2023,ASD60739,15058
-60,2015/9/5,china,aaa60,phone1755,ASD4955,15059
-61,2015/9/6,china,aaa61,phone1120,ASD41678,15060
-62,2015/9/7,china,aaa62,phone526,ASD73647,15061
-63,2015/9/8,china,aaa63,phone111,ASD20917,15062
-64,2015/9/9,china,aaa64,phone2477,ASD78171,15063
-65,2015/9/10,china,aaa65,phone1458,ASD3023,15064
-66,2015/9/11,china,aaa66,phone33,ASD54379,15065
-67,2015/9/12,china,aaa67,phone1710,ASD65296,15066
-68,2015/9/13,china,aaa68,phone118,ASD4568,15067
-69,2015/9/14,china,aaa69,phone2772,ASD42161,15068
-70,2015/9/15,china,aaa70,phone1013,ASD88261,15069
-71,2015/9/16,china,aaa71,phone1606,ASD33903,15070
-72,2015/9/17,china,aaa72,phone2800,ASD60308,15071
-73,2015/9/18,china,aaa73,phone2461,ASD14645,15072
-74,2015/9/19,china,aaa74,phone1038,ASD66620,15073
-75,2015/9/20,china,aaa75,phone2882,ASD23220,15074
-76,2015/9/21,china,aaa76,phone1665,ASD31618,15075
-77,2015/9/22,china,aaa77,phone2991,ASD37964,15076
-78,2015/9/23,china,aaa78,phone620,ASD7257,15077
-79,2015/9/24,china,aaa79,phone1097,ASD12510,15078
-80,2015/9/25,usa,aaa80,phone1668,ASD41149,15079
-81,2015/9/26,china,aaa81,phone2869,ASD95862,15080
-82,2015/9/27,china,aaa82,phone2506,ASD77011,15081
-83,2015/9/28,china,aaa83,phone2897,ASD6674,15082
-84,2015/9/29,china,aaa84,phone954,ASD72595,15083
-85,2015/9/30,china,aaa85,phone1382,ASD86617,15084
-86,2015/10/1,china,aaa86,phone284,ASD31454,15085
-87,2015/10/2,china,aaa87,phone1000,ASD1404,15086
-88,2015/10/3,china,aaa88,phone1813,ASD6955,15087
-89,2015/10/4,china,aaa89,phone2301,ASD14198,15088
-90,2015/10/5,france,aaa90,phone

[31/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/datawithcomplexspecialchar.csv
--
diff --git 
a/integration/spark-common-test/src/test/resources/datawithcomplexspecialchar.csv
 
b/integration/spark-common-test/src/test/resources/datawithcomplexspecialchar.csv
new file mode 100644
index 000..b5d1beb
--- /dev/null
+++ 
b/integration/spark-common-test/src/test/resources/datawithcomplexspecialchar.csv
@@ -0,0 +1,151 @@
+customer_id,124_string_level_province,numeric_level,date_level,Time_level,account_num,lname,fname,mi,address1,address2,address3,address4,city,country,customer_region_id,phone1,phone2,marital_status,yearly_income,gender,total_children,num_children_at_home,education,member_card,occupation,houseowner,num_cars_owned,fullname
+1,Oaxaca,9535837227,26-08-1961,08:08:08,87462024688,Nowmer,Sheri,A.,2433 
Bailey RoadTlaxiaco,Mexico,30,271-555-9715,119-555-1969,M,$30K - 
$50K,F,4,2,Partial High School,Bronze,Skilled Manual,Y,4,Sheri Nowmer
+2,https://www.google.co.in/?gws_rd=cr&ei=BF7rUqOoEc6GrgeIooHQDQ#q=India+state 
BC,1.51E+09,15-08-1947,07:14:54,87470586299,Whelply,Derrick,I.,2219 Dewing 
AvenueSooke,Canada,101,211-555-7669,807-555-9033,S,$70K - 
$90K,M,1,0,Partial High School,Bronze,Professional,N,3,Derrick Whelply
+3,&&  1 2 3 4 5 special set to test &  & &  _ _ _  
786,1,01-01-1900,23:59:59,87475757600,Derry,Jeanne,,7640 First 
Ave.Issaquah,USA,21,656-555-2272,221-555-2493,M,$50K - $70K,F,1,1,Bachelors 
Degree,Bronze,Professional,Y,2,Jeanne Derry
+4,https://www.google.co.in/?gws_rd=cr&ei=BF7rUqOoEc6GrgeIooHQDQ#q=India+state 
BC,1.51E+09,15-08-1947,07:14:54,87500482201,Spence,Michael,J.,337 Tosca 
WayBurnaby,Canada,92,929-555-7279,272-555-2844,M,$10K - $30K,M,4,4,Partial 
High School,Normal,Skilled Manual,N,2,Michael Spence
+5,SpecialCharacter level ~!@#$%^&*()_+| 
CA,0,10-05-1951,00:00:00,87514054179,Gutierrez,Maya,,8668 Via 
NerudaNovato,USA,42,387-555-7172,260-555-6936,S,$30K - $50K,F,3,0,Partial 
College,Silver,Manual,N,3,Maya Gutierrez
+6,&&  1 2 3 4 5 special set to test &  & &  _ _ _  
786,1,01-01-1900,23:59:59,87517782449,Damstra,Robert,F.,1619 Stillman 
CourtLynnwood,USA,75,922-555-5465,333-555-5915,S,$70K - 
$90K,F,3,0,Bachelors Degree,Bronze,Professional,Y,3,Robert Damstra
+7,Oaxaca,9535837227,26-08-1961,08:08:08,87521172800,Kanagaki,Rebecca,,2860 D 
Mt. Hood CircleTlaxiaco,Mexico,30,515-555-6247,934-555-9211,M,$30K - 
$50K,F,2,1,Partial High School,Bronze,Manual,Y,3,Rebecca Kanagaki
+8,"esc  !@~##%%&**(*&((*()()*  ""  some thing ""  ' DF 
",0,10-05-1951,00:00:00,87539744377,Brunner,Kim,H.,6064 Brodia CourtSan 
Andres,Mexico,106,411-555-6825,130-555-6818,M,$50K - $70K,M,2,2,Bachelors 
Degree,Bronze,Professional,Y,3,Kim Brunner
+9,https://www.google.co.in/?gws_rd=cr&ei=BF7rUqOoEc6GrgeIooHQDQ#q=India+state 
BC,1.51E+09,15-08-1947,07:14:54,87544797658,Blumberg,Brenda,C.,7560 Trees 
DriveRichmond,Canada,90,815-555-3975,642-555-6483,M,$10K - 
$30K,M,5,3,Partial High School,Normal,Skilled Manual,Y,1,Brenda Blumberg
+10,1 2 3 4 5 6 7 8 9 0 ~!@#$%^&*() some thing long 
OR,0,10-05-1951,00:00:00,87568712234,Stanz,Darren,M.,1019 Kenwal Rd.Lake 
Oswego,USA,64,847-555-5443,212-555-8635,S,$30K - $50K,M,4,0,Bachelors 
Degree,Golden,Management,N,4,Darren Stanz
+11,SpecialCharacter level ~!@#$%^&*()_+| 
CA,0,10-05-1951,00:00:00,87572821378,Murraiin,Jonathan,V.,5423 Camby Rd.La 
Mesa,USA,11,612-555-4878,747-555-6928,S,$50K - $70K,M,4,0,High School 
Degree,Bronze,Manual,N,2,Jonathan Murraiin
+12,SpecialCharacter level ~!@#$%^&*()_+| 
CA,0,10-05-1951,00:00:00,87579237222,Creek,Jewel,C.,1792 Belmont Rd.Chula 
Vista,USA,13,555-555-2714,228-555-5450,S,$30K - $50K,F,1,0,High School 
Degree,Bronze,Skilled Manual,N,3,Jewel Creek
+13,LonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmember,0,10-05-1951,00:00:00,87587122917,Medina,Peggy,A.,3796
 Keller RidgeMexico City,Mexico,2,343-555-9778,785-555-2371,S,$30K - 
$50K,M,4,0,High School Degree,Bronze,Manual,N,4,Peggy Medina
+14,SpecialCharacter level ~!@#$%^&*()_+| 
CA,0,10-05-1951,00:00:00,87592626810,Rutledge,Bryan,K.,3074 Ardith 
DriveLincoln Acres,USA,10,659-555-3160,640-555-5439,M,$50K - 
$70K,F,2,2,Bachelors Degree,Bronze,Management,Y,2,Bryan Rutledge
+15,https://www.google.co.in/?gws_rd=cr&ei=BF7rUqOoEc6GrgeIooHQDQ#q=India+state 
BC,1.51E+09,15-08-1947,07:14:54,87597749829,Cavestany,Walter,G.,7987 Seawind 
Dr.Oak Bay,Canada,99,471-555-8853,560-555-4646,S,$90K - 
$110K,M,3,0,Graduate Degree,Bronze,Professional,Y,3,Walter Cavestany
+16,select * from 
scenario1_84gsinglecsv_updatedcolumns_test3;,1.00E+14,02-10-1969,20:20:20,87603285908,Planck,Peggy,M.,4864
 S

[16/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/dataDiff.csv
--
diff --git a/integration/spark/src/test/resources/dataDiff.csv 
b/integration/spark/src/test/resources/dataDiff.csv
deleted file mode 100644
index 6407b10..000
--- a/integration/spark/src/test/resources/dataDiff.csv
+++ /dev/null
@@ -1,1001 +0,0 @@
-ID,date,country,name,phonetype,serialname,salary
-1,2015/7/23,china,aaa1,phone197,ASD69643,15000
-2,2015/7/24,china,aaa2,phone756,ASD42892,15001
-3,2015/7/25,china,aaa3,phone1904,ASD37014,15002
-4,2015/7/26,china,aaa4,phone2435,ASD66902,15003
-5,2015/7/27,china,aaa5,phone2441,ASD90633,15004
-6,2015/7/28,china,aaa6,phone294,ASD59961,15005
-7,2015/7/29,china,aaa7,phone610,ASD14875,15006
-8,2015/7/30,china,aaa8,phone1848,ASD57308,15007
-9,2015/7/18,china,aaa9,phone706,ASD86717,15008
-10,2015/7/19,usa,aaa10,phone685,ASD30505,15009
-11,2015/7/18,china,aaa11,phone1554,ASD26101,15010
-12,2015/7/19,china,aaa12,phone1781,ASD85711,15011
-13,2015/7/20,china,aaa13,phone943,ASD39200,15012
-14,2015/7/21,china,aaa14,phone1954,ASD80468,15013
-15,2015/7/22,china,aaa15,phone451,ASD1954,15014
-16,2015/7/23,china,aaa16,phone390,ASD38513,15015
-17,2015/7/24,china,aaa17,phone1929,ASD86213,15016
-18,2015/7/25,usa,aaa18,phone910,ASD88812,15017
-19,2015/7/26,china,aaa19,phone2151,ASD9316,15018
-20,2015/7/27,china,aaa20,phone2625,ASD62597,15019
-21,2015/7/28,china,aaa21,phone1371,ASD27896,15020
-22,2015/7/29,china,aaa22,phone945,ASD79760,15021
-23,2015/7/30,china,aaa23,phone2177,ASD45410,15022
-24,2015/7/31,china,aaa24,phone1586,ASD80645,15023
-25,2015/8/1,china,aaa25,phone1310,ASD36408,15024
-26,2015/8/2,china,aaa26,phone1579,ASD14571,15025
-27,2015/8/3,china,aaa27,phone2123,ASD36243,15026
-28,2015/8/4,china,aaa28,phone2334,ASD57825,15027
-29,2015/8/5,china,aaa29,phone1166,ASD26161,15028
-30,2015/8/6,china,aaa30,phone2248,ASD47899,15029
-31,2015/8/7,china,aaa31,phone475,ASD89811,15030
-32,2015/8/8,china,aaa32,phone2499,ASD87974,15031
-33,2015/8/9,china,aaa33,phone2333,ASD62408,15032
-34,2015/8/10,china,aaa34,phone1128,ASD73138,15033
-35,2015/8/11,china,aaa35,phone1063,ASD29573,15034
-36,2015/8/12,china,aaa36,phone1633,ASD82574,15035
-37,2015/8/13,china,aaa37,phone775,ASD47938,15036
-38,2015/8/14,china,aaa38,phone817,ASD40947,15037
-39,2015/8/15,china,aaa39,phone2221,ASD6379,15038
-40,2015/8/16,china,aaa40,phone2289,ASD48374,15039
-41,2015/8/17,china,aaa41,phone599,ASD44560,15040
-42,2015/8/18,china,aaa42,phone384,ASD613,15041
-43,2015/8/19,china,aaa43,phone731,ASD66050,15042
-44,2015/8/20,china,aaa44,phone2128,ASD39759,15043
-45,2015/8/21,china,aaa45,phone1503,ASD31200,15044
-46,2015/8/22,china,aaa46,phone1833,ASD22945,15045
-47,2015/8/23,china,aaa47,phone2346,ASD80162,15046
-48,2015/8/24,china,aaa48,phone2714,ASD27822,15047
-49,2015/8/25,china,aaa49,phone1582,ASD21279,15048
-50,2015/8/26,china,aaa50,phone83,ASD17242,15049
-51,2015/8/27,china,aaa51,phone54,ASD29131,15050
-52,2015/8/28,china,aaa52,phone526,ASD73647,15051
-53,2015/8/29,china,aaa53,phone1308,ASD80493,15052
-54,2015/8/30,china,aaa54,phone2785,ASD30573,15053
-55,2015/8/31,china,aaa55,phone2133,ASD49757,15054
-56,2015/9/1,china,aaa56,phone871,ASD54753,15055
-57,2015/9/2,china,aaa57,phone1570,ASD25758,15056
-58,2015/9/3,china,aaa58,phone434,ASD30291,15057
-59,2015/9/4,china,aaa59,phone2023,ASD60739,15058
-60,2015/9/5,china,aaa60,phone1755,ASD4955,15059
-61,2015/9/6,china,aaa61,phone1120,ASD41678,15060
-62,2015/9/7,china,aaa62,phone526,ASD73647,15061
-63,2015/9/8,china,aaa63,phone111,ASD20917,15062
-64,2015/9/9,china,aaa64,phone2477,ASD78171,15063
-65,2015/9/10,china,aaa65,phone1458,ASD3023,15064
-66,2015/9/11,china,aaa66,phone33,ASD54379,15065
-67,2015/9/12,china,aaa67,phone1710,ASD65296,15066
-68,2015/9/13,china,aaa68,phone118,ASD4568,15067
-69,2015/9/14,china,aaa69,phone2772,ASD42161,15068
-70,2015/9/15,china,aaa70,phone1013,ASD88261,15069
-71,2015/9/16,china,aaa71,phone1606,ASD33903,15070
-72,2015/9/17,china,aaa72,phone2800,ASD60308,15071
-73,2015/9/18,china,aaa73,phone2461,ASD14645,15072
-74,2015/9/19,china,aaa74,phone1038,ASD66620,15073
-75,2015/9/20,china,aaa75,phone2882,ASD23220,15074
-76,2015/9/21,china,aaa76,phone1665,ASD31618,15075
-77,2015/9/22,china,aaa77,phone2991,ASD37964,15076
-78,2015/9/23,china,aaa78,phone620,ASD7257,15077
-79,2015/9/24,china,aaa79,phone1097,ASD12510,15078
-80,2015/9/25,usa,aaa80,phone1668,ASD41149,15079
-81,2015/9/26,china,aaa81,phone2869,ASD95862,15080
-82,2015/9/27,china,aaa82,phone2506,ASD77011,15081
-83,2015/9/28,china,aaa83,phone2897,ASD6674,15082
-84,2015/9/29,china,aaa84,phone954,ASD72595,15083
-85,2015/9/30,china,aaa85,phone1382,ASD86617,15084
-86,2015/10/1,china,aaa86,phone284,ASD31454,15085
-87,2015/10/2,china,aaa87,phone1000,ASD1404,15086
-88,2015/10/3,china,aaa88,phone1813,ASD6955,15087
-89,2015/10/4,china,aaa89,phone2301,ASD14198,15088
-90,2015/10/5,france,aaa90,phone791,ASD13426,150

[30/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/example-data.csv
--
diff --git a/integration/spark-common-test/src/test/resources/example-data.csv 
b/integration/spark-common-test/src/test/resources/example-data.csv
new file mode 100644
index 000..72d1063
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/example-data.csv
@@ -0,0 +1,1001 @@
+ID,date,country,name,phonetype,serialname,salary
+1,2015/7/23,china,aaa1,phone197,ASD69643,15000
+2,2015/7/24,china,aaa2,phone756,ASD42892,15001
+3,2015/7/25,china,aaa3,phone1904,ASD37014,15002
+4,2015/7/26,china,aaa4,phone2435,ASD66902,15003
+5,2015/7/27,china,aaa5,phone2441,ASD90633,15004
+6,2015/7/28,china,aaa6,phone294,ASD59961,15005
+7,2015/7/29,china,aaa7,phone610,ASD14875,15006
+8,2015/7/30,china,aaa8,phone1848,ASD57308,15007
+9,2015/7/18,china,aaa9,phone706,ASD86717,15008
+10,2015/7/19,usa,aaa10,phone685,ASD30505,15009
+11,2015/7/18,china,aaa11,phone1554,ASD26101,15010
+12,2015/7/19,china,aaa12,phone1781,ASD85711,15011
+13,2015/7/20,china,aaa13,phone943,ASD39200,15012
+14,2015/7/21,china,aaa14,phone1954,ASD80468,15013
+15,2015/7/22,china,aaa15,phone451,ASD1954,15014
+16,2015/7/23,china,aaa16,phone390,ASD38513,15015
+17,2015/7/24,china,aaa17,phone1929,ASD86213,15016
+18,2015/7/25,usa,aaa18,phone910,ASD88812,15017
+19,2015/7/26,china,aaa19,phone2151,ASD9316,15018
+20,2015/7/27,china,aaa20,phone2625,ASD62597,15019
+21,2015/7/28,china,aaa21,phone1371,ASD27896,15020
+22,2015/7/29,china,aaa22,phone945,ASD79760,15021
+23,2015/7/30,china,aaa23,phone2177,ASD45410,15022
+24,2015/7/31,china,aaa24,phone1586,ASD80645,15023
+25,2015/8/1,china,aaa25,phone1310,ASD36408,15024
+26,2015/8/2,china,aaa26,phone1579,ASD14571,15025
+27,2015/8/3,china,aaa27,phone2123,ASD36243,15026
+28,2015/8/4,china,aaa28,phone2334,ASD57825,15027
+29,2015/8/5,china,aaa29,phone1166,ASD26161,15028
+30,2015/8/6,china,aaa30,phone2248,ASD47899,15029
+31,2015/8/7,china,aaa31,phone475,ASD89811,15030
+32,2015/8/8,china,aaa32,phone2499,ASD87974,15031
+33,2015/8/9,china,aaa33,phone2333,ASD62408,15032
+34,2015/8/10,china,aaa34,phone1128,ASD73138,15033
+35,2015/8/11,china,aaa35,phone1063,ASD29573,15034
+36,2015/8/12,china,aaa36,phone1633,ASD82574,15035
+37,2015/8/13,china,aaa37,phone775,ASD47938,15036
+38,2015/8/14,china,aaa38,phone817,ASD40947,15037
+39,2015/8/15,china,aaa39,phone2221,ASD6379,15038
+40,2015/8/16,china,aaa40,phone2289,ASD48374,15039
+41,2015/8/17,china,aaa41,phone599,ASD44560,15040
+42,2015/8/18,china,aaa42,phone384,ASD613,15041
+43,2015/8/19,china,aaa43,phone731,ASD66050,15042
+44,2015/8/20,china,aaa44,phone2128,ASD39759,15043
+45,2015/8/21,china,aaa45,phone1503,ASD31200,15044
+46,2015/8/22,china,aaa46,phone1833,ASD22945,15045
+47,2015/8/23,china,aaa47,phone2346,ASD80162,15046
+48,2015/8/24,china,aaa48,phone2714,ASD27822,15047
+49,2015/8/25,china,aaa49,phone1582,ASD21279,15048
+50,2015/8/26,china,aaa50,phone83,ASD17242,15049
+51,2015/8/27,china,aaa51,phone54,ASD29131,15050
+52,2015/8/28,china,aaa52,phone526,ASD73647,15051
+53,2015/8/29,china,aaa53,phone1308,ASD80493,15052
+54,2015/8/30,china,aaa54,phone2785,ASD30573,15053
+55,2015/8/31,china,aaa55,phone2133,ASD49757,15054
+56,2015/9/1,china,aaa56,phone871,ASD54753,15055
+57,2015/9/2,china,aaa57,phone1570,ASD25758,15056
+58,2015/9/3,china,aaa58,phone434,ASD30291,15057
+59,2015/9/4,china,aaa59,phone2023,ASD60739,15058
+60,2015/9/5,china,aaa60,phone1755,ASD4955,15059
+61,2015/9/6,china,aaa61,phone1120,ASD41678,15060
+62,2015/9/7,china,aaa62,phone526,ASD73647,15061
+63,2015/9/8,china,aaa63,phone111,ASD20917,15062
+64,2015/9/9,china,aaa64,phone2477,ASD78171,15063
+65,2015/9/10,china,aaa65,phone1458,ASD3023,15064
+66,2015/9/11,china,aaa66,phone33,ASD54379,15065
+67,2015/9/12,china,aaa67,phone1710,ASD65296,15066
+68,2015/9/13,china,aaa68,phone118,ASD4568,15067
+69,2015/9/14,china,aaa69,phone2772,ASD42161,15068
+70,2015/9/15,china,aaa70,phone1013,ASD88261,15069
+71,2015/9/16,china,aaa71,phone1606,ASD33903,15070
+72,2015/9/17,china,aaa72,phone2800,ASD60308,15071
+73,2015/9/18,china,aaa73,phone2461,ASD14645,15072
+74,2015/9/19,china,aaa74,phone1038,ASD66620,15073
+75,2015/9/20,china,aaa75,phone2882,ASD23220,15074
+76,2015/9/21,china,aaa76,phone1665,ASD31618,15075
+77,2015/9/22,china,aaa77,phone2991,ASD37964,15076
+78,2015/9/23,china,aaa78,phone620,ASD7257,15077
+79,2015/9/24,china,aaa79,phone1097,ASD12510,15078
+80,2015/9/25,usa,aaa80,phone1668,ASD41149,15079
+81,2015/9/26,china,aaa81,phone2869,ASD95862,15080
+82,2015/9/27,china,aaa82,phone2506,ASD77011,15081
+83,2015/9/28,china,aaa83,phone2897,ASD6674,15082
+84,2015/9/29,china,aaa84,phone954,ASD72595,15083
+85,2015/9/30,china,aaa85,phone1382,ASD86617,15084
+86,2015/10/1,china,aaa86,phone284,ASD31454,15085
+87,2015/10/2,china,aaa87,phone1000,ASD1404,15086
+88,2015/10/3,china,aaa88,phone1813,ASD6955,15087
+89,2015/10/4,china,aaa89,phone2301,ASD

[20/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/100_olap.csv
--
diff --git a/integration/spark/src/test/resources/100_olap.csv 
b/integration/spark/src/test/resources/100_olap.csv
deleted file mode 100644
index cd51615..000
--- a/integration/spark/src/test/resources/100_olap.csv
+++ /dev/null
@@ -1,99 +0,0 @@
-1AA1,1,MAC,2Device Color,9After the device shell color,109,1Market name,8RAM 
size,4ROM size,0CPU Audit,6CUP Clocked,7Series,2015-7-1 12:07:28,1,9Internal 
models,2015-7-1 12:07:06,4,guomei,7,Chinese,Hubei 
Province,yichang,yichang,yichang,1,5281803,2015-7-1 
12:08:40,2,Chinese,Guangdong Province,guangzhou,longhua,mingzhi,1,1,EMUI 
Release,Operating System Version,Background version number,3Background Flash 
version,4webUI Version number,9webUI Types of operators version,4web Type data 
card version number,3Operators version,Phone PAD Partitioned 
versions,2015,7,1,12,2,Chinese,Guangdong 
Province,guangzhou,longhua,mingzhi,1,EMUI Release,Operating System 
Version,Background version number,3Background Flash version,4webUI Version 
number,9webUI Types of operators version,4web Type data card version 
number,3Operators version,Phone PAD Partitioned versions,1,2738.562,Site 
Name2738
-1AA10,10,MAC,0Device Color,5After the device shell color,93,6Market name,4RAM 
size,1ROM size,7CPU Audit,1CUP Clocked,7Series,2015-7-1 12:07:28,10,2Internal 
models,2015-7-1 12:07:06,4,guomei,7,Chinese,Hubei 
Province,yichang,yichang,yichang,10,6805600,2015-7-1 12:08:40,6,Chinese,Hubei 
Province,wuhan,hongshan,hongshan,10,10,EMUI Release,Operating System 
Version,Background version number,4Background Flash version,9webUI Version 
number,6webUI Types of operators version,8web Type data card version 
number,9Operators version,Phone PAD Partitioned 
versions,2015,7,1,12,4,Chinese,Hunan 
Province,xiangtan,xiangtan,jianshelu,10,EMUI Release,Operating System 
Version,Background version number,4Background Flash version,9webUI Version 
number,6webUI Types of operators version,8web Type data card version 
number,9Operators version,Phone PAD Partitioned versions,10,1714.635,Site 
Name1714
-1AA100,100,MAC,1Device Color,9After the device shell color,2591,4Market 
name,7RAM size,2ROM size,0CPU Audit,5CUP Clocked,5Series,2015-7-1 
12:07:28,100,0Internal models,2015-7-1 12:07:06,6,yidong,4,Chinese,Hunan 
Province,xiangtan,xiangtan,jianshelu,100,8231335,2015-7-1 
12:08:40,7,Chinese,Hubei Province,yichang,yichang,yichang,100,100,EMUI 
Release,Operating System Version,Background version number,0Background Flash 
version,7webUI Version number,0webUI Types of operators version,2web Type data 
card version number,3Operators version,Phone PAD Partitioned 
versions,2015,7,1,12,7,Chinese,Hubei Province,yichang,yichang,yichang,100,EMUI 
Release,Operating System Version,Background version number,0Background Flash 
version,7webUI Version number,0webUI Types of operators version,2web Type data 
card version number,3Operators version,Phone PAD Partitioned 
versions,100,1271,Site Name1271
-1AA1000,1000,MAC,2Device Color,8After the device shell color,2531,9Market 
name,5RAM size,2ROM size,1CPU Audit,9CUP Clocked,5Series,2015-7-1 
12:07:28,1000,7Internal models,2015-7-1 12:07:06,3,shuling,6,Chinese,Hubei 
Province,wuhan,hongshan,hongshan,1000,8978765,2015-7-1 12:08:40,3,Chinese,Hunan 
Province,changsha,yuhua,shazitang,1000,1000,EMUI Release,Operating System 
Version,Background version number,5Background Flash version,0webUI Version 
number,9webUI Types of operators version,9web Type data card version 
number,2Operators version,Phone PAD Partitioned 
versions,2015,7,1,12,7,Chinese,Hubei Province,yichang,yichang,yichang,1000,EMUI 
Release,Operating System Version,Background version number,5Background Flash 
version,0webUI Version number,9webUI Types of operators version,9web Type data 
card version number,2Operators version,Phone PAD Partitioned 
versions,1000,692,Site Name692
-1AA1,1,MAC,0Device Color,1After the device shell color,2408,8Market 
name,1RAM size,0ROM size,3CPU Audit,1CUP Clocked,7Series,2015-7-1 
12:07:28,1,4Internal models,2015-7-1 12:07:06,1,taobao,2,Chinese,Guangdong 
Province,guangzhou,longhua,mingzhi,1,3784858,2015-7-1 
12:08:40,4,Chinese,Hunan Province,xiangtan,xiangtan,jianshelu,1,1,EMUI 
Release,Operating System Version,Background version number,2Background Flash 
version,7webUI Version number,5webUI Types of operators version,5web Type data 
card version number,0Operators version,Phone PAD Partitioned 
versions,2015,7,1,12,7,Chinese,Hubei 
Province,yichang,yichang,yichang,1,EMUI Release,Operating System 
Version,Background version number,2Background Flash version,7webUI Version 
number,5webUI Types of operators version,5web Type data card version 
number,0Operators version,Phone PAD Partitioned versions,1,2175,Site 
Name2175
-1AA10,10,MAC,3Device Color,5After th

[23/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
new file mode 100644
index 000..249e4ac
--- /dev/null
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
@@ -0,0 +1,326 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.spark.testsuite.dataretention
+
+import java.text.SimpleDateFormat
+
+import org.apache.carbondata.core.updatestatus.SegmentStatusManager
+import org.apache.carbondata.locks.{LockUsage, CarbonLockFactory, ICarbonLock}
+import org.apache.commons.lang3.time.DateUtils
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.common.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.carbon.path.CarbonStorePath
+import org.apache.carbondata.core.carbon.{AbsoluteTableIdentifier, 
CarbonTableIdentifier}
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.load.LoadMetadataDetails
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
+
+/**
+ * This class contains data retention test cases
+ * Created by Manohar on 5/9/2016.
+ */
+class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll {
+
+  val absoluteTableIdentifierForLock: AbsoluteTableIdentifier = new
+  AbsoluteTableIdentifier(storeLocation,
+new CarbonTableIdentifier(CarbonCommonConstants.DATABASE_DEFAULT_NAME, 
"retentionlock", "200"))
+  val absoluteTableIdentifierForRetention: AbsoluteTableIdentifier = new
+  AbsoluteTableIdentifier(storeLocation,
+new CarbonTableIdentifier(
+  CarbonCommonConstants.DATABASE_DEFAULT_NAME, 
"DataRetentionTable".toLowerCase(), "300"))
+  val carbonTablePath = CarbonStorePath
+.getCarbonTablePath(absoluteTableIdentifierForRetention.getStorePath,
+  
absoluteTableIdentifierForRetention.getCarbonTableIdentifier).getMetadataDirectoryPath
+
+  var carbonDateFormat = new 
SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP)
+  var defaultDateFormat = new SimpleDateFormat(CarbonCommonConstants
+.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+  val carbonTableStatusLock: ICarbonLock = CarbonLockFactory
+.getCarbonLockObj(absoluteTableIdentifierForLock.getCarbonTableIdentifier, 
LockUsage.TABLE_STATUS_LOCK)
+  val carbonDeleteSegmentLock: ICarbonLock = CarbonLockFactory
+.getCarbonLockObj(absoluteTableIdentifierForLock.getCarbonTableIdentifier, 
LockUsage.DELETE_SEGMENT_LOCK)
+  val carbonCleanFilesLock: ICarbonLock = CarbonLockFactory
+.getCarbonLockObj(absoluteTableIdentifierForLock.getCarbonTableIdentifier, 
LockUsage.CLEAN_FILES_LOCK)
+  val carbonMetadataLock: ICarbonLock = CarbonLockFactory
+.getCarbonLockObj(absoluteTableIdentifierForLock.getCarbonTableIdentifier, 
LockUsage.METADATA_LOCK)
+
+
+  override def beforeAll {
+
CarbonProperties.getInstance.addProperty(CarbonCommonConstants.MAX_QUERY_EXECUTION_TIME,
 "1")
+CarbonProperties.getInstance()
+  .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "/MM/dd")
+sql(
+  "CREATE table DataRetentionTable (ID int, date String, country String, 
name " +
+  "String," +
+  "phonetype String, serialname String, salary int) stored by 
'org.apache.carbondata.format'"
+
+)
+sql(
+  "CREATE table retentionlock (ID int, date String, country String, name " 
+
+  "String," +
+  "phonetype String, serialname String, salary int) stored by 
'org.apache.carbondata.format'"
+
+)
+
+sql(
+  s"LOAD DATA LOCAL INPATH '$resourcesPath/dataretention1.csv' INTO TABLE 
retentionlock " +
+  "OPTIONS('DELIMITER' =  ','

[19/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/10dim_4msr.csv
--
diff --git a/integration/spark/src/test/resources/10dim_4msr.csv 
b/integration/spark/src/test/resources/10dim_4msr.csv
deleted file mode 100644
index 5727288..000
--- a/integration/spark/src/test/resources/10dim_4msr.csv
+++ /dev/null
@@ -1,1000 +0,0 @@
-column1338,column233,column383,column45,column549,column639,column727,column817,column94,column106,908,316,489,573
-column1814,column2342,column333,column429,column50,column611,column716,column816,column910,column1043,922,409,948,330
-column1355,column2217,column315,column439,column536,column643,column749,column823,column927,column1039,109,776,613,925
-column1771,column2363,column318,column434,column544,column625,column78,column825,column924,column1034,66,378,438,123
-column1610,column2313,column388,column429,column537,column613,column742,column828,column927,column109,548,786,927,385
-column1577,column2191,column323,column414,column529,column613,column78,column845,column93,column1035,380,910,261,442
-column1801,column2196,column382,column440,column50,column632,column717,column82,column936,column1019,844,681,293,312
-column1127,column235,column374,column416,column55,column617,column730,column85,column928,column1015,624,825,172,481
-column1905,column2137,column334,column444,column525,column619,column716,column813,column94,column1022,709,953,85,735
-column1129,column2375,column374,column447,column55,column66,column725,column812,column932,column105,879,974,351,844
-column1666,column2268,column395,column426,column55,column642,column75,column836,column90,column1042,438,959,430,809
-column1259,column231,column311,column42,column516,column646,column74,column80,column916,column1033,393,552,916,770
-column1614,column2197,column316,column447,column542,column619,column746,column827,column931,column1024,794,149,513,836
-column1417,column20,column326,column434,column521,column631,column740,column88,column90,column1047,785,444,478,764
-column1192,column2467,column360,column427,column55,column632,column711,column814,column944,column1010,185,70,317,389
-column1105,column2448,column34,column47,column517,column614,column737,column88,column927,column1025,18,545,466,651
-column1447,column2177,column326,column431,column55,column639,column741,column819,column924,column105,701,777,313,270
-column1512,column2116,column349,column41,column57,column615,column722,column836,column944,column1016,785,948,834,830
-column1485,column227,column390,column441,column520,column614,column728,column837,column914,column1047,378,629,253,473
-column1932,column260,column342,column412,column549,column61,column723,column88,column921,column1040,640,711,467,1041
-column1522,column2349,column364,column441,column529,column64,column74,column817,column939,column1031,732,995,991,55
-column1541,column2355,column377,column420,column532,column63,column72,column820,column917,column1019,378,553,913,953
-column1608,column2437,column350,column439,column56,column641,column716,column813,column934,column1040,693,600,952,545
-column1479,column2248,column316,column425,column534,column624,column732,column848,column927,column107,631,361,383,332
-column1717,column225,column348,column42,column537,column622,column719,column843,column921,column1010,219,67,544,666
-column1961,column2350,column314,column45,column526,column635,column716,column832,column920,column100,1008,580,457,846
-column1899,column262,column398,column413,column55,column618,column713,column829,column920,column1019,359,839,651,297
-column1886,column263,column380,column433,column524,column69,column75,column83,column92,column1011,147,271,67,760
-column1752,column2491,column359,column447,column513,column614,column737,column816,column914,column100,98,544,330,1021
-column1913,column2430,column372,column420,column512,column64,column749,column833,column922,column1049,885,55,843,176
-column1112,column2248,column370,column47,column57,column68,column78,column841,column933,column1012,148,298,805,788
-column1157,column2462,column354,column413,column515,column65,column74,column833,column940,column1019,674,449,723,396
-column1104,column2454,column319,column440,column538,column68,column710,column815,column96,column1049,903,399,18,234
-column1391,column2335,column390,column413,column52,column636,column73,column88,column925,column1026,840,752,267,237
-column1994,column2358,column376,column44,column54,column627,column741,column839,column93,column1032,237,543,20,70
-column1364,column2334,column374,column418,column516,column644,column746,column812,column942,column1043,956,135,892,203
-column1495,column2151,column319,column410,column533,column69,column715,column834,column933,column1030,423,561,856,868
-column1684,column27,column324,column444,column541,column637,column726,column820,column931,column1016,458,913,500,746
-column1975,column2314,column375,column44,column522,column612,column7

[34/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/IUD/T_Hive1.csv
--
diff --git a/integration/spark-common-test/src/test/resources/IUD/T_Hive1.csv 
b/integration/spark-common-test/src/test/resources/IUD/T_Hive1.csv
new file mode 100644
index 000..418ae5c
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/IUD/T_Hive1.csv
@@ -0,0 +1,10 @@
+TRUE,1,450,304034400,2034343000, 
121.5,4.99,2.44,SE3423ee,asfdsffdfg,ERTETRWT,2012-01-11 
03:04:05.123456719,2012-01-20
+TRUE,2,423,3046340,2003454300, 
121.5,4.99,2.44,SE3423ee,asfdsffdfg,EtryTRWT,2012-01-12 
03:14:05.123456729,2012-01-20
+TRUE,3,453,3003445,203450, 
121.5,4.99,2.44,SE3423ee,asfdsffdfg,ERTEerWT,2012-01-13 
03:24:05.123456739,2012-01-20
+TRUE,4,4350,3044364,20, 
121.5,4.99,2.44,SE3423ee,asfdsffdfg,ERTtryWT,2012-01-14 
23:04:05.123456749,2012-01-20
+TRUE,114,4520,3430,200430, 
121.5,4.99,2.44,RE3423ee,asfdsffdfg,4RTETRWT,2012-01-01 
23:04:05.123456819,2012-01-20
+FALSE,123,454,3040,20, 
121.5,4.99,2.44,RE3423ee,asfrewerfg,6RTETRWT,2012-01-02 
23:04:05.123456829,2012-01-20
+TRUE,11,4530,340,20, 
121.5,4.99,2.44,SE3423ee,asfdsffder,TRTETRWT,2012-01-03 
05:04:05.123456839,2012-01-20
+TRUE,14,4590,3000400,20, 
121.5,4.99,2.44,ASD423ee,asfertfdfg,HRTETRWT,2012-01-04 
05:04:05.123456849,2012-01-20
+FALSE,41,4250,0,20, 
121.5,4.99,2.44,SAD423ee,asrtsffdfg,HRTETRWT,2012-01-05 
05:04:05.123456859,2012-01-20
+TRUE,13,4510,30400,20, 
121.5,4.99,2.44,DE3423ee,asfrtffdfg,YHTETRWT,2012-01-06 
06:04:05.123456869,2012-01-20

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/IUD/comp1.csv
--
diff --git a/integration/spark-common-test/src/test/resources/IUD/comp1.csv 
b/integration/spark-common-test/src/test/resources/IUD/comp1.csv
new file mode 100644
index 000..9738e06
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/IUD/comp1.csv
@@ -0,0 +1,11 @@
+c1,c2,c3,c5
+a,1,aa,aaa
+b,2,bb,bbb
+c,3,cc,ccc
+d,4,dd,ddd
+e,5,ee,eee
+f,6,ff,fff
+g,7,gg,ggg
+h,8,hh,hhh
+i,9,ii,iii
+j,10,jj,jjj

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/IUD/comp2.csv
--
diff --git a/integration/spark-common-test/src/test/resources/IUD/comp2.csv 
b/integration/spark-common-test/src/test/resources/IUD/comp2.csv
new file mode 100644
index 000..5a28d5c
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/IUD/comp2.csv
@@ -0,0 +1,11 @@
+c1,c2,c3,c5
+a,11,aa,aaa
+b,12,bb,bbb
+c,13,cc,ccc
+d,14,dd,ddd
+e,15,ee,eee
+f,16,ff,fff
+g,17,gg,ggg
+h,18,hh,hhh
+i,19,ii,iii
+j,20,jj,jjj

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/IUD/comp3.csv
--
diff --git a/integration/spark-common-test/src/test/resources/IUD/comp3.csv 
b/integration/spark-common-test/src/test/resources/IUD/comp3.csv
new file mode 100644
index 000..a555f71
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/IUD/comp3.csv
@@ -0,0 +1,11 @@
+c1,c2,c3,c5
+a,21,aa,aaa
+b,22,bb,bbb
+c,23,cc,ccc
+d,24,dd,ddd
+e,25,ee,eee
+f,26,ff,fff
+g,27,gg,ggg
+h,28,hh,hhh
+i,29,ii,iii
+j,30,jj,jjj

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/IUD/comp4.csv
--
diff --git a/integration/spark-common-test/src/test/resources/IUD/comp4.csv 
b/integration/spark-common-test/src/test/resources/IUD/comp4.csv
new file mode 100644
index 000..0450a19
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/IUD/comp4.csv
@@ -0,0 +1,11 @@
+c1,c2,c3,c5
+a,31,aa,aaa
+b,32,bb,bbb
+c,33,cc,ccc
+d,34,dd,ddd
+e,35,ee,eee
+f,36,ff,fff
+g,37,gg,ggg
+h,38,hh,hhh
+i,39,ii,iii
+j,40,jj,jjj

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/IUD/dest.csv
--
diff --git a/integration/spark-common-test/src/test/resources/IUD/dest.csv 
b/integration/spark-common-test/src/test/resources/IUD/dest.csv
new file mode 100644
index 000..4ef9aa5
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/IUD/dest.csv
@@ -0,0 +1,6 @@
+c1,c2,c3,c5
+a,1,aa,aaa
+b,2,bb,bbb
+c,3,cc,ccc
+d,4,dd,ddd
+e,5,ee,eee
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a

[36/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/100_olap.csv
--
diff --git a/integration/spark-common-test/src/test/resources/100_olap.csv 
b/integration/spark-common-test/src/test/resources/100_olap.csv
new file mode 100644
index 000..cd51615
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/100_olap.csv
@@ -0,0 +1,99 @@
+1AA1,1,MAC,2Device Color,9After the device shell color,109,1Market name,8RAM 
size,4ROM size,0CPU Audit,6CUP Clocked,7Series,2015-7-1 12:07:28,1,9Internal 
models,2015-7-1 12:07:06,4,guomei,7,Chinese,Hubei 
Province,yichang,yichang,yichang,1,5281803,2015-7-1 
12:08:40,2,Chinese,Guangdong Province,guangzhou,longhua,mingzhi,1,1,EMUI 
Release,Operating System Version,Background version number,3Background Flash 
version,4webUI Version number,9webUI Types of operators version,4web Type data 
card version number,3Operators version,Phone PAD Partitioned 
versions,2015,7,1,12,2,Chinese,Guangdong 
Province,guangzhou,longhua,mingzhi,1,EMUI Release,Operating System 
Version,Background version number,3Background Flash version,4webUI Version 
number,9webUI Types of operators version,4web Type data card version 
number,3Operators version,Phone PAD Partitioned versions,1,2738.562,Site 
Name2738
+1AA10,10,MAC,0Device Color,5After the device shell color,93,6Market name,4RAM 
size,1ROM size,7CPU Audit,1CUP Clocked,7Series,2015-7-1 12:07:28,10,2Internal 
models,2015-7-1 12:07:06,4,guomei,7,Chinese,Hubei 
Province,yichang,yichang,yichang,10,6805600,2015-7-1 12:08:40,6,Chinese,Hubei 
Province,wuhan,hongshan,hongshan,10,10,EMUI Release,Operating System 
Version,Background version number,4Background Flash version,9webUI Version 
number,6webUI Types of operators version,8web Type data card version 
number,9Operators version,Phone PAD Partitioned 
versions,2015,7,1,12,4,Chinese,Hunan 
Province,xiangtan,xiangtan,jianshelu,10,EMUI Release,Operating System 
Version,Background version number,4Background Flash version,9webUI Version 
number,6webUI Types of operators version,8web Type data card version 
number,9Operators version,Phone PAD Partitioned versions,10,1714.635,Site 
Name1714
+1AA100,100,MAC,1Device Color,9After the device shell color,2591,4Market 
name,7RAM size,2ROM size,0CPU Audit,5CUP Clocked,5Series,2015-7-1 
12:07:28,100,0Internal models,2015-7-1 12:07:06,6,yidong,4,Chinese,Hunan 
Province,xiangtan,xiangtan,jianshelu,100,8231335,2015-7-1 
12:08:40,7,Chinese,Hubei Province,yichang,yichang,yichang,100,100,EMUI 
Release,Operating System Version,Background version number,0Background Flash 
version,7webUI Version number,0webUI Types of operators version,2web Type data 
card version number,3Operators version,Phone PAD Partitioned 
versions,2015,7,1,12,7,Chinese,Hubei Province,yichang,yichang,yichang,100,EMUI 
Release,Operating System Version,Background version number,0Background Flash 
version,7webUI Version number,0webUI Types of operators version,2web Type data 
card version number,3Operators version,Phone PAD Partitioned 
versions,100,1271,Site Name1271
+1AA1000,1000,MAC,2Device Color,8After the device shell color,2531,9Market 
name,5RAM size,2ROM size,1CPU Audit,9CUP Clocked,5Series,2015-7-1 
12:07:28,1000,7Internal models,2015-7-1 12:07:06,3,shuling,6,Chinese,Hubei 
Province,wuhan,hongshan,hongshan,1000,8978765,2015-7-1 12:08:40,3,Chinese,Hunan 
Province,changsha,yuhua,shazitang,1000,1000,EMUI Release,Operating System 
Version,Background version number,5Background Flash version,0webUI Version 
number,9webUI Types of operators version,9web Type data card version 
number,2Operators version,Phone PAD Partitioned 
versions,2015,7,1,12,7,Chinese,Hubei Province,yichang,yichang,yichang,1000,EMUI 
Release,Operating System Version,Background version number,5Background Flash 
version,0webUI Version number,9webUI Types of operators version,9web Type data 
card version number,2Operators version,Phone PAD Partitioned 
versions,1000,692,Site Name692
+1AA1,1,MAC,0Device Color,1After the device shell color,2408,8Market 
name,1RAM size,0ROM size,3CPU Audit,1CUP Clocked,7Series,2015-7-1 
12:07:28,1,4Internal models,2015-7-1 12:07:06,1,taobao,2,Chinese,Guangdong 
Province,guangzhou,longhua,mingzhi,1,3784858,2015-7-1 
12:08:40,4,Chinese,Hunan Province,xiangtan,xiangtan,jianshelu,1,1,EMUI 
Release,Operating System Version,Background version number,2Background Flash 
version,7webUI Version number,5webUI Types of operators version,5web Type data 
card version number,0Operators version,Phone PAD Partitioned 
versions,2015,7,1,12,7,Chinese,Hubei 
Province,yichang,yichang,yichang,1,EMUI Release,Operating System 
Version,Background version number,2Background Flash version,7webUI Version 
number,5webUI Types of operators version,5web Type data card version 
number,0Operators version,Phone PAD Partitioned versions,1,2175,Site 
Name2175
+

[15/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/datawithcomplexspecialchar.csv
--
diff --git 
a/integration/spark/src/test/resources/datawithcomplexspecialchar.csv 
b/integration/spark/src/test/resources/datawithcomplexspecialchar.csv
deleted file mode 100644
index b5d1beb..000
--- a/integration/spark/src/test/resources/datawithcomplexspecialchar.csv
+++ /dev/null
@@ -1,151 +0,0 @@
-customer_id,124_string_level_province,numeric_level,date_level,Time_level,account_num,lname,fname,mi,address1,address2,address3,address4,city,country,customer_region_id,phone1,phone2,marital_status,yearly_income,gender,total_children,num_children_at_home,education,member_card,occupation,houseowner,num_cars_owned,fullname
-1,Oaxaca,9535837227,26-08-1961,08:08:08,87462024688,Nowmer,Sheri,A.,2433 
Bailey RoadTlaxiaco,Mexico,30,271-555-9715,119-555-1969,M,$30K - 
$50K,F,4,2,Partial High School,Bronze,Skilled Manual,Y,4,Sheri Nowmer
-2,https://www.google.co.in/?gws_rd=cr&ei=BF7rUqOoEc6GrgeIooHQDQ#q=India+state 
BC,1.51E+09,15-08-1947,07:14:54,87470586299,Whelply,Derrick,I.,2219 Dewing 
AvenueSooke,Canada,101,211-555-7669,807-555-9033,S,$70K - 
$90K,M,1,0,Partial High School,Bronze,Professional,N,3,Derrick Whelply
-3,&&  1 2 3 4 5 special set to test &  & &  _ _ _  
786,1,01-01-1900,23:59:59,87475757600,Derry,Jeanne,,7640 First 
Ave.Issaquah,USA,21,656-555-2272,221-555-2493,M,$50K - $70K,F,1,1,Bachelors 
Degree,Bronze,Professional,Y,2,Jeanne Derry
-4,https://www.google.co.in/?gws_rd=cr&ei=BF7rUqOoEc6GrgeIooHQDQ#q=India+state 
BC,1.51E+09,15-08-1947,07:14:54,87500482201,Spence,Michael,J.,337 Tosca 
WayBurnaby,Canada,92,929-555-7279,272-555-2844,M,$10K - $30K,M,4,4,Partial 
High School,Normal,Skilled Manual,N,2,Michael Spence
-5,SpecialCharacter level ~!@#$%^&*()_+| 
CA,0,10-05-1951,00:00:00,87514054179,Gutierrez,Maya,,8668 Via 
NerudaNovato,USA,42,387-555-7172,260-555-6936,S,$30K - $50K,F,3,0,Partial 
College,Silver,Manual,N,3,Maya Gutierrez
-6,&&  1 2 3 4 5 special set to test &  & &  _ _ _  
786,1,01-01-1900,23:59:59,87517782449,Damstra,Robert,F.,1619 Stillman 
CourtLynnwood,USA,75,922-555-5465,333-555-5915,S,$70K - 
$90K,F,3,0,Bachelors Degree,Bronze,Professional,Y,3,Robert Damstra
-7,Oaxaca,9535837227,26-08-1961,08:08:08,87521172800,Kanagaki,Rebecca,,2860 D 
Mt. Hood CircleTlaxiaco,Mexico,30,515-555-6247,934-555-9211,M,$30K - 
$50K,F,2,1,Partial High School,Bronze,Manual,Y,3,Rebecca Kanagaki
-8,"esc  !@~##%%&**(*&((*()()*  ""  some thing ""  ' DF 
",0,10-05-1951,00:00:00,87539744377,Brunner,Kim,H.,6064 Brodia CourtSan 
Andres,Mexico,106,411-555-6825,130-555-6818,M,$50K - $70K,M,2,2,Bachelors 
Degree,Bronze,Professional,Y,3,Kim Brunner
-9,https://www.google.co.in/?gws_rd=cr&ei=BF7rUqOoEc6GrgeIooHQDQ#q=India+state 
BC,1.51E+09,15-08-1947,07:14:54,87544797658,Blumberg,Brenda,C.,7560 Trees 
DriveRichmond,Canada,90,815-555-3975,642-555-6483,M,$10K - 
$30K,M,5,3,Partial High School,Normal,Skilled Manual,Y,1,Brenda Blumberg
-10,1 2 3 4 5 6 7 8 9 0 ~!@#$%^&*() some thing long 
OR,0,10-05-1951,00:00:00,87568712234,Stanz,Darren,M.,1019 Kenwal Rd.Lake 
Oswego,USA,64,847-555-5443,212-555-8635,S,$30K - $50K,M,4,0,Bachelors 
Degree,Golden,Management,N,4,Darren Stanz
-11,SpecialCharacter level ~!@#$%^&*()_+| 
CA,0,10-05-1951,00:00:00,87572821378,Murraiin,Jonathan,V.,5423 Camby Rd.La 
Mesa,USA,11,612-555-4878,747-555-6928,S,$50K - $70K,M,4,0,High School 
Degree,Bronze,Manual,N,2,Jonathan Murraiin
-12,SpecialCharacter level ~!@#$%^&*()_+| 
CA,0,10-05-1951,00:00:00,87579237222,Creek,Jewel,C.,1792 Belmont Rd.Chula 
Vista,USA,13,555-555-2714,228-555-5450,S,$30K - $50K,F,1,0,High School 
Degree,Bronze,Skilled Manual,N,3,Jewel Creek
-13,LonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmembernameLonglevelmember,0,10-05-1951,00:00:00,87587122917,Medina,Peggy,A.,3796
 Keller RidgeMexico City,Mexico,2,343-555-9778,785-555-2371,S,$30K - 
$50K,M,4,0,High School Degree,Bronze,Manual,N,4,Peggy Medina
-14,SpecialCharacter level ~!@#$%^&*()_+| 
CA,0,10-05-1951,00:00:00,87592626810,Rutledge,Bryan,K.,3074 Ardith 
DriveLincoln Acres,USA,10,659-555-3160,640-555-5439,M,$50K - 
$70K,F,2,2,Bachelors Degree,Bronze,Management,Y,2,Bryan Rutledge
-15,https://www.google.co.in/?gws_rd=cr&ei=BF7rUqOoEc6GrgeIooHQDQ#q=India+state 
BC,1.51E+09,15-08-1947,07:14:54,87597749829,Cavestany,Walter,G.,7987 Seawind 
Dr.Oak Bay,Canada,99,471-555-8853,560-555-4646,S,$90K - 
$110K,M,3,0,Graduate Degree,Bronze,Professional,Y,3,Walter Cavestany
-16,select * from 
scenario1_84gsinglecsv_updatedcolumns_test3;,1.00E+14,02-10-1969,20:20:20,87603285908,Planck,Peggy,M.,4864
 San CarlosCamacho,Mexico,27,698-555-7603,98

[18/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/IUD/T_Hive1.csv
--
diff --git a/integration/spark/src/test/resources/IUD/T_Hive1.csv 
b/integration/spark/src/test/resources/IUD/T_Hive1.csv
deleted file mode 100644
index 418ae5c..000
--- a/integration/spark/src/test/resources/IUD/T_Hive1.csv
+++ /dev/null
@@ -1,10 +0,0 @@
-TRUE,1,450,304034400,2034343000, 
121.5,4.99,2.44,SE3423ee,asfdsffdfg,ERTETRWT,2012-01-11 
03:04:05.123456719,2012-01-20
-TRUE,2,423,3046340,2003454300, 
121.5,4.99,2.44,SE3423ee,asfdsffdfg,EtryTRWT,2012-01-12 
03:14:05.123456729,2012-01-20
-TRUE,3,453,3003445,203450, 
121.5,4.99,2.44,SE3423ee,asfdsffdfg,ERTEerWT,2012-01-13 
03:24:05.123456739,2012-01-20
-TRUE,4,4350,3044364,20, 
121.5,4.99,2.44,SE3423ee,asfdsffdfg,ERTtryWT,2012-01-14 
23:04:05.123456749,2012-01-20
-TRUE,114,4520,3430,200430, 
121.5,4.99,2.44,RE3423ee,asfdsffdfg,4RTETRWT,2012-01-01 
23:04:05.123456819,2012-01-20
-FALSE,123,454,3040,20, 
121.5,4.99,2.44,RE3423ee,asfrewerfg,6RTETRWT,2012-01-02 
23:04:05.123456829,2012-01-20
-TRUE,11,4530,340,20, 
121.5,4.99,2.44,SE3423ee,asfdsffder,TRTETRWT,2012-01-03 
05:04:05.123456839,2012-01-20
-TRUE,14,4590,3000400,20, 
121.5,4.99,2.44,ASD423ee,asfertfdfg,HRTETRWT,2012-01-04 
05:04:05.123456849,2012-01-20
-FALSE,41,4250,0,20, 
121.5,4.99,2.44,SAD423ee,asrtsffdfg,HRTETRWT,2012-01-05 
05:04:05.123456859,2012-01-20
-TRUE,13,4510,30400,20, 
121.5,4.99,2.44,DE3423ee,asfrtffdfg,YHTETRWT,2012-01-06 
06:04:05.123456869,2012-01-20

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/IUD/comp1.csv
--
diff --git a/integration/spark/src/test/resources/IUD/comp1.csv 
b/integration/spark/src/test/resources/IUD/comp1.csv
deleted file mode 100644
index 9738e06..000
--- a/integration/spark/src/test/resources/IUD/comp1.csv
+++ /dev/null
@@ -1,11 +0,0 @@
-c1,c2,c3,c5
-a,1,aa,aaa
-b,2,bb,bbb
-c,3,cc,ccc
-d,4,dd,ddd
-e,5,ee,eee
-f,6,ff,fff
-g,7,gg,ggg
-h,8,hh,hhh
-i,9,ii,iii
-j,10,jj,jjj

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/IUD/comp2.csv
--
diff --git a/integration/spark/src/test/resources/IUD/comp2.csv 
b/integration/spark/src/test/resources/IUD/comp2.csv
deleted file mode 100644
index 5a28d5c..000
--- a/integration/spark/src/test/resources/IUD/comp2.csv
+++ /dev/null
@@ -1,11 +0,0 @@
-c1,c2,c3,c5
-a,11,aa,aaa
-b,12,bb,bbb
-c,13,cc,ccc
-d,14,dd,ddd
-e,15,ee,eee
-f,16,ff,fff
-g,17,gg,ggg
-h,18,hh,hhh
-i,19,ii,iii
-j,20,jj,jjj

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/IUD/comp3.csv
--
diff --git a/integration/spark/src/test/resources/IUD/comp3.csv 
b/integration/spark/src/test/resources/IUD/comp3.csv
deleted file mode 100644
index a555f71..000
--- a/integration/spark/src/test/resources/IUD/comp3.csv
+++ /dev/null
@@ -1,11 +0,0 @@
-c1,c2,c3,c5
-a,21,aa,aaa
-b,22,bb,bbb
-c,23,cc,ccc
-d,24,dd,ddd
-e,25,ee,eee
-f,26,ff,fff
-g,27,gg,ggg
-h,28,hh,hhh
-i,29,ii,iii
-j,30,jj,jjj

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/IUD/comp4.csv
--
diff --git a/integration/spark/src/test/resources/IUD/comp4.csv 
b/integration/spark/src/test/resources/IUD/comp4.csv
deleted file mode 100644
index 0450a19..000
--- a/integration/spark/src/test/resources/IUD/comp4.csv
+++ /dev/null
@@ -1,11 +0,0 @@
-c1,c2,c3,c5
-a,31,aa,aaa
-b,32,bb,bbb
-c,33,cc,ccc
-d,34,dd,ddd
-e,35,ee,eee
-f,36,ff,fff
-g,37,gg,ggg
-h,38,hh,hhh
-i,39,ii,iii
-j,40,jj,jjj

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/IUD/dest.csv
--
diff --git a/integration/spark/src/test/resources/IUD/dest.csv 
b/integration/spark/src/test/resources/IUD/dest.csv
deleted file mode 100644
index 4ef9aa5..000
--- a/integration/spark/src/test/resources/IUD/dest.csv
+++ /dev/null
@@ -1,6 +0,0 @@
-c1,c2,c3,c5
-a,1,aa,aaa
-b,2,bb,bbb
-c,3,cc,ccc
-d,4,dd,ddd
-e,5,ee,eee
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/IUD/other.csv
--
diff --git a/integration/spark/src/test/resources/IUD/other.csv 
b/integration/spark/src/test/resources/IUD/other.csv
deleted file mod

[09/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
index 18ebea1..1fde335 100644
--- 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
+++ 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
@@ -18,8 +18,6 @@
  */
 package org.apache.carbondata.spark.testsuite.allqueries
 
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.CarbonHiveContext._
 import org.apache.spark.sql.common.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
@@ -30,7 +28,7 @@ class InsertIntoCarbonTableTestCase extends QueryTest with 
BeforeAndAfterAll {
   override def beforeAll {
 sql("drop table if exists THive")
 sql("create table THive (imei string,deviceInformationId int,MAC 
string,deviceColor string,device_backColor string,modelId string,marketName 
string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series 
string,productionDate timestamp,bomCode string,internalModels string, 
deliveryTime string, channelsId string, channelsName string , deliveryAreaId 
string, deliveryCountry string, deliveryProvince string, deliveryCity 
string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, 
ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, 
ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet 
string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion 
string, Active_operaSysVersion string, Active_BacVerNumber string, 
Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer 
string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, 
Active_phonePADPartitionedVersions st
 ring, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), 
Latest_HOUR string, Latest_areaId string, Latest_country string, 
Latest_province string, Latest_city string, Latest_district string, 
Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, 
Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer 
string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, 
Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, 
Latest_phonePADPartitionedVersions string, Latest_operatorId string, 
gamePointDescription string,gamePointId double,contractNumber BigInt) ROW 
FORMAT DELIMITED FIELDS TERMINATED BY ','")
-sql("LOAD DATA local INPATH './src/test/resources/100_olap.csv' INTO TABLE 
THive")
+sql(s"LOAD DATA local INPATH '$resourcesPath/100_olap.csv' INTO TABLE 
THive")
   }
   test("insert from hive") {
 val timeStampPropOrig = 
CarbonProperties.getInstance().getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT)
@@ -63,7 +61,7 @@ class InsertIntoCarbonTableTestCase extends QueryTest with 
BeforeAndAfterAll {
  sql("drop table if exists TCarbonSource")
  sql("drop table if exists TCarbon")
  sql("create table TCarbonSource (imei string,deviceInformationId int,MAC 
string,deviceColor string,device_backColor string,modelId string,marketName 
string,AMSize string,ROMSize string,CUPAudit string,CPIClocked string,series 
string,productionDate timestamp,bomCode string,internalModels string, 
deliveryTime string, channelsId string, channelsName string , deliveryAreaId 
string, deliveryCountry string, deliveryProvince string, deliveryCity 
string,deliveryDistrict string, deliveryStreet string, oxSingleNumber string, 
ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, 
ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet 
string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion 
string, Active_operaSysVersion string, Active_BacVerNumber string, 
Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer 
string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, 
Active_phonePADPartitionedVe
 rsions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), 
Latest_HOUR string, Latest_areaId string, Latest_country string, 
Latest_province string, Latest_city string, Latest_district string, 
Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, 
Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer 
string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, 
Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, 
Latest_phonePADPartitionedVersions 

[11/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
 
b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
index 3630b61..5dab2d8 100644
--- 
a/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
+++ 
b/integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestComplexTypeQuery.scala
@@ -19,13 +19,8 @@
 
 package org.apache.carbondata.integration.spark.testsuite.complexType
 
-import java.io.File
-
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
 import org.apache.spark.sql.Row
-import org.apache.carbondata.core.carbon.CarbonTableIdentifier
-import 
org.apache.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension
+import org.apache.spark.sql.common.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 /**
@@ -43,29 +38,29 @@ class TestComplexTypeQuery extends QueryTest with 
BeforeAndAfterAll {
  sql("drop table if exists structusingarraycarbon").show
  sql("drop table if exists structusingarrayhive").show
  sql("create table complexcarbontable(deviceInformationId int, channelsId 
string, ROMSize string, ROMName String, purchasedate string, mobile 
struct, MAC array, locationinfo 
array>, proddate 
struct>, gamePointId 
double,contractNumber double)  STORED BY 'org.apache.carbondata.format'  
TBLPROPERTIES ('DICTIONARY_INCLUDE'='deviceInformationId', 
'DICTIONARY_EXCLUDE'='channelsId','COLUMN_GROUP'='(ROMSize,ROMName)')");
- sql("LOAD DATA local inpath './src/test/resources/complextypesample.csv' 
INTO table complexcarbontable  OPTIONS('DELIMITER'=',', 'QUOTECHAR'='\"', 
'FILEHEADER'='deviceInformationId,channelsId,ROMSize,ROMName,purchasedate,mobile,MAC,locationinfo,proddate,gamePointId,contractNumber',
 'COMPLEX_DELIMITER_LEVEL_1'='$', 'COMPLEX_DELIMITER_LEVEL_2'=':')");
+ sql("LOAD DATA local inpath '" + resourcesPath + "/complextypesample.csv' 
INTO table complexcarbontable  OPTIONS('DELIMITER'=',', 'QUOTECHAR'='\"', 
'FILEHEADER'='deviceInformationId,channelsId,ROMSize,ROMName,purchasedate,mobile,MAC,locationinfo,proddate,gamePointId,contractNumber',
 'COMPLEX_DELIMITER_LEVEL_1'='$', 'COMPLEX_DELIMITER_LEVEL_2'=':')");
  sql("create table complexhivetable(deviceInformationId int, channelsId 
string, ROMSize string, ROMName String, purchasedate string, mobile 
struct, MAC array, locationinfo 
array>, proddate 
struct>, gamePointId 
double,contractNumber double)row format delimited fields terminated by ',' 
collection items terminated by '$' map keys terminated by ':'")
- sql("LOAD DATA local inpath './src/test/resources/complextypesample.csv' 
INTO table complexhivetable");
+ sql(s"LOAD DATA local inpath '$resourcesPath/complextypesample.csv' INTO 
table complexhivetable");
  sql("create table complex_filter(test1 int, test2 array,test3 
array,test4 array,test5 array,test6 
array,test7 array) STORED BY 'org.apache.carbondata.format'")
- sql("LOAD DATA INPATH './src/test/resources/array1.csv'  INTO TABLE 
complex_filter options ('DELIMITER'=',', 'QUOTECHAR'='\"', 
'COMPLEX_DELIMITER_LEVEL_1'='$', 'FILEHEADER'= 
'test1,test2,test3,test4,test5,test6,test7')").show()
+ sql("LOAD DATA INPATH '" + resourcesPath + "/array1.csv'  INTO TABLE 
complex_filter options ('DELIMITER'=',', 'QUOTECHAR'='\"', 
'COMPLEX_DELIMITER_LEVEL_1'='$', 'FILEHEADER'= 
'test1,test2,test3,test4,test5,test6,test7')").show()
  
  sql("create table structusingarraycarbon (MAC 
struct,ActiveCountry:array>) STORED BY 
'org.apache.carbondata.format'");
- sql("LOAD DATA local INPATH './src/test/resources/struct_all.csv' INTO 
table structusingarraycarbon options ('DELIMITER'=',', 'QUOTECHAR'='\"', 
'FILEHEADER'='MAC','COMPLEX_DELIMITER_LEVEL_1'='$','COMPLEX_DELIMITER_LEVEL_2'='&')")
+ sql("LOAD DATA local INPATH '" + resourcesPath + "/struct_all.csv' INTO 
table structusingarraycarbon options ('DELIMITER'=',', 'QUOTECHAR'='\"', 
'FILEHEADER'='MAC','COMPLEX_DELIMITER_LEVEL_1'='$','COMPLEX_DELIMITER_LEVEL_2'='&')")
  sql("create table structusingarrayhive (MAC 
struct,ActiveCountry:array>)row format delimited 
fields terminated by ',' collection items terminated by '$' map keys terminated 
by '&'");
- sql("LOAD DATA local INPATH './src/test/resources/struct_all.csv' INTO 
table structusingarrayhive") 
+ sql("LOAD DATA local INPATH '" + resourcesPath + "/struct_all.csv' INTO 
table structusingarrayhive")
  
  sql("create table structusingstructCarbon(name struct

[13/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/join/employee.csv
--
diff --git a/integration/spark/src/test/resources/join/employee.csv 
b/integration/spark/src/test/resources/join/employee.csv
deleted file mode 100644
index c551653..000
--- a/integration/spark/src/test/resources/join/employee.csv
+++ /dev/null
@@ -1,1000 +0,0 @@
-empid695,empname595,mobile15,color2,102868
-empid147,empname160,mobile99,color7,108485
-empid69,empname72,mobile87,color9,102202
-empid324,empname147,mobile87,color1,101000
-empid215,empname752,mobile47,color5,109786
-empid171,empname309,mobile34,color8,105698
-empid714,empname845,mobile21,color1,103623
-empid566,empname978,mobile88,color3,107882
-empid561,empname391,mobile79,color0,107908
-empid219,empname962,mobile80,color8,105802
-empid755,empname234,mobile41,color5,103068
-empid125,empname305,mobile10,color4,101992
-empid751,empname165,mobile26,color0,108878
-empid864,empname646,mobile88,color8,109367
-empid981,empname273,mobile88,color5,108040
-empid957,empname752,mobile63,color9,105241
-empid292,empname709,mobile88,color0,101106
-empid500,empname846,mobile67,color6,101204
-empid187,empname513,mobile1,color7,105432
-empid11,empname1,mobile26,color8,102510
-empid59,empname837,mobile76,color1,103627
-empid700,empname535,mobile77,color0,108892
-empid558,empname30,mobile36,color5,101414
-empid584,empname29,mobile18,color9,105615
-empid811,empname447,mobile80,color1,106837
-empid285,empname650,mobile66,color1,103265
-empid262,empname521,mobile42,color5,105449
-empid594,empname581,mobile2,color2,109862
-empid232,empname227,mobile3,color4,103680
-empid766,empname279,mobile13,color4,102405
-empid903,empname682,mobile22,color8,103338
-empid648,empname347,mobile47,color5,10600
-empid323,empname330,mobile55,color8,10163
-empid418,empname799,mobile16,color4,108136
-empid863,empname185,mobile26,color2,105223
-empid145,empname365,mobile95,color4,101611
-empid897,empname997,mobile10,color3,10814
-empid180,empname946,mobile69,color4,109870
-empid419,empname981,mobile46,color8,105424
-empid610,empname354,mobile79,color3,102553
-empid840,empname796,mobile98,color4,109111
-empid735,empname881,mobile2,color1,106732
-empid1,empname628,mobile30,color0,109646
-empid235,empname717,mobile88,color7,101120
-empid374,empname922,mobile58,color2,105478
-empid145,empname10,mobile77,color7,107407
-empid289,empname377,mobile95,color6,106291
-empid812,empname164,mobile86,color6,10515
-empid876,empname974,mobile61,color9,106524
-empid820,empname862,mobile34,color5,106506
-empid372,empname379,mobile47,color8,106829
-empid337,empname52,mobile65,color6,107040
-empid656,empname420,mobile34,color0,104734
-empid665,empname384,mobile21,color2,106826
-empid78,empname321,mobile38,color6,109877
-empid639,empname346,mobile85,color6,109262
-empid640,empname810,mobile29,color0,104336
-empid985,empname188,mobile83,color8,101831
-empid665,empname900,mobile50,color9,10468
-empid298,empname264,mobile29,color5,105059
-empid154,empname655,mobile76,color7,101820
-empid329,empname385,mobile61,color6,109113
-empid223,empname973,mobile63,color8,101940
-empid540,empname962,mobile76,color1,101008
-empid745,empname221,mobile90,color3,10625
-empid665,empname828,mobile77,color9,10820
-empid424,empname422,mobile52,color2,103268
-empid484,empname788,mobile97,color9,108554
-empid889,empname159,mobile63,color1,104392
-empid353,empname11,mobile38,color3,105506
-empid636,empname93,mobile70,color4,102070
-empid9,empname754,mobile50,color5,103958
-empid11,empname670,mobile22,color6,107570
-empid248,empname931,mobile47,color1,102316
-empid520,empname146,mobile70,color6,106306
-empid2,empname27,mobile32,color9,103648
-empid629,empname790,mobile91,color8,102926
-empid27,empname911,mobile84,color1,104885
-empid405,empname601,mobile59,color8,105150
-empid291,empname747,mobile22,color5,106151
-empid553,empname277,mobile40,color5,102535
-empid927,empname125,mobile46,color0,106930
-empid527,empname595,mobile54,color5,109349
-empid334,empname732,mobile63,color0,109304
-empid965,empname459,mobile18,color4,102777
-empid3,empname38,mobile20,color5,102073
-empid865,empname839,mobile6,color7,101427
-empid579,empname704,mobile1,color0,101830
-empid979,empname756,mobile42,color3,107620
-empid956,empname18,mobile74,color5,102454
-empid821,empname126,mobile92,color3,102659
-empid845,empname862,mobile76,color0,10771
-empid550,empname827,mobile34,color1,107161
-empid350,empname110,mobile94,color0,103892
-empid433,empname140,mobile79,color4,10175
-empid851,empname214,mobile16,color5,103486
-empid813,empname817,mobile2,color2,103601
-empid20,empname604,mobile33,color3,105038
-empid638,empname298,mobile97,color2,104103
-empid936,empname522,mobile73,color0,109897
-empid780,empname939,mobile27,color6,10109
-empid613,empname876,mobile97,color7,107794
-empid641,empname105,mobile2,color7,10798
-empid928,empname20,mobile1,color3,106581
-em

[22/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
new file mode 100644
index 000..9596647
--- /dev/null
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.spark.testsuite.directdictionary
+
+import java.sql.Timestamp
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.common.util.QueryTest
+import org.apache.spark.sql.hive.HiveContext
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import 
org.apache.carbondata.core.keygenerator.directdictionary.timestamp.TimeStampGranularityConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+/**
+  * Test Class for detailed query on timestamp datatypes
+  *
+  *
+  */
+class TimestampDataTypeDirectDictionaryTest extends QueryTest with 
BeforeAndAfterAll {
+  var hiveContext: HiveContext = _
+
+  override def beforeAll {
+try {
+  CarbonProperties.getInstance()
+.addProperty(TimeStampGranularityConstants.CARBON_CUTOFF_TIMESTAMP, 
"2000-12-13 02:10.00.0")
+  CarbonProperties.getInstance()
+.addProperty(TimeStampGranularityConstants.CARBON_TIME_GRANULARITY,
+  TimeStampGranularityConstants.TIME_GRAN_SEC.toString
+)
+  CarbonProperties.getInstance().addProperty("carbon.direct.dictionary", 
"true")
+  sql("drop table if exists directDictionaryTable")
+  sql("drop table if exists directDictionaryTable_hive")
+  sql(
+"CREATE TABLE if not exists directDictionaryTable (empno int,doj 
Timestamp, salary int) " +
+  "STORED BY 'org.apache.carbondata.format'"
+  )
+
+  sql(
+"CREATE TABLE if not exists directDictionaryTable_hive (empno int,doj 
Timestamp, salary int) " +
+  "row format delimited fields terminated by ','"
+  )
+
+  CarbonProperties.getInstance()
+.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
"-MM-dd HH:mm:ss")
+  val csvFilePath = s"$resourcesPath/datasample.csv"
+  sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE 
directDictionaryTable OPTIONS" +
+"('DELIMITER'= ',', 'QUOTECHAR'= '\"')")
+  sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE 
directDictionaryTable_hive")
+} catch {
+  case x: Throwable => CarbonProperties.getInstance()
+.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, 
"dd-MM-")
+}
+  }
+
+  test("test direct dictionary for not null condition") {
+checkAnswer(
+  sql("select doj from directDictionaryTable where doj is not null"),
+  Seq(Row(Timestamp.valueOf("2016-03-14 15:00:09.0")),
+Row(Timestamp.valueOf("2016-04-14 15:00:09.0"))
+  )
+)
+  }
+
+  test("test direct dictionary for getting all the values") {
+checkAnswer(
+  sql("select doj from directDictionaryTable"),
+  Seq(Row(Timestamp.valueOf("2016-03-14 15:00:09.0")),
+Row(Timestamp.valueOf("2016-04-14 15:00:09.0")),
+Row(null)
+  )
+)
+  }
+
+  test("test direct dictionary for not equals condition") {
+checkAnswer(
+  sql("select doj from directDictionaryTable where doj != '2016-04-14 
15:00:09'"),
+  Seq(Row(Timestamp.valueOf("2016-03-14 15:00:09"))
+  )
+)
+  }
+
+  test("test direct dictionary for null condition") {
+checkAnswer(
+  sql("select doj from directDictionaryTable where doj is null"),
+  Seq(Row(null)
+  )
+)
+  }
+
+  test("select doj from directDictionaryTable wit

[12/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/sample.csv.gz
--
diff --git a/integration/spark/src/test/resources/sample.csv.gz 
b/integration/spark/src/test/resources/sample.csv.gz
deleted file mode 100644
index 80513b8..000
Binary files a/integration/spark/src/test/resources/sample.csv.gz and /dev/null 
differ

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/sampleComplex.csv
--
diff --git a/integration/spark/src/test/resources/sampleComplex.csv 
b/integration/spark/src/test/resources/sampleComplex.csv
deleted file mode 100644
index 95ba5f2..000
--- a/integration/spark/src/test/resources/sampleComplex.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-Id,number,name,gamePoint,mac
-1,1.5,Mark,1.2$2,3
-2,2,Twin,2.0$3,1.5
-3,3.0,Betty,5$2.0,2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/sample_withDelimiter017.csv
--
diff --git a/integration/spark/src/test/resources/sample_withDelimiter017.csv 
b/integration/spark/src/test/resources/sample_withDelimiter017.csv
deleted file mode 100644
index c40b03a..000
--- a/integration/spark/src/test/resources/sample_withDelimiter017.csv
+++ /dev/null
@@ -1,5 +0,0 @@
-idnamecityage
-1davidshenzhen31
-2easonshenzhen27
-3jarrywuhan35
-3jarryBangalore35

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/shortolap.csv
--
diff --git a/integration/spark/src/test/resources/shortolap.csv 
b/integration/spark/src/test/resources/shortolap.csv
deleted file mode 100644
index 1c237b8..000
--- a/integration/spark/src/test/resources/shortolap.csv
+++ /dev/null
@@ -1,5 +0,0 @@
-imei0,2147,9279,100.05,100.055,2016-05-01 12:25:36,aa,11
-imei1,-2148,-9807,10.05,100.05,2016-05-02 19:25:15,bb,22
-imei2,2147,9279,100.05,100.055,2016-05-01 12:25:36,cc,33
-imei3,-217,-9206,100.005,100.05,2016-05-02 19:25:15,dd,44
-imei4,10,0,15.5,45,2016-05-02 19:25:15,ee,55
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/stringtypecube.xml
--
diff --git a/integration/spark/src/test/resources/stringtypecube.xml 
b/integration/spark/src/test/resources/stringtypecube.xml
deleted file mode 100644
index dff7ccc..000
--- a/integration/spark/src/test/resources/stringtypecube.xml
+++ /dev/null
@@ -1,55 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/struct_all.csv
--
diff --git a/integration/spark/src/test/resources/struct_all.csv 
b/integration/spark/src/test/resources/struct_all.csv
deleted file mode 100644
index 02b6387..000
--- a/integration/spark/src/test/resources/struct_all.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-10&10$10&10
-20&20$20&20
-30&30$30&30
-40&40$40&40

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/structusingstruct.csv
--
diff --git a/integration/spark/src/test/resources/structusingstruct.csv 
b/integration/spark/src/test/resources/structusingstruct.csv
deleted file mode 100644
index 5a2bae1..000
--- a/integration/spark/src/test/resources/structusingstruct.csv
+++ /dev/null
@@ -1,2 +0,0 @@
-def$klm&abc$12
-pri$sac&pra$18
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/test.json
--
diff --git a/integration/spark/src/test/resources/test.json 
b/integration/spark/src/test/resources/test.json
deleted file mode 100644
index 50a859c..000
--- a/integration/spark/src/test/resources/test.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{"name":"Michael"}
-{"name":"Andy", "age":30}
-{"name":"Justin", "age":19}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/resources/timeStampFormatData1.csv
---

[28/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/sample.csv
--
diff --git a/integration/spark-common-test/src/test/resources/sample.csv 
b/integration/spark-common-test/src/test/resources/sample.csv
new file mode 100644
index 000..7c57de7
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/sample.csv
@@ -0,0 +1,5 @@
+id,name,city,age
+1,david,shenzhen,31
+2,eason,shenzhen,27
+3,jarry,wuhan,35
+3,jarry,Bangalore,35

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/sample.csv.bz2
--
diff --git a/integration/spark-common-test/src/test/resources/sample.csv.bz2 
b/integration/spark-common-test/src/test/resources/sample.csv.bz2
new file mode 100644
index 000..0c2417d
Binary files /dev/null and 
b/integration/spark-common-test/src/test/resources/sample.csv.bz2 differ

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/sample.csv.gz
--
diff --git a/integration/spark-common-test/src/test/resources/sample.csv.gz 
b/integration/spark-common-test/src/test/resources/sample.csv.gz
new file mode 100644
index 000..80513b8
Binary files /dev/null and 
b/integration/spark-common-test/src/test/resources/sample.csv.gz differ

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/sampleComplex.csv
--
diff --git a/integration/spark-common-test/src/test/resources/sampleComplex.csv 
b/integration/spark-common-test/src/test/resources/sampleComplex.csv
new file mode 100644
index 000..95ba5f2
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/sampleComplex.csv
@@ -0,0 +1,4 @@
+Id,number,name,gamePoint,mac
+1,1.5,Mark,1.2$2,3
+2,2,Twin,2.0$3,1.5
+3,3.0,Betty,5$2.0,2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/sample_withDelimiter017.csv
--
diff --git 
a/integration/spark-common-test/src/test/resources/sample_withDelimiter017.csv 
b/integration/spark-common-test/src/test/resources/sample_withDelimiter017.csv
new file mode 100644
index 000..c40b03a
--- /dev/null
+++ 
b/integration/spark-common-test/src/test/resources/sample_withDelimiter017.csv
@@ -0,0 +1,5 @@
+idnamecityage
+1davidshenzhen31
+2easonshenzhen27
+3jarrywuhan35
+3jarryBangalore35

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/shortolap.csv
--
diff --git a/integration/spark-common-test/src/test/resources/shortolap.csv 
b/integration/spark-common-test/src/test/resources/shortolap.csv
new file mode 100644
index 000..1c237b8
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/shortolap.csv
@@ -0,0 +1,5 @@
+imei0,2147,9279,100.05,100.055,2016-05-01 12:25:36,aa,11
+imei1,-2148,-9807,10.05,100.05,2016-05-02 19:25:15,bb,22
+imei2,2147,9279,100.05,100.055,2016-05-01 12:25:36,cc,33
+imei3,-217,-9206,100.005,100.05,2016-05-02 19:25:15,dd,44
+imei4,10,0,15.5,45,2016-05-02 19:25:15,ee,55
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/stringtypecube.xml
--
diff --git 
a/integration/spark-common-test/src/test/resources/stringtypecube.xml 
b/integration/spark-common-test/src/test/resources/stringtypecube.xml
new file mode 100644
index 000..dff7ccc
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/stringtypecube.xml
@@ -0,0 +1,55 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/struct_all.csv
--
diff --git a/integration/spark-common-test/src/test/resources/struct_all.csv 
b/integration/spark-common-test/src/test/resources/struct_all.csv
new file mode 100644
index 000..02b6387
--- 

[05/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
deleted file mode 100644
index 344b0c4..000
--- 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.filterexpr
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-/**
- * Test Class for filter expression query on multiple datatypes
- * @author N00902756
- *
- */
-
-class AllDataTypesTestCaseFilter extends QueryTest with BeforeAndAfterAll {
-
-  override def beforeAll {
-sql("CREATE TABLE alldatatypestableFilter (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int) 
STORED BY 'org.apache.carbondata.format'")
-sql("LOAD DATA local inpath './src/test/resources/data.csv' INTO TABLE 
alldatatypestableFilter OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-
-sql("CREATE TABLE alldatatypestableFilter_hive (empno int, empname String, 
designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname 
String, deptno int, deptname String, projectcode int, projectjoindate 
Timestamp, projectenddate Timestamp,attendance int,utilization int,salary 
int)row format delimited fields terminated by ','")
-sql("LOAD DATA local inpath './src/test/resources/datawithoutheader.csv' 
INTO TABLE alldatatypestableFilter_hive");
-
-  }
-
-  test("select empno,empname,utilization,count(salary),sum(empno) from 
alldatatypestableFilter where empname in ('arvind','ayushi') group by 
empno,empname,utilization") {
-checkAnswer(
-  sql("select empno,empname,utilization,count(salary),sum(empno) from 
alldatatypestableFilter where empname in ('arvind','ayushi') group by 
empno,empname,utilization"),
-  sql("select empno,empname,utilization,count(salary),sum(empno) from 
alldatatypestableFilter_hive where empname in ('arvind','ayushi') group by 
empno,empname,utilization"))
-  }
-  
-  test("select empno,empname from alldatatypestableFilter where 
regexp_replace(workgroupcategoryname, 'er', 'ment') NOT IN ('development')") {
-checkAnswer(
-  sql("select empno,empname from alldatatypestableFilter where 
regexp_replace(workgroupcategoryname, 'er', 'ment') NOT IN ('development')"),
-  sql("select empno,empname from alldatatypestableFilter_hive where 
regexp_replace(workgroupcategoryname, 'er', 'ment') NOT IN ('development')"))
-  }
-  
-  test("select empno,empname from alldatatypescubeFilter where 
regexp_replace(workgroupcategoryname, 'er', 'ment') != 'development'") {
-checkAnswer(
-  sql("select empno,empname from alldatatypestableFilter where 
regexp_replace(workgroupcategoryname, 'er', 'ment') != 'development'"),
-  sql("select empno,empname from alldatatypestableFilter_hive where 
regexp_replace(workgroupcategoryname, 'er', 'ment') != 'development'"))
-  }
-  
-  override def afterAll {
-sql("drop table alldatatypestableFilter")
-sql("drop table alldatatypestableFilter_hive")
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/CountStarTestCase.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/filterexpr/CountStarTestCas

[38/38] incubator-carbondata git commit: [CARBONDATA-601]reuse test case for integration module This closes #481

2017-01-07 Thread jackylk
[CARBONDATA-601]reuse test case for integration module This closes #481


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/49727a27
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/49727a27
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/49727a27

Branch: refs/heads/master
Commit: 49727a273c46b7bd8c896a2177be0c2ea14134f7
Parents: b0750c1 af2f204
Author: jackylk 
Authored: Sun Jan 8 00:35:50 2017 +0800
Committer: jackylk 
Committed: Sun Jan 8 00:35:50 2017 +0800

--
 .../scan/expression/ExpressionResult.java   |3 +
 integration/spark-common-test/pom.xml   |  232 
 .../spark/load/CarbonLoaderUtilTest.java|  419 +++
 .../validation/FileFooterValidator.java |  157 +++
 .../src/test/resources/100_olap.csv |   99 ++
 .../src/test/resources/10dim_4msr.csv   | 1000 +++
 .../src/test/resources/IUD/T_Hive1.csv  |   10 +
 .../src/test/resources/IUD/comp1.csv|   11 +
 .../src/test/resources/IUD/comp2.csv|   11 +
 .../src/test/resources/IUD/comp3.csv|   11 +
 .../src/test/resources/IUD/comp4.csv|   11 +
 .../src/test/resources/IUD/dest.csv |6 +
 .../src/test/resources/IUD/other.csv|3 +
 .../src/test/resources/IUD/sample.csv   |4 +
 .../src/test/resources/IUD/sample_updated.csv   |2 +
 .../src/test/resources/IUD/source2.csv  |3 +
 .../src/test/resources/IUD/source3.csv  |7 +
 .../src/test/resources/IUD/update01.csv |6 +
 .../src/test/resources/OLDFORMATTABLE.csv   |   34 +
 .../src/test/resources/OLDFORMATTABLEHIVE.csv   |   33 +
 .../test/resources/Test_Data1_Logrithmic.csv|3 +
 .../src/test/resources/alldatatypescube.xml |  109 ++
 .../20160423/1400_1405/complex.dictionary   |   20 +
 .../sample/20160423/1400_1405/sample.dictionary |9 +
 .../src/test/resources/array1.csv   |2 +
 .../src/test/resources/arrayColumnEmpty.csv |   21 +
 .../src/test/resources/avgTest.csv  |   16 +
 .../test/resources/badrecords/datasample.csv|7 +
 .../badrecords/emptyTimeStampValue.csv  |8 +
 .../test/resources/badrecords/emptyValues.csv   |8 +
 .../badrecords/insufficientColumns.csv  |4 +
 .../resources/badrecords/seriazableValue.csv|3 +
 .../src/test/resources/bigIntData.csv   |   14 +
 .../src/test/resources/bigIntDataWithHeader.csv |   13 +
 .../test/resources/bigIntDataWithoutHeader.csv  |   12 +
 .../src/test/resources/big_int_Decimal.csv  |3 +
 .../src/test/resources/channelsId.csv   |   10 +
 .../src/test/resources/character_carbon.csv |   33 +
 .../src/test/resources/character_hive.csv   |   32 +
 .../test/resources/columndictionary/country.csv |5 +
 .../test/resources/columndictionary/name.csv|   10 +
 .../src/test/resources/comment.csv  |5 +
 .../test/resources/compaction/compaction1.csv   |6 +
 .../compaction/compaction1_forhive.csv  |5 +
 .../test/resources/compaction/compaction2.csv   |6 +
 .../test/resources/compaction/compaction3.csv   |6 +
 .../resources/compaction/compactioncard2.csv|  257 
 .../compaction/compactioncard2_forhive.csv  |  256 
 .../src/test/resources/complexTypeDecimal.csv   |9 +
 .../test/resources/complexTypeDecimalNested.csv |9 +
 .../resources/complexTypeDecimalNestedHive.csv  |8 +
 .../src/test/resources/complexdata.csv  |  100 ++
 .../src/test/resources/complexdata1.csv |   54 +
 .../src/test/resources/complexdata2.csv |   46 +
 .../src/test/resources/complexdatareordered.csv |   10 +
 .../test/resources/complexdatastructextra.csv   |   10 +
 .../complextypediffentcolheaderorder.csv|  100 ++
 .../src/test/resources/complextypesample.csv|   50 +
 .../complextypespecialchardelimiter.csv |   50 +
 .../src/test/resources/data.csv |   11 +
 .../src/test/resources/data2.csv|4 +
 .../src/test/resources/data2_DiffTimeFormat.csv |4 +
 .../src/test/resources/dataDiff.csv | 1001 +++
 .../src/test/resources/dataIncrement.csv|   21 +
 .../src/test/resources/dataWithEmptyRows.csv|2 +
 .../test/resources/dataWithNullFirstLine.csv|   11 +
 .../src/test/resources/dataWithSingleQuote.csv  |7 +
 .../src/test/resources/data_alltypes.csv|   10 +
 .../src/test/resources/data_withCAPSHeader.csv  |3 +
 .../src/test/resources/data_withMixedHeader.csv |3 +
 .../src/test/resources/datadelimiter.csv|   11 +
 .../src/test/resources/datanullmeasurecol.csv   |3 +
 .../src/test/resources/dataretention1.csv   |   11 +
 .../src/test/resources

[32/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/dataDiff.csv
--
diff --git a/integration/spark-common-test/src/test/resources/dataDiff.csv 
b/integration/spark-common-test/src/test/resources/dataDiff.csv
new file mode 100644
index 000..6407b10
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/dataDiff.csv
@@ -0,0 +1,1001 @@
+ID,date,country,name,phonetype,serialname,salary
+1,2015/7/23,china,aaa1,phone197,ASD69643,15000
+2,2015/7/24,china,aaa2,phone756,ASD42892,15001
+3,2015/7/25,china,aaa3,phone1904,ASD37014,15002
+4,2015/7/26,china,aaa4,phone2435,ASD66902,15003
+5,2015/7/27,china,aaa5,phone2441,ASD90633,15004
+6,2015/7/28,china,aaa6,phone294,ASD59961,15005
+7,2015/7/29,china,aaa7,phone610,ASD14875,15006
+8,2015/7/30,china,aaa8,phone1848,ASD57308,15007
+9,2015/7/18,china,aaa9,phone706,ASD86717,15008
+10,2015/7/19,usa,aaa10,phone685,ASD30505,15009
+11,2015/7/18,china,aaa11,phone1554,ASD26101,15010
+12,2015/7/19,china,aaa12,phone1781,ASD85711,15011
+13,2015/7/20,china,aaa13,phone943,ASD39200,15012
+14,2015/7/21,china,aaa14,phone1954,ASD80468,15013
+15,2015/7/22,china,aaa15,phone451,ASD1954,15014
+16,2015/7/23,china,aaa16,phone390,ASD38513,15015
+17,2015/7/24,china,aaa17,phone1929,ASD86213,15016
+18,2015/7/25,usa,aaa18,phone910,ASD88812,15017
+19,2015/7/26,china,aaa19,phone2151,ASD9316,15018
+20,2015/7/27,china,aaa20,phone2625,ASD62597,15019
+21,2015/7/28,china,aaa21,phone1371,ASD27896,15020
+22,2015/7/29,china,aaa22,phone945,ASD79760,15021
+23,2015/7/30,china,aaa23,phone2177,ASD45410,15022
+24,2015/7/31,china,aaa24,phone1586,ASD80645,15023
+25,2015/8/1,china,aaa25,phone1310,ASD36408,15024
+26,2015/8/2,china,aaa26,phone1579,ASD14571,15025
+27,2015/8/3,china,aaa27,phone2123,ASD36243,15026
+28,2015/8/4,china,aaa28,phone2334,ASD57825,15027
+29,2015/8/5,china,aaa29,phone1166,ASD26161,15028
+30,2015/8/6,china,aaa30,phone2248,ASD47899,15029
+31,2015/8/7,china,aaa31,phone475,ASD89811,15030
+32,2015/8/8,china,aaa32,phone2499,ASD87974,15031
+33,2015/8/9,china,aaa33,phone2333,ASD62408,15032
+34,2015/8/10,china,aaa34,phone1128,ASD73138,15033
+35,2015/8/11,china,aaa35,phone1063,ASD29573,15034
+36,2015/8/12,china,aaa36,phone1633,ASD82574,15035
+37,2015/8/13,china,aaa37,phone775,ASD47938,15036
+38,2015/8/14,china,aaa38,phone817,ASD40947,15037
+39,2015/8/15,china,aaa39,phone2221,ASD6379,15038
+40,2015/8/16,china,aaa40,phone2289,ASD48374,15039
+41,2015/8/17,china,aaa41,phone599,ASD44560,15040
+42,2015/8/18,china,aaa42,phone384,ASD613,15041
+43,2015/8/19,china,aaa43,phone731,ASD66050,15042
+44,2015/8/20,china,aaa44,phone2128,ASD39759,15043
+45,2015/8/21,china,aaa45,phone1503,ASD31200,15044
+46,2015/8/22,china,aaa46,phone1833,ASD22945,15045
+47,2015/8/23,china,aaa47,phone2346,ASD80162,15046
+48,2015/8/24,china,aaa48,phone2714,ASD27822,15047
+49,2015/8/25,china,aaa49,phone1582,ASD21279,15048
+50,2015/8/26,china,aaa50,phone83,ASD17242,15049
+51,2015/8/27,china,aaa51,phone54,ASD29131,15050
+52,2015/8/28,china,aaa52,phone526,ASD73647,15051
+53,2015/8/29,china,aaa53,phone1308,ASD80493,15052
+54,2015/8/30,china,aaa54,phone2785,ASD30573,15053
+55,2015/8/31,china,aaa55,phone2133,ASD49757,15054
+56,2015/9/1,china,aaa56,phone871,ASD54753,15055
+57,2015/9/2,china,aaa57,phone1570,ASD25758,15056
+58,2015/9/3,china,aaa58,phone434,ASD30291,15057
+59,2015/9/4,china,aaa59,phone2023,ASD60739,15058
+60,2015/9/5,china,aaa60,phone1755,ASD4955,15059
+61,2015/9/6,china,aaa61,phone1120,ASD41678,15060
+62,2015/9/7,china,aaa62,phone526,ASD73647,15061
+63,2015/9/8,china,aaa63,phone111,ASD20917,15062
+64,2015/9/9,china,aaa64,phone2477,ASD78171,15063
+65,2015/9/10,china,aaa65,phone1458,ASD3023,15064
+66,2015/9/11,china,aaa66,phone33,ASD54379,15065
+67,2015/9/12,china,aaa67,phone1710,ASD65296,15066
+68,2015/9/13,china,aaa68,phone118,ASD4568,15067
+69,2015/9/14,china,aaa69,phone2772,ASD42161,15068
+70,2015/9/15,china,aaa70,phone1013,ASD88261,15069
+71,2015/9/16,china,aaa71,phone1606,ASD33903,15070
+72,2015/9/17,china,aaa72,phone2800,ASD60308,15071
+73,2015/9/18,china,aaa73,phone2461,ASD14645,15072
+74,2015/9/19,china,aaa74,phone1038,ASD66620,15073
+75,2015/9/20,china,aaa75,phone2882,ASD23220,15074
+76,2015/9/21,china,aaa76,phone1665,ASD31618,15075
+77,2015/9/22,china,aaa77,phone2991,ASD37964,15076
+78,2015/9/23,china,aaa78,phone620,ASD7257,15077
+79,2015/9/24,china,aaa79,phone1097,ASD12510,15078
+80,2015/9/25,usa,aaa80,phone1668,ASD41149,15079
+81,2015/9/26,china,aaa81,phone2869,ASD95862,15080
+82,2015/9/27,china,aaa82,phone2506,ASD77011,15081
+83,2015/9/28,china,aaa83,phone2897,ASD6674,15082
+84,2015/9/29,china,aaa84,phone954,ASD72595,15083
+85,2015/9/30,china,aaa85,phone1382,ASD86617,15084
+86,2015/10/1,china,aaa86,phone284,ASD31454,15085
+87,2015/10/2,china,aaa87,phone1000,ASD1404,15086
+88,2015/10/3,china,aaa88,phone1813,ASD6955,15087
+89,2015/10/4,china,aaa89,phone2301,ASD14198,15088
+90,

[24/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
new file mode 100644
index 000..7396594
--- /dev/null
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataLoadWithColumnsMoreThanSchema.scala
@@ -0,0 +1,139 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.spark.testsuite.dataload
+
+import org.apache.spark.sql.common.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
+
+/**
+ * This class will test data load in which number of columns in data are more 
than
+ * the number of columns in schema
+ */
+class TestDataLoadWithColumnsMoreThanSchema extends QueryTest with 
BeforeAndAfterAll {
+
+  override def beforeAll {
+sql("DROP TABLE IF EXISTS char_test")
+sql("DROP TABLE IF EXISTS hive_char_test")
+sql("CREATE TABLE char_test (imei string,age int,task bigint,num 
double,level decimal(10,3),productdate timestamp,mark int,name string)STORED BY 
'org.apache.carbondata.format'")
+sql("CREATE TABLE hive_char_test (imei string,age int,task bigint,num 
double,level decimal(10,3),productdate timestamp,mark int,name string)row 
format delimited fields terminated by ','")
+sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/character_carbon.csv' into 
table char_test")
+sql(s"LOAD DATA local inpath '$resourcesPath/character_hive.csv' INTO 
table hive_char_test")
+  }
+
+  test("test count(*) to check for data loss") {
+checkAnswer(sql("select count(*) from char_test"),
+  sql("select count(*) from hive_char_test"))
+  }
+
+  test("test for invalid value of maxColumns") {
+sql("DROP TABLE IF EXISTS max_columns_test")
+sql("CREATE TABLE max_columns_test (imei string,age int,task bigint,num 
double,level decimal(10,3),productdate timestamp,mark int,name string)STORED BY 
'org.apache.carbondata.format'")
+try {
+  sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/character_carbon.csv' into 
table max_columns_test options('MAXCOLUMNS'='avfgd')")
+  assert(false)
+} catch {
+  case _: Throwable => assert(true)
+}
+  }
+
+  test("test for valid value of maxColumns") {
+sql("DROP TABLE IF EXISTS valid_max_columns_test")
+sql("CREATE TABLE valid_max_columns_test (imei string,age int,task 
bigint,num double,level decimal(10,3),productdate timestamp,mark int,name 
string)STORED BY 'org.apache.carbondata.format'")
+try {
+  sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/character_carbon.csv' into 
table valid_max_columns_test options('MAXCOLUMNS'='400')")
+  checkAnswer(sql("select count(*) from valid_max_columns_test"),
+sql("select count(*) from hive_char_test"))
+} catch {
+  case _: Throwable => assert(false)
+}
+  }
+
+  test("test with invalid maxColumns value") {
+sql(
+  "CREATE TABLE max_columns_value_test (imei string,age int,task 
bigint,num double,level " +
+  "decimal(10,3),productdate timestamp,mark int,name string) STORED BY 
'org.apache.carbondata" +
+  ".format'")
+try {
+  sql(
+s"LOAD DATA LOCAL INPATH '$resourcesPath/character_carbon.csv' into 
table " +
+"max_columns_value_test 
options('FILEHEADER='imei,age','MAXCOLUMNS'='2')")
+  throw new MalformedCarbonCommandException("Invalid")
+} catch {
+  case me: MalformedCarbonCommandException =>
+assert(false)
+  case _: Throwable => assert(true)
+}
+  }
+
+  test("test for maxcolumns option value greater than threshold value for 
maxcolumns") {
+sql("DROP TABLE IF EXISTS valid_max_columns_test")
+sql("CREATE TABLE valid_max_columns_test (imei string,a

[26/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
new file mode 100644
index 000..a0f12f5
--- /dev/null
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
@@ -0,0 +1,1163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.spark.testsuite.allqueries
+
+import org.apache.spark.sql.common.util.QueryTest
+import org.apache.spark.sql.{Row, SaveMode}
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+/**
+  * Test Class for all query on multiple datatypes
+  *
+  */
+class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
+
+  override def beforeAll {
+clean
+
+sql("create table if not exists Carbon_automation_test (imei 
string,deviceInformationId int,MAC string,deviceColor string,device_backColor 
string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit 
string,CPIClocked string,series string,productionDate timestamp,bomCode 
string,internalModels string, deliveryTime string, channelsId string, 
channelsName string , deliveryAreaId string, deliveryCountry string, 
deliveryProvince string, deliveryCity string,deliveryDistrict string, 
deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, 
ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity 
string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, 
Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion 
string, Active_BacVerNumber string, Active_BacFlashVer string, 
Active_webUIVersion string, Active_webUITypeCarrVer 
string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, 
Active
 _phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, 
Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country 
string, Latest_province string, Latest_city string, Latest_district string, 
Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, 
Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer 
string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, 
Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, 
Latest_phonePADPartitionedVersions string, Latest_operatorId string, 
gamePointDescription string, gamePointId int,contractNumber int) STORED BY 
'org.apache.carbondata.format' 
TBLPROPERTIES('DICTIONARY_INCLUDE'='Latest_MONTH,Latest_DAY,deviceInformationId')")
+CarbonProperties.getInstance()
+  
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
+sql(s"""LOAD DATA LOCAL INPATH '$resourcesPath/100_olap.csv' INTO table 
Carbon_automation_test options('DELIMITER'= ',', 'QUOTECHAR'= '\"', 
'FILEHEADER'= 
'imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_dis

[02/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
--
diff --git 
a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
 
b/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
deleted file mode 100644
index 36f9006..000
--- 
a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
+++ /dev/null
@@ -1,1166 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.allqueries
-
-import java.io.File
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
-/**
-  * Test Class for all query on multiple datatypes
-  *
-  */
-class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
-
-  override def beforeAll {
-dropAllTable
-val currentDirectory = new File(this.getClass.getResource("/").getPath + 
"/../../../spark")
-  .getCanonicalPath
-
-sql("drop table if exists Carbon_automation_test")
-sql("drop table if exists Carbon_automation_hive")
-sql("drop table if exists Carbon_automation_test_hive")
-
-sql("create table if not exists Carbon_automation_test (imei 
string,deviceInformationId int,MAC string,deviceColor string,device_backColor 
string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit 
string,CPIClocked string,series string,productionDate timestamp,bomCode 
string,internalModels string, deliveryTime string, channelsId string, 
channelsName string , deliveryAreaId string, deliveryCountry string, 
deliveryProvince string, deliveryCity string,deliveryDistrict string, 
deliveryStreet string, oxSingleNumber string,contractNumber int, 
ActiveCheckTime string, ActiveAreaId string, ActiveCountry string, 
ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet 
string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion 
string, Active_operaSysVersion string, Active_BacVerNumber string, 
Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer 
string,Active_webTypeDataVerNumber string, Active_operatorsVer
 sion string, Active_phonePADPartitionedVersions string, Latest_YEAR int, 
Latest_MONTH int, Latest_DAY int, Latest_HOUR string, Latest_areaId string, 
Latest_country string, Latest_province string, Latest_city string, 
Latest_district string, Latest_street string, Latest_releaseId string, 
Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber 
string, Latest_BacFlashVer string, Latest_webUIVersion string, 
Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, 
Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, 
Latest_operatorId string, gamePointId int,gamePointDescription string)  USING 
org.apache.spark.sql.CarbonSource OPTIONS('dbName'='default', 
'tableName'='Carbon_automation_test','DICTIONARY_INCLUDE'='Latest_MONTH,Latest_DAY,deviceInformationId')")
-
-CarbonProperties.getInstance()
-  
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
-
-sql("create table if not exists Carbon_automation_hive (imei 
string,deviceInformationId int,MAC string,deviceColor string,device_backColor 
string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit 
string,CPIClocked string,series string,productionDate timestamp,bomCode 
string,internalModels string, deliveryTime string, channelsId string, 
channelsName string , deliveryAreaId string, deliveryCountry string, 
deliveryProvince string, deliveryCity string,deliveryDistrict string, 
deliveryStreet string, oxSingleNumber string,contractNumber int, 
ActiveCheckTime string, ActiveAre

[08/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBoundaryConditionsTest.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBoundaryConditionsTest.scala
 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBoundaryConditionsTest.scala
deleted file mode 100644
index fad2ba2..000
--- 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBoundaryConditionsTest.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.spark.testsuite.datacompaction
-
-import java.io.File
-
-import org.apache.carbondata.core.updatestatus.SegmentStatusManager
-
-import scala.collection.JavaConverters._
-
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.carbon.{AbsoluteTableIdentifier, 
CarbonTableIdentifier}
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
-/**
- * FT for data compaction Boundary condition verification.
- */
-class DataCompactionBoundaryConditionsTest extends QueryTest with 
BeforeAndAfterAll {
-  val currentDirectory = new File(this.getClass.getResource("/").getPath + 
"/../../")
-.getCanonicalPath
-  val resource = currentDirectory + "/src/test/resources/"
-
-  val storeLocation = new File(this.getClass.getResource("/").getPath + 
"/../test").getCanonicalPath
-  val carbonTableIdentifier: CarbonTableIdentifier =
-new CarbonTableIdentifier("default", "boundarytest".toLowerCase(), "1")
-
-  override def beforeAll {
-CarbonProperties.getInstance()
-  .addProperty(CarbonCommonConstants.COMPACTION_SEGMENT_LEVEL_THRESHOLD, 
"2,2")
-sql("drop table if exists  boundarytest")
-CarbonProperties.getInstance()
-  .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "mm/dd/")
-sql(
-  "CREATE TABLE IF NOT EXISTS boundarytest (country String, ID Int, date " 
+
-  "Timestamp, name " +
-  "String, " +
-  "phonetype String, serialname String, salary Int) STORED BY 
'org.apache.carbondata" +
-  ".format'"
-)
-
-  }
-
-  /**
-   * Compaction verificatoin in case of no loads.
-   */
-  test("check if compaction is completed correctly.") {
-
-try {
-  sql("alter table boundarytest compact 'minor'")
-  sql("alter table boundarytest compact 'major'")
-}
-catch {
-  case e: Exception =>
-assert(false)
-}
-  }
-
-  /**
-   * Compaction verificatoin in case of one loads.
-   */
-  test("check if compaction is completed correctly for one load.") {
-
-val currentDirectory = new File(this.getClass.getResource("/").getPath + 
"/../../")
-  .getCanonicalPath
-var csvFilePath1 = currentDirectory + 
"/src/test/resources/compaction/compaction1.csv"
-
-
-sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE boundarytest 
" +
-"OPTIONS" +
-"('DELIMITER'= ',', 'QUOTECHAR'= '\"')"
-)
-sql("alter table boundarytest compact 'minor'")
-sql("alter table boundarytest compact 'major'")
-
-  }
-
-
-  override def afterAll {
-sql("drop table if exists  boundarytest")
-CarbonProperties.getInstance()
-  .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-")
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionCardinalityBoundryTest.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionCardinalityBoundryTest.scala
 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompaction

[33/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/complexdata.csv
--
diff --git a/integration/spark-common-test/src/test/resources/complexdata.csv 
b/integration/spark-common-test/src/test/resources/complexdata.csv
new file mode 100644
index 000..c089e93
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/complexdata.csv
@@ -0,0 +1,100 @@
+1,109,4ROM size,29-11-2015,1AA1$2BB1,MAC1$MAC2$MAC3,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,29-11-2015$29-11-2015:29-11-2015,109,2738.562
+10,93,1ROM size,29-11-2015,1AA10$2BB10,MAC4$MAC5$MAC6,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,30-11-2015$30-11-2015:30-11-2015,93,1714.635
+100,2591,2ROM size,29-11-2015,1AA100$2BB100,MAC7$MAC8$MAC9,4:Chinese:Hunan 
Province:xiangtan:xiangtan:jianshelu$4:India:Hunan 
Province:xiangtan:xiangtan:jianshelu,01-12-2015$01-12-2015:01-12-2015,2591,1271
+1000,2531,2ROM size,29-11-2015,1AA1000$2BB1000,MAC10$$MAC12,6:Chinese:Hubei 
Province:wuhan:hongshan:hongshan$6:India:New 
Delhi:wuhan:hongshan:hongshan,02-12-2015$02-12-2015:02-12-2015,2531,692
+1,2408,0ROM 
size,29-11-2015,1AA1$2BB1,MAC13$$MAC15,2:Chinese:Guangdong 
Province:guangzhou:longhua:mingzhi$2:India:Guangdong 
Province:guangzhou:longhua:mingzhi,03-12-2015$03-12-2015:03-12-2015,2408,2175
+10,1815,0ROM 
size,29-11-2015,1AA10$2BB10,MAC16$$MAC18,6:Chinese:Hubei 
Province:wuhan:hongshan:hongshan$6:India:New 
Delhi:wuhan:hongshan:hongshan,04-12-2015$04-12-2015:04-12-2015,1815,136
+100,2479,4ROM 
size,29-11-2015,1AA100$2BB100,MAC19$$MAC21,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,05-12-2015$05-12-2015:05-12-2015,2479,1600
+11,1845,7ROM size,29-11-2015,1AA11$,MAC22$$MAC24,4:Chinese:Hunan 
Province:xiangtan:xiangtan:jianshelu$4:India:Hunan 
Province:xiangtan:xiangtan:jianshelu,06-12-2015$06-12-2015:06-12-2015,1845,505
+12,2008,1ROM size,29-11-2015,1AA12$,MAC25$$MAC27,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,07-12-2015$07-12-2015:07-12-2015,2008,1341
+13,1121,5ROM size,29-11-2015,1AA13$,MAC28$$MAC30,5:Chinese:Hunan 
Province:zhuzhou:tianyuan:tianyua$5:India:Hunan 
Province:zhuzhou:tianyuan:tianyua,08-12-2015$08-12-2015:08-12-2015,1121,2239
+14,1511,8ROM size,29-11-2015,1AA14$,MAC31$$MAC33,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,09-12-2015$09-12-2015:09-12-2015,1511,2970
+15,2759,0ROM size,29-11-2015,1AA15$,MAC34$$MAC36,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,10-12-2015$10-12-2015:10-12-2015,2759,2593
+16,2069,7ROM size,29-11-2015,1AA16$,MAC37$$MAC39,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,11-12-2015$11-12-2015:11-12-2015,2069,2572
+17,396,7ROM size,29-11-2015,1AA17$,MAC40$$MAC42,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,12-12-2015$12-12-2015:12-12-2015,396,1991
+18,104,2ROM size,29-11-2015,1AA18$,MAC43$$MAC45,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,13-12-2015$13-12-2015:13-12-2015,104,1442
+19,477,3ROM size,29-11-2015,1AA19$,MAC46$$MAC48,7:Chinese:Hubei 
Province:yichang:yichang:yichang$7:India:New 
Delhi:delhi:delhi:delhi,14-12-2015$14-12-2015:14-12-2015,477,1841
+10001,546,8ROM size,29-11-2015,1AA10001$2,MAC49$$MAC51,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,15-12-2015$15-12-2015:15-12-2015,546,298
+100010,2696,3ROM 
size,29-11-2015,1AA100010$2BB100010,MAC52$$MAC54,5:Chinese:Hunan 
Province:zhuzhou:tianyuan:tianyua$5:India:Hunan 
Province:zhuzhou:tianyuan:tianyua,16-12-2015$16-12-2015:16-12-2015,2696,79
+100011,466,2ROM 
size,29-11-2015,1AA100011$2BB100011,MAC55$$MAC57,2:Chinese:Guangdong 
Province:guangzhou:longhua:mingzhi$2:India:Guangdong 
Province:guangzhou:longhua:mingzhi,17-12-2015$17-12-2015:17-12-2015,466,202
+100012,2644,2ROM 
size,29-11-2015,1AA100012$2BB100012,MAC58$$MAC60,4:Chinese:Hunan 
Province:xiangtan:xiangtan:jianshelu$4:India:Hunan 
Province:xiangtan:xiangtan:jianshelu,18-12-2015$18-12-2015:18-12-2015,2644,568
+100013,2167,3ROM 
size,29-11-2015,1AA100013$2BB100013,MAC61$MAC62,3:Chinese:Hunan 
Province:changsha:yuhua:shazitang$3:India:Hunan 
Province:changsha:yuhua:shazitang,19-12-2015$19-12-2015:19-12-2015,2167,355
+100014,1069,7ROM 
size,29-11-2015,1AA100014$2BB100014,MAC64$MAC65,5:Chinese:Hunan 
Province:zhuzhou:tianyuan:tianyua$5:India:Hunan 
Province:zhuzhou:tianyuan:tianyua,20-12-2015$20-12-2015:20-12-2015,1069,151
+100015,1447,9ROM 
size,29-11-2015,1AA100015$2BB100015,MAC67$MAC68,4:

[06/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
deleted file mode 100644
index cdfe2e6..000
--- 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
+++ /dev/null
@@ -1,254 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.spark.testsuite.deleteTable
-
-import java.io.File
-
-import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-/**
- * test class for testing the create cube DDL.
- */
-class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
-
-  val currentDirectory = new File(this.getClass.getResource("/").getPath + 
"/../../")
-.getCanonicalPath
-  val resource = currentDirectory + "/src/test/resources/"
-
-  override def beforeAll: Unit = {
-
-sql("CREATE TABLE IF NOT EXISTS table1(empno Int, empname Array, 
designation String, doj Timestamp, "
-+ "workgroupcategory Int, workgroupcategoryname String, deptno Int, 
deptname String, projectcode Int, "
-+ "projectjoindate Timestamp, projectenddate Timestamp , attendance 
Int,utilization Int,salary Int )"
-+ " STORED BY 'org.apache.carbondata.format' ")
-sql("CREATE TABLE IF NOT EXISTS table2(empno Int, empname Array, 
designation String, doj Timestamp, "
-+ "workgroupcategory Int, workgroupcategoryname String, deptno Int, 
deptname String, projectcode Int, "
-+ "projectjoindate Timestamp, projectenddate Timestamp , attendance 
Int,utilization Int,salary Int )"
-+ " STORED BY 'org.apache.carbondata.format' ")
-
-  }
-
-  // normal deletion case
-  test("drop table Test with new DDL") {
-sql("drop table table1")
-  }
-  
-  test("test drop database cascade command") {
-sql("create database testdb")
-sql("use testdb")
-sql("CREATE TABLE IF NOT EXISTS testtable(empno Int, empname string, 
utilization Int,salary Int)"
-+ " STORED BY 'org.apache.carbondata.format' ")
-try {
-  sql("drop database testdb")
-  assert(false)
-} catch {
-  case e : Exception => 
-}
-sql("drop database testdb cascade")
-try {
-  sql("use testdb")
-  assert(false)
-} catch {
-  case e : Exception => 
-}
-sql("use default")
-  }
-
-  // deletion case with if exists
-  test("drop table if exists Test with new DDL") {
-sql("drop table if exists table2")
-
-  }
-
-  // try to delete after deletion with if exists
-  test("drop table after deletion with if exists with new DDL") {
-sql("drop table if exists table2")
-
-  }
-
-  // try to delete after deletion with out if exists. this should fail
-  test("drop table after deletion with new DDL") {
-try {
-  sql("drop table table2")
-  fail("failed") // this should not be executed as exception is expected
-}
-catch {
-  case e: Exception => // pass the test case as this is expected
-}
-
-
-  }
-
-  test("drop table using case insensitive table name") {
-// create table
-sql(
-  "CREATE table CaseInsensitiveTable (ID int, date String, country String, 
name " +
-  "String," +
-  "phonetype String, serialname String, salary int) stored by 
'org.apache.carbondata.format'" +
-  "TBLPROPERTIES('DICTIONARY_INCLUDE'='ID, salary')"
-)
-// table should drop wihout any error
-sql("drop table caseInsensitiveTable")
-
-// Now create same table, it should not give any error.
-sql(
-  "CREATE table CaseInsensitiveTable (ID int, date String, country String, 
name " +
-  "String," +
-  "phonetype String, serialname String, salary int) stored 

[10/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
index 274477c..3c52949 100644
--- 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
+++ 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
@@ -19,12 +19,10 @@
 
 package org.apache.carbondata.spark.testsuite.allqueries
 
-import java.io.File
-
-import org.apache.spark.sql.{Row, SaveMode}
-import org.apache.spark.sql.common.util.CarbonHiveContext._
 import org.apache.spark.sql.common.util.QueryTest
+import org.apache.spark.sql.{Row, SaveMode}
 import org.scalatest.BeforeAndAfterAll
+
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 
@@ -36,30 +34,15 @@ class AllDataTypesTestCaseAggregate extends QueryTest with 
BeforeAndAfterAll {
 
   override def beforeAll {
 clean
-val currentDirectory = new File(this.getClass.getResource("/").getPath + 
"/../../")
-  .getCanonicalPath
 
-sql("drop table if exists Carbon_automation_test")
-sql("drop table if exists Carbon_automation_hive")
-sql("drop table if exists Carbon_automation_test_hive")
-
-sql("create table if not exists Carbon_automation_test (imei 
string,deviceInformationId int,MAC string,deviceColor string,device_backColor 
string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit 
string,CPIClocked string,series string,productionDate timestamp,bomCode 
string,internalModels string, deliveryTime string, channelsId string, 
channelsName string , deliveryAreaId string, deliveryCountry string, 
deliveryProvince string, deliveryCity string,deliveryDistrict string, 
deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, 
ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity 
string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, 
Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion 
string, Active_BacVerNumber string, Active_BacFlashVer string, 
Active_webUIVersion string, Active_webUITypeCarrVer 
string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, 
Active
 _phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, 
Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country 
string, Latest_province string, Latest_city string, Latest_district string, 
Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, 
Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer 
string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, 
Latest_webTypeDataVerNumber string, Latest_operatorsVersion string, 
Latest_phonePADPartitionedVersions string, Latest_operatorId string, 
gamePointDescription string, gamePointId int,contractNumber int) STORED BY 
'org.apache.carbondata.format' 
TBLPROPERTIES('DICTIONARY_INCLUDE'='Latest_MONTH,Latest_DAY,deviceInformationId')");
+sql("create table if not exists Carbon_automation_test (imei 
string,deviceInformationId int,MAC string,deviceColor string,device_backColor 
string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit 
string,CPIClocked string,series string,productionDate timestamp,bomCode 
string,internalModels string, deliveryTime string, channelsId string, 
channelsName string , deliveryAreaId string, deliveryCountry string, 
deliveryProvince string, deliveryCity string,deliveryDistrict string, 
deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, 
ActiveAreaId string, ActiveCountry string, ActiveProvince string, Activecity 
string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId string, 
Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion 
string, Active_BacVerNumber string, Active_BacFlashVer string, 
Active_webUIVersion string, Active_webUITypeCarrVer 
string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, 
Active
 _phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, 
Latest_DAY int, Latest_HOUR string, Latest_areaId string, Latest_country 
string, Latest_province string, Latest_city string, Latest_district string, 
Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, 
Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer 
string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, 
Latest_webTypeDataVerNumber string, Late

[07/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
deleted file mode 100644
index ad695f4..000
--- 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
+++ /dev/null
@@ -1,695 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.spark.testsuite.dataload
-
-import java.io.File
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.CarbonHiveContext._
-import org.apache.spark.sql.common.util.QueryTest
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-import org.scalatest.BeforeAndAfterAll
-
-/**
-  * Test Class for data loading with hive syntax and old syntax
-  *
-  */
-class TestLoadDataWithHiveSyntax extends QueryTest with BeforeAndAfterAll {
-
-  override def beforeAll {
-sql("drop table if exists escapechar1")
-sql("drop table if exists escapechar2")
-sql("drop table if exists escapechar3")
-sql("drop table if exists specialcharacter1")
-sql("drop table if exists specialcharacter2")
-sql("drop table if exists collessthanschema")
-sql("drop table if exists decimalarray")
-sql("drop table if exists decimalstruct")
-sql("drop table if exists carbontable")
-sql("drop table if exists hivetable")
-sql("drop table if exists testtable")
-sql("drop table if exists testhivetable")
-sql("drop table if exists testtable1")
-sql("drop table if exists testhivetable1")
-sql("drop table if exists complexcarbontable")
-sql("drop table if exists complex_t3")
-sql("drop table if exists complex_hive_t3")
-sql("drop table if exists header_test")
-sql("drop table if exists duplicateColTest")
-sql("drop table if exists mixed_header_test")
-sql("drop table if exists primitivecarbontable")
-sql("drop table if exists UPPERCASEcube")
-sql("drop table if exists lowercaseCUBE")
-sql("drop table if exists carbontable1")
-sql("drop table if exists hivetable1")
-sql("drop table if exists comment_test")
-sql("drop table if exists smallinttable")
-sql("drop table if exists smallinthivetable")
-sql(
-  "CREATE table carbontable (empno int, empname String, designation 
String, doj String, " +
-"workgroupcategory int, workgroupcategoryname String, deptno int, 
deptname String, " +
-"projectcode int, projectjoindate String, projectenddate String, 
attendance int," +
-"utilization int,salary int) STORED BY 'org.apache.carbondata.format'"
-)
-sql(
-  "create table hivetable(empno int, empname String, designation string, 
doj String, " +
-"workgroupcategory int, workgroupcategoryname String,deptno int, 
deptname String, " +
-"projectcode int, projectjoindate String,projectenddate String, 
attendance String," +
-"utilization String,salary String)row format delimited fields 
terminated by ','"
-)
-
-  }
-
-  test("create table with smallint type and query smallint table") {
-sql(
-  "create table smallinttable(empno smallint, empname String, designation 
string, " +
-"doj String, workgroupcategory int, workgroupcategoryname 
String,deptno int, " +
-"deptname String, projectcode int, projectjoindate 
String,projectenddate String, " +
-"attendance String, utilization String,salary String)" +
-"STORED BY 'org.apache.carbondata.format'"
-)
-
-sql(
-  "create table smallinthivetable(empno smallint, empname String, 
designation string, " +
-"doj String, workgroupcategory int, workgroupcategoryname 
String,deptno int, " +
-"deptname String, projectcode int, projectjoindate 
String,projectenddate String, "

[37/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
reuse test case for integration module

fix comments


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/af2f204e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/af2f204e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/af2f204e

Branch: refs/heads/master
Commit: af2f204e4fbc38b973a26106879c92c0618fba02
Parents: b0750c1
Author: QiangCai 
Authored: Thu Dec 29 22:43:29 2016 +0800
Committer: jackylk 
Committed: Sun Jan 8 00:35:04 2017 +0800

--
 .../scan/expression/ExpressionResult.java   |3 +
 integration/spark-common-test/pom.xml   |  232 
 .../spark/load/CarbonLoaderUtilTest.java|  419 +++
 .../validation/FileFooterValidator.java |  157 +++
 .../src/test/resources/100_olap.csv |   99 ++
 .../src/test/resources/10dim_4msr.csv   | 1000 +++
 .../src/test/resources/IUD/T_Hive1.csv  |   10 +
 .../src/test/resources/IUD/comp1.csv|   11 +
 .../src/test/resources/IUD/comp2.csv|   11 +
 .../src/test/resources/IUD/comp3.csv|   11 +
 .../src/test/resources/IUD/comp4.csv|   11 +
 .../src/test/resources/IUD/dest.csv |6 +
 .../src/test/resources/IUD/other.csv|3 +
 .../src/test/resources/IUD/sample.csv   |4 +
 .../src/test/resources/IUD/sample_updated.csv   |2 +
 .../src/test/resources/IUD/source2.csv  |3 +
 .../src/test/resources/IUD/source3.csv  |7 +
 .../src/test/resources/IUD/update01.csv |6 +
 .../src/test/resources/OLDFORMATTABLE.csv   |   34 +
 .../src/test/resources/OLDFORMATTABLEHIVE.csv   |   33 +
 .../test/resources/Test_Data1_Logrithmic.csv|3 +
 .../src/test/resources/alldatatypescube.xml |  109 ++
 .../20160423/1400_1405/complex.dictionary   |   20 +
 .../sample/20160423/1400_1405/sample.dictionary |9 +
 .../src/test/resources/array1.csv   |2 +
 .../src/test/resources/arrayColumnEmpty.csv |   21 +
 .../src/test/resources/avgTest.csv  |   16 +
 .../test/resources/badrecords/datasample.csv|7 +
 .../badrecords/emptyTimeStampValue.csv  |8 +
 .../test/resources/badrecords/emptyValues.csv   |8 +
 .../badrecords/insufficientColumns.csv  |4 +
 .../resources/badrecords/seriazableValue.csv|3 +
 .../src/test/resources/bigIntData.csv   |   14 +
 .../src/test/resources/bigIntDataWithHeader.csv |   13 +
 .../test/resources/bigIntDataWithoutHeader.csv  |   12 +
 .../src/test/resources/big_int_Decimal.csv  |3 +
 .../src/test/resources/channelsId.csv   |   10 +
 .../src/test/resources/character_carbon.csv |   33 +
 .../src/test/resources/character_hive.csv   |   32 +
 .../test/resources/columndictionary/country.csv |5 +
 .../test/resources/columndictionary/name.csv|   10 +
 .../src/test/resources/comment.csv  |5 +
 .../test/resources/compaction/compaction1.csv   |6 +
 .../compaction/compaction1_forhive.csv  |5 +
 .../test/resources/compaction/compaction2.csv   |6 +
 .../test/resources/compaction/compaction3.csv   |6 +
 .../resources/compaction/compactioncard2.csv|  257 
 .../compaction/compactioncard2_forhive.csv  |  256 
 .../src/test/resources/complexTypeDecimal.csv   |9 +
 .../test/resources/complexTypeDecimalNested.csv |9 +
 .../resources/complexTypeDecimalNestedHive.csv  |8 +
 .../src/test/resources/complexdata.csv  |  100 ++
 .../src/test/resources/complexdata1.csv |   54 +
 .../src/test/resources/complexdata2.csv |   46 +
 .../src/test/resources/complexdatareordered.csv |   10 +
 .../test/resources/complexdatastructextra.csv   |   10 +
 .../complextypediffentcolheaderorder.csv|  100 ++
 .../src/test/resources/complextypesample.csv|   50 +
 .../complextypespecialchardelimiter.csv |   50 +
 .../src/test/resources/data.csv |   11 +
 .../src/test/resources/data2.csv|4 +
 .../src/test/resources/data2_DiffTimeFormat.csv |4 +
 .../src/test/resources/dataDiff.csv | 1001 +++
 .../src/test/resources/dataIncrement.csv|   21 +
 .../src/test/resources/dataWithEmptyRows.csv|2 +
 .../test/resources/dataWithNullFirstLine.csv|   11 +
 .../src/test/resources/dataWithSingleQuote.csv  |7 +
 .../src/test/resources/data_alltypes.csv|   10 +
 .../src/test/resources/data_withCAPSHeader.csv  |3 +
 .../src/test/resources/data_withMixedHeader.csv |3 +
 .../src/test/resources/datadelimiter.csv|   11 +
 .../src/test/resources/datanullmeasurecol.csv   |3 +
 .../src/test/resources/dataretention1.csv   |   11 +
 .../src/test/resources/dataretention2.csv   |   11

[29/38] incubator-carbondata git commit: reuse test case for integration module

2017-01-07 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/resources/join/employee.csv
--
diff --git a/integration/spark-common-test/src/test/resources/join/employee.csv 
b/integration/spark-common-test/src/test/resources/join/employee.csv
new file mode 100644
index 000..c551653
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/join/employee.csv
@@ -0,0 +1,1000 @@
+empid695,empname595,mobile15,color2,102868
+empid147,empname160,mobile99,color7,108485
+empid69,empname72,mobile87,color9,102202
+empid324,empname147,mobile87,color1,101000
+empid215,empname752,mobile47,color5,109786
+empid171,empname309,mobile34,color8,105698
+empid714,empname845,mobile21,color1,103623
+empid566,empname978,mobile88,color3,107882
+empid561,empname391,mobile79,color0,107908
+empid219,empname962,mobile80,color8,105802
+empid755,empname234,mobile41,color5,103068
+empid125,empname305,mobile10,color4,101992
+empid751,empname165,mobile26,color0,108878
+empid864,empname646,mobile88,color8,109367
+empid981,empname273,mobile88,color5,108040
+empid957,empname752,mobile63,color9,105241
+empid292,empname709,mobile88,color0,101106
+empid500,empname846,mobile67,color6,101204
+empid187,empname513,mobile1,color7,105432
+empid11,empname1,mobile26,color8,102510
+empid59,empname837,mobile76,color1,103627
+empid700,empname535,mobile77,color0,108892
+empid558,empname30,mobile36,color5,101414
+empid584,empname29,mobile18,color9,105615
+empid811,empname447,mobile80,color1,106837
+empid285,empname650,mobile66,color1,103265
+empid262,empname521,mobile42,color5,105449
+empid594,empname581,mobile2,color2,109862
+empid232,empname227,mobile3,color4,103680
+empid766,empname279,mobile13,color4,102405
+empid903,empname682,mobile22,color8,103338
+empid648,empname347,mobile47,color5,10600
+empid323,empname330,mobile55,color8,10163
+empid418,empname799,mobile16,color4,108136
+empid863,empname185,mobile26,color2,105223
+empid145,empname365,mobile95,color4,101611
+empid897,empname997,mobile10,color3,10814
+empid180,empname946,mobile69,color4,109870
+empid419,empname981,mobile46,color8,105424
+empid610,empname354,mobile79,color3,102553
+empid840,empname796,mobile98,color4,109111
+empid735,empname881,mobile2,color1,106732
+empid1,empname628,mobile30,color0,109646
+empid235,empname717,mobile88,color7,101120
+empid374,empname922,mobile58,color2,105478
+empid145,empname10,mobile77,color7,107407
+empid289,empname377,mobile95,color6,106291
+empid812,empname164,mobile86,color6,10515
+empid876,empname974,mobile61,color9,106524
+empid820,empname862,mobile34,color5,106506
+empid372,empname379,mobile47,color8,106829
+empid337,empname52,mobile65,color6,107040
+empid656,empname420,mobile34,color0,104734
+empid665,empname384,mobile21,color2,106826
+empid78,empname321,mobile38,color6,109877
+empid639,empname346,mobile85,color6,109262
+empid640,empname810,mobile29,color0,104336
+empid985,empname188,mobile83,color8,101831
+empid665,empname900,mobile50,color9,10468
+empid298,empname264,mobile29,color5,105059
+empid154,empname655,mobile76,color7,101820
+empid329,empname385,mobile61,color6,109113
+empid223,empname973,mobile63,color8,101940
+empid540,empname962,mobile76,color1,101008
+empid745,empname221,mobile90,color3,10625
+empid665,empname828,mobile77,color9,10820
+empid424,empname422,mobile52,color2,103268
+empid484,empname788,mobile97,color9,108554
+empid889,empname159,mobile63,color1,104392
+empid353,empname11,mobile38,color3,105506
+empid636,empname93,mobile70,color4,102070
+empid9,empname754,mobile50,color5,103958
+empid11,empname670,mobile22,color6,107570
+empid248,empname931,mobile47,color1,102316
+empid520,empname146,mobile70,color6,106306
+empid2,empname27,mobile32,color9,103648
+empid629,empname790,mobile91,color8,102926
+empid27,empname911,mobile84,color1,104885
+empid405,empname601,mobile59,color8,105150
+empid291,empname747,mobile22,color5,106151
+empid553,empname277,mobile40,color5,102535
+empid927,empname125,mobile46,color0,106930
+empid527,empname595,mobile54,color5,109349
+empid334,empname732,mobile63,color0,109304
+empid965,empname459,mobile18,color4,102777
+empid3,empname38,mobile20,color5,102073
+empid865,empname839,mobile6,color7,101427
+empid579,empname704,mobile1,color0,101830
+empid979,empname756,mobile42,color3,107620
+empid956,empname18,mobile74,color5,102454
+empid821,empname126,mobile92,color3,102659
+empid845,empname862,mobile76,color0,10771
+empid550,empname827,mobile34,color1,107161
+empid350,empname110,mobile94,color0,103892
+empid433,empname140,mobile79,color4,10175
+empid851,empname214,mobile16,color5,103486
+empid813,empname817,mobile2,color2,103601
+empid20,empname604,mobile33,color3,105038
+empid638,empname298,mobile97,color2,104103
+empid936,empname522,mobile73,color0,109897
+empid780,empname939,mobile27,color6,10109
+empid613,empname876,mobile97,color7,107794
+empid641,empname105,mobile2,color7,10798
+

[2/2] incubator-carbondata git commit: [CARBONDATA-608]Fixed compilation issue in Spark1.6 This closes #506

2017-01-08 Thread jackylk
[CARBONDATA-608]Fixed compilation issue in Spark1.6 This closes #506


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/f7f40c09
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/f7f40c09
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/f7f40c09

Branch: refs/heads/master
Commit: f7f40c09fae9e468e2e5a306fb3a158f34b3790a
Parents: 96ef7f4 e2f7bba
Author: jackylk 
Authored: Mon Jan 9 09:57:55 2017 +0800
Committer: jackylk 
Committed: Mon Jan 9 09:57:55 2017 +0800

--
 .../spark/sql/catalyst/CarbonTableIdentifierImplicit.scala| 7 +++
 .../scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala | 1 +
 .../org/apache/spark/sql/optimizer/CarbonOptimizer.scala  | 1 +
 3 files changed, 9 insertions(+)
--




[1/2] incubator-carbondata git commit: Fixed compilation issue in Spark1.6

2017-01-08 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 96ef7f4c4 -> f7f40c09f


Fixed compilation issue in Spark1.6


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/e2f7bbac
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/e2f7bbac
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/e2f7bbac

Branch: refs/heads/master
Commit: e2f7bbac6e4019676ae09ab8d46876752d42dde1
Parents: 96ef7f4
Author: ravipesala 
Authored: Sun Jan 8 16:40:00 2017 +0530
Committer: jackylk 
Committed: Mon Jan 9 09:57:23 2017 +0800

--
 .../spark/sql/catalyst/CarbonTableIdentifierImplicit.scala| 7 +++
 .../scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala | 1 +
 .../org/apache/spark/sql/optimizer/CarbonOptimizer.scala  | 1 +
 3 files changed, 9 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e2f7bbac/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonTableIdentifierImplicit.scala
--
diff --git 
a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonTableIdentifierImplicit.scala
 
b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonTableIdentifierImplicit.scala
index 55441cf..cb754d8 100644
--- 
a/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonTableIdentifierImplicit.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonTableIdentifierImplicit.scala
@@ -37,4 +37,11 @@ object CarbonTableIdentifierImplicit {
   case _ => Seq(tableIdentifier.table)
 }
   }
+
+  implicit def toOptionalSequence(alias: Option[String]): Option[Seq[String]] 
= {
+alias match {
+  case Some(alias) => Some(Seq(alias))
+  case _ => None
+}
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e2f7bbac/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala
--
diff --git 
a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala
 
b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala
index 07ed1d4..f22e958 100644
--- 
a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala
+++ 
b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonAnalysisRules.scala
@@ -18,6 +18,7 @@ package org.apache.spark.sql.hive
 
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.CarbonTableIdentifierImplicit
+import org.apache.spark.sql.catalyst.CarbonTableIdentifierImplicit._
 import org.apache.spark.sql.catalyst.analysis.{UnresolvedAlias, 
UnresolvedFunction, UnresolvedRelation, UnresolvedStar}
 import org.apache.spark.sql.catalyst.expressions.Alias
 import org.apache.spark.sql.catalyst.plans.Inner

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/e2f7bbac/integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonOptimizer.scala
--
diff --git 
a/integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonOptimizer.scala
 
b/integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonOptimizer.scala
index fea79a5..c893035 100644
--- 
a/integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonOptimizer.scala
+++ 
b/integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonOptimizer.scala
@@ -23,6 +23,7 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.sql._
+import org.apache.spark.sql.catalyst.CarbonTableIdentifierImplicit._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.aggregate._
 import org.apache.spark.sql.catalyst.optimizer.Optimizer



[2/2] incubator-carbondata git commit: [CARBONDATA-595]Fixed NPE while dropping the table with HDFS lock This closes #502

2017-01-09 Thread jackylk
[CARBONDATA-595]Fixed NPE while dropping the table with HDFS lock This closes 
#502


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/45211a4c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/45211a4c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/45211a4c

Branch: refs/heads/master
Commit: 45211a4c590ab95b47300db05089b68afaf51050
Parents: bc5a061 7af06e6
Author: jackylk 
Authored: Mon Jan 9 18:40:53 2017 +0800
Committer: jackylk 
Committed: Mon Jan 9 18:40:53 2017 +0800

--
 .../org/apache/carbondata/locks/HdfsFileLock.java   | 14 +++---
 .../sql/execution/command/carbonTableSchema.scala   | 16 +++-
 .../sql/execution/command/carbonTableSchema.scala   | 16 +++-
 3 files changed, 25 insertions(+), 21 deletions(-)
--




[1/2] incubator-carbondata git commit: Fixed NPE while dropping the table with HDFS lock

2017-01-09 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master bc5a061e9 -> 45211a4c5


Fixed NPE while dropping the table with HDFS lock

Fixed comment


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/7af06e6f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/7af06e6f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/7af06e6f

Branch: refs/heads/master
Commit: 7af06e6fcef15d0e046ede3cd93b4cb19439d5ab
Parents: bc5a061
Author: ravipesala 
Authored: Fri Jan 6 15:06:02 2017 +0530
Committer: jackylk 
Committed: Mon Jan 9 18:39:58 2017 +0800

--
 .../org/apache/carbondata/locks/HdfsFileLock.java   | 14 +++---
 .../sql/execution/command/carbonTableSchema.scala   | 16 +++-
 .../sql/execution/command/carbonTableSchema.scala   | 16 +++-
 3 files changed, 25 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7af06e6f/core/src/main/java/org/apache/carbondata/locks/HdfsFileLock.java
--
diff --git a/core/src/main/java/org/apache/carbondata/locks/HdfsFileLock.java 
b/core/src/main/java/org/apache/carbondata/locks/HdfsFileLock.java
index 75c6efd..961b4d8 100644
--- a/core/src/main/java/org/apache/carbondata/locks/HdfsFileLock.java
+++ b/core/src/main/java/org/apache/carbondata/locks/HdfsFileLock.java
@@ -25,6 +25,7 @@ import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.carbon.CarbonTableIdentifier;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastorage.store.filesystem.CarbonFile;
 import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
 import org.apache.carbondata.core.util.CarbonProperties;
 
@@ -108,10 +109,17 @@ public class HdfsFileLock extends AbstractCarbonLock {
   } catch (IOException e) {
 return false;
   } finally {
-if (FileFactory.getCarbonFile(location, 
FileFactory.getFileType(location)).delete()) {
-  LOGGER.info("Deleted the lock file " + location);
+CarbonFile carbonFile =
+FileFactory.getCarbonFile(location, 
FileFactory.getFileType(location));
+if (carbonFile.exists()) {
+  if (carbonFile.delete()) {
+LOGGER.info("Deleted the lock file " + location);
+  } else {
+LOGGER.error("Not able to delete the lock file " + location);
+  }
 } else {
-  LOGGER.error("Not able to delete the lock file " + location);
+  LOGGER.error("Not able to delete the lock file because "
+  + "it is not existed in location " + location);
 }
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7af06e6f/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
--
diff --git 
a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
 
b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 865a0cf..8de8236 100644
--- 
a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ 
b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -728,15 +728,13 @@ private[sql] case class DropTableCommand(ifExistsSet: 
Boolean, databaseNameOp: O
 val file = FileFactory.getCarbonFile(metadataFilePath, fileType)
 CarbonUtil.deleteFoldersAndFiles(file.getParentFile)
   }
-  // delete bad record log after drop table
-  val badLogPath = CarbonUtil.getBadLogPath(dbName + File.separator + 
tableName)
-  val badLogFileType = FileFactory.getFileType(badLogPath)
-  if (FileFactory.isFileExist(badLogPath, badLogFileType)) {
-val file = FileFactory.getCarbonFile(badLogPath, badLogFileType)
-CarbonUtil.deleteFoldersAndFiles(file)
-  }
-} else {
-  logError("Unable to unlock Table MetaData")
+}
+// delete bad record log after drop table
+val badLogPath = CarbonUtil.getBadLogPath(dbName + File.separator + 
tableName)
+val badLogFileType = FileFactory.getFileType(badLogPath)
+if (FileFactory.isFileExist(badLogPath, badLogFileType)) {
+  val file = FileFactory.getCarbonFile(badLogPath, badLogFileType)
+  CarbonUtil.deleteFoldersAndFiles(file)
 }
   }

[2/2] incubator-carbondata git commit: [CARBONDATA-616] Remove the duplicated class CarbonDataWriterException.java This closes #515

2017-01-10 Thread jackylk
[CARBONDATA-616] Remove the duplicated class CarbonDataWriterException.java 
This closes #515


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/37635661
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/37635661
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/37635661

Branch: refs/heads/master
Commit: 376356613ccdd2ab77d98d14f270fb90670cde24
Parents: ce45660 7bed18e
Author: jackylk 
Authored: Tue Jan 10 20:13:55 2017 +0800
Committer: jackylk 
Committed: Tue Jan 10 20:13:55 2017 +0800

--
 .../exception/CarbonDataWriterException.java| 81 
 1 file changed, 81 deletions(-)
--




[1/2] incubator-carbondata git commit: Remove the duplicated class CarbonDataWriterException.java

2017-01-10 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master ce4566038 -> 376356613


Remove the duplicated class CarbonDataWriterException.java


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/7bed18ec
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/7bed18ec
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/7bed18ec

Branch: refs/heads/master
Commit: 7bed18ec95d69de4605d18f09273896b1de2eb67
Parents: ce45660
Author: chenliang613 
Authored: Tue Jan 10 16:22:37 2017 +0800
Committer: jackylk 
Committed: Tue Jan 10 20:13:32 2017 +0800

--
 .../exception/CarbonDataWriterException.java| 81 
 1 file changed, 81 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7bed18ec/core/src/main/java/org/apache/carbondata/core/writer/exception/CarbonDataWriterException.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/writer/exception/CarbonDataWriterException.java
 
b/core/src/main/java/org/apache/carbondata/core/writer/exception/CarbonDataWriterException.java
deleted file mode 100644
index 2d978fb..000
--- 
a/core/src/main/java/org/apache/carbondata/core/writer/exception/CarbonDataWriterException.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.writer.exception;
-
-import java.util.Locale;
-
-public class CarbonDataWriterException extends Exception {
-
-  /**
-   * default serial version ID.
-   */
-  private static final long serialVersionUID = 1L;
-
-  /**
-   * The Error message.
-   */
-  private String msg = "";
-
-  /**
-   * Constructor
-   *
-   * @param msg The error message for this exception.
-   */
-  public CarbonDataWriterException(String msg) {
-super(msg);
-this.msg = msg;
-  }
-
-  /**
-   * Constructor
-   *
-   * @param msg The error message for this exception.
-   */
-  public CarbonDataWriterException(String msg, Throwable t) {
-super(msg, t);
-this.msg = msg;
-  }
-
-  /**
-   * getLocalizedMessage
-   */
-  @Override public String getLocalizedMessage() {
-return super.getLocalizedMessage();
-  }
-
-  /**
-   * getMessage
-   */
-  public String getMessage() {
-return this.msg;
-  }
-
-  /**
-   * This method is used to get the localized message.
-   *
-   * @param locale - A Locale object represents a specific geographical,
-   *   political, or cultural region.
-   * @return - Localized error message.
-   */
-  public String getLocalizedMessage(Locale locale) {
-return "";
-  }
-
-}



[4/5] incubator-carbondata git commit: [CARBONDATA-607] Cleanup ValueCompressionHolder class and all sub-classes

2017-01-10 Thread jackylk
[CARBONDATA-607] Cleanup ValueCompressionHolder class and all sub-classes


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/6592cf4d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/6592cf4d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/6592cf4d

Branch: refs/heads/master
Commit: 6592cf4d66a10f174fa1e16352cd2e533310027e
Parents: 0252e5d
Author: Jihong Ma 
Authored: Wed Jan 11 15:32:23 2017 +0800
Committer: jackylk 
Committed: Wed Jan 11 15:32:23 2017 +0800

--
 ...CompressedMeasureChunkFileBasedReaderV1.java |  21 ++-
 ...CompressedMeasureChunkFileBasedReaderV2.java |  35 +++--
 .../store/compression/ReaderCompressModel.java  |  10 +-
 .../compression/ValueCompressionHolder.java | 115 +++
 .../compression/ValueCompressonHolder.java  | 108 --
 .../store/compression/WriterCompressModel.java  |  12 +-
 .../decimal/CompressionMaxMinByte.java  | 115 +++
 .../decimal/CompressionMaxMinDefault.java   | 117 +++
 .../decimal/CompressionMaxMinInt.java   | 113 ++
 .../decimal/CompressionMaxMinLong.java  | 113 ++
 .../decimal/CompressionMaxMinShort.java | 115 +++
 .../decimal/UnCompressMaxMinByte.java   | 147 ---
 .../decimal/UnCompressMaxMinDefault.java| 140 --
 .../decimal/UnCompressMaxMinInt.java| 141 --
 .../decimal/UnCompressMaxMinLong.java   | 139 --
 .../decimal/UnCompressMaxMinShort.java  | 139 --
 .../nondecimal/CompressionNonDecimalByte.java   |  99 +
 .../CompressionNonDecimalDefault.java   | 101 +
 .../nondecimal/CompressionNonDecimalInt.java| 100 +
 .../nondecimal/CompressionNonDecimalLong.java   | 101 +
 .../CompressionNonDecimalMaxMinByte.java| 105 +
 .../CompressionNonDecimalMaxMinDefault.java | 107 ++
 .../CompressionNonDecimalMaxMinInt.java | 105 +
 .../CompressionNonDecimalMaxMinLong.java| 106 +
 .../CompressionNonDecimalMaxMinShort.java   | 104 +
 .../nondecimal/CompressionNonDecimalShort.java  | 100 +
 .../nondecimal/UnCompressNonDecimalByte.java| 119 ---
 .../nondecimal/UnCompressNonDecimalDefault.java | 117 ---
 .../nondecimal/UnCompressNonDecimalInt.java | 115 ---
 .../nondecimal/UnCompressNonDecimalLong.java| 116 ---
 .../UnCompressNonDecimalMaxMinByte.java | 129 
 .../UnCompressNonDecimalMaxMinDefault.java  | 123 
 .../UnCompressNonDecimalMaxMinInt.java  | 126 
 .../UnCompressNonDecimalMaxMinLong.java | 130 
 .../UnCompressNonDecimalMaxMinShort.java| 129 
 .../nondecimal/UnCompressNonDecimalShort.java   | 119 ---
 .../compression/none/CompressionNoneByte.java   | 104 +
 .../none/CompressionNoneDefault.java| 102 +
 .../compression/none/CompressionNoneInt.java| 102 +
 .../compression/none/CompressionNoneLong.java   |  99 +
 .../compression/none/CompressionNoneShort.java  | 104 +
 .../compression/none/UnCompressNoneByte.java| 127 
 .../compression/none/UnCompressNoneDefault.java | 120 ---
 .../compression/none/UnCompressNoneInt.java | 122 ---
 .../compression/none/UnCompressNoneLong.java| 124 
 .../compression/none/UnCompressNoneShort.java   | 125 
 .../compression/type/CompressionBigDecimal.java | 146 ++
 .../compression/type/UnCompressBigDecimal.java  | 119 ---
 .../type/UnCompressBigDecimalByte.java  | 145 --
 .../store/dataholder/CarbonReadDataHolder.java  |   6 +-
 ...ractHeavyCompressedDoubleArrayDataStore.java |  27 ++--
 .../core/util/ValueCompressionUtil.java | 132 -
 .../core/util/ValueCompressionUtilTest.java | 128 
 .../carbon/datastore/BlockIndexStoreTest.java   |   6 +
 54 files changed, 2569 insertions(+), 3100 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6592cf4d/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
 
b/core/src

[5/5] incubator-carbondata git commit: [CARBONDATA-607] Cleanup ValueCompressionHolder class and all sub-classes This closes #505

2017-01-10 Thread jackylk
[CARBONDATA-607] Cleanup ValueCompressionHolder class and all sub-classes This 
closes #505


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/b1fe03ee
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/b1fe03ee
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/b1fe03ee

Branch: refs/heads/master
Commit: b1fe03eeb3532ad0e2c063824c1147fcd7690a42
Parents: 0252e5d 6592cf4
Author: jackylk 
Authored: Wed Jan 11 15:33:55 2017 +0800
Committer: jackylk 
Committed: Wed Jan 11 15:33:55 2017 +0800

--
 ...CompressedMeasureChunkFileBasedReaderV1.java |  21 ++-
 ...CompressedMeasureChunkFileBasedReaderV2.java |  35 +++--
 .../store/compression/ReaderCompressModel.java  |  10 +-
 .../compression/ValueCompressionHolder.java | 115 +++
 .../compression/ValueCompressonHolder.java  | 108 --
 .../store/compression/WriterCompressModel.java  |  12 +-
 .../decimal/CompressionMaxMinByte.java  | 115 +++
 .../decimal/CompressionMaxMinDefault.java   | 117 +++
 .../decimal/CompressionMaxMinInt.java   | 113 ++
 .../decimal/CompressionMaxMinLong.java  | 113 ++
 .../decimal/CompressionMaxMinShort.java | 115 +++
 .../decimal/UnCompressMaxMinByte.java   | 147 ---
 .../decimal/UnCompressMaxMinDefault.java| 140 --
 .../decimal/UnCompressMaxMinInt.java| 141 --
 .../decimal/UnCompressMaxMinLong.java   | 139 --
 .../decimal/UnCompressMaxMinShort.java  | 139 --
 .../nondecimal/CompressionNonDecimalByte.java   |  99 +
 .../CompressionNonDecimalDefault.java   | 101 +
 .../nondecimal/CompressionNonDecimalInt.java| 100 +
 .../nondecimal/CompressionNonDecimalLong.java   | 101 +
 .../CompressionNonDecimalMaxMinByte.java| 105 +
 .../CompressionNonDecimalMaxMinDefault.java | 107 ++
 .../CompressionNonDecimalMaxMinInt.java | 105 +
 .../CompressionNonDecimalMaxMinLong.java| 106 +
 .../CompressionNonDecimalMaxMinShort.java   | 104 +
 .../nondecimal/CompressionNonDecimalShort.java  | 100 +
 .../nondecimal/UnCompressNonDecimalByte.java| 119 ---
 .../nondecimal/UnCompressNonDecimalDefault.java | 117 ---
 .../nondecimal/UnCompressNonDecimalInt.java | 115 ---
 .../nondecimal/UnCompressNonDecimalLong.java| 116 ---
 .../UnCompressNonDecimalMaxMinByte.java | 129 
 .../UnCompressNonDecimalMaxMinDefault.java  | 123 
 .../UnCompressNonDecimalMaxMinInt.java  | 126 
 .../UnCompressNonDecimalMaxMinLong.java | 130 
 .../UnCompressNonDecimalMaxMinShort.java| 129 
 .../nondecimal/UnCompressNonDecimalShort.java   | 119 ---
 .../compression/none/CompressionNoneByte.java   | 104 +
 .../none/CompressionNoneDefault.java| 102 +
 .../compression/none/CompressionNoneInt.java| 102 +
 .../compression/none/CompressionNoneLong.java   |  99 +
 .../compression/none/CompressionNoneShort.java  | 104 +
 .../compression/none/UnCompressNoneByte.java| 127 
 .../compression/none/UnCompressNoneDefault.java | 120 ---
 .../compression/none/UnCompressNoneInt.java | 122 ---
 .../compression/none/UnCompressNoneLong.java| 124 
 .../compression/none/UnCompressNoneShort.java   | 125 
 .../compression/type/CompressionBigDecimal.java | 146 ++
 .../compression/type/UnCompressBigDecimal.java  | 119 ---
 .../type/UnCompressBigDecimalByte.java  | 145 --
 .../store/dataholder/CarbonReadDataHolder.java  |   6 +-
 ...ractHeavyCompressedDoubleArrayDataStore.java |  27 ++--
 .../core/util/ValueCompressionUtil.java | 132 -
 .../core/util/ValueCompressionUtilTest.java | 128 
 .../carbon/datastore/BlockIndexStoreTest.java   |   6 +
 54 files changed, 2569 insertions(+), 3100 deletions(-)
--




[1/5] incubator-carbondata git commit: [CARBONDATA-607] Cleanup ValueCompressionHolder class and all sub-classes

2017-01-10 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 0252e5d76 -> b1fe03eeb


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6592cf4d/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
--
diff --git 
a/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
 
b/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
index fa5aff3..a468c81 100644
--- 
a/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
+++ 
b/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
@@ -29,6 +29,8 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.CacheProvider;
 import org.apache.carbondata.core.cache.CacheType;
 import org.apache.carbondata.core.carbon.AbsoluteTableIdentifier;
@@ -53,6 +55,10 @@ public class BlockIndexStoreTest extends TestCase {
   BlockIndexStore cache;
 
   private String property;
+
+  private static final LogService LOGGER =
+  LogServiceFactory.getLogService(BlockIndexStoreTest.class.getName());
+
   @BeforeClass public void setUp() {
property = 
CarbonProperties.getInstance().getProperty(CarbonCommonConstants.CARBON_DATA_FILE_VERSION);




[3/5] incubator-carbondata git commit: [CARBONDATA-607] Cleanup ValueCompressionHolder class and all sub-classes

2017-01-10 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6592cf4d/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/nondecimal/CompressionNonDecimalLong.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/nondecimal/CompressionNonDecimalLong.java
 
b/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/nondecimal/CompressionNonDecimalLong.java
new file mode 100644
index 000..00e9353
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/nondecimal/CompressionNonDecimalLong.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.datastorage.store.compression.nondecimal;
+
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
+import 
org.apache.carbondata.core.carbon.datastore.chunk.store.MeasureChunkStoreFactory;
+import 
org.apache.carbondata.core.carbon.datastore.chunk.store.MeasureDataChunkStore;
+import org.apache.carbondata.core.datastorage.store.compression.Compressor;
+import 
org.apache.carbondata.core.datastorage.store.compression.CompressorFactory;
+import 
org.apache.carbondata.core.datastorage.store.compression.ValueCompressionHolder;
+import org.apache.carbondata.core.util.ValueCompressionUtil;
+import org.apache.carbondata.core.util.ValueCompressionUtil.DataType;
+
+public class CompressionNonDecimalLong extends ValueCompressionHolder {
+  /**
+   * Attribute for Carbon LOGGER
+   */
+  private static final LogService LOGGER =
+  
LogServiceFactory.getLogService(CompressionNonDecimalLong.class.getName());
+
+  /**
+   * longCompressor.
+   */
+  private static Compressor compressor = 
CompressorFactory.getInstance().getCompressor();
+
+  /**
+   * value.
+   */
+  private long[] value;
+
+  private MeasureDataChunkStore measureChunkStore;
+
+  private double divisionFactory;
+
+  @Override public void setValue(long[] value) {
+this.value = value;
+  }
+
+  @Override public long[] getValue() { return this.value; }
+
+  @Override public void compress() {
+compressedValue = super.compress(compressor, DataType.DATA_LONG, value);
+  }
+
+  @Override public void uncompress(DataType dataType, byte[] compressedData, 
int offset,
+  int length, int decimalPlaces, Object maxValueObject) {
+super.unCompress(compressor, dataType, compressedData, offset, length);
+setUncompressedValues(value, decimalPlaces);
+  }
+
+  @Override public void setValueInBytes(byte[] bytes) {
+ByteBuffer buffer = ByteBuffer.wrap(bytes);
+this.value = ValueCompressionUtil.convertToLongArray(buffer, bytes.length);
+  }
+
+  @Override public long getLongValue(int index) {
+throw new UnsupportedOperationException(
+  "Long value is not defined for CompressionNonDecimalLong");
+  }
+
+  @Override public double getDoubleValue(int index) {
+return (measureChunkStore.getLong(index) / this.divisionFactory);
+  }
+
+  @Override public BigDecimal getBigDecimalValue(int index) {
+throw new UnsupportedOperationException(
+  "Big decimal value is not defined for CompressionNonDecimalLong");
+  }
+
+  private void setUncompressedValues(long[] data, int decimalPlaces) {
+this.measureChunkStore =
+
MeasureChunkStoreFactory.INSTANCE.getMeasureDataChunkStore(DataType.DATA_LONG, 
data.length);
+this.measureChunkStore.putData(data);
+this.divisionFactory = Math.pow(10, decimalPlaces);
+  }
+
+  @Override public void freeMemory() {
+this.measureChunkStore.freeMemory();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6592cf4d/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/nondecimal/CompressionNonDecimalMaxMinByte.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/nondecimal/CompressionNonDecimalMaxMinByte.java
 
b/core/src/main/java/org/apache/carbondat

[2/5] incubator-carbondata git commit: [CARBONDATA-607] Cleanup ValueCompressionHolder class and all sub-classes

2017-01-10 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6592cf4d/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/none/CompressionNoneDefault.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/none/CompressionNoneDefault.java
 
b/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/none/CompressionNoneDefault.java
new file mode 100644
index 000..63fce48
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/none/CompressionNoneDefault.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.datastorage.store.compression.none;
+
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
+import 
org.apache.carbondata.core.carbon.datastore.chunk.store.MeasureChunkStoreFactory;
+import 
org.apache.carbondata.core.carbon.datastore.chunk.store.MeasureDataChunkStore;
+import org.apache.carbondata.core.datastorage.store.compression.Compressor;
+import 
org.apache.carbondata.core.datastorage.store.compression.CompressorFactory;
+import 
org.apache.carbondata.core.datastorage.store.compression.ValueCompressionHolder;
+import org.apache.carbondata.core.util.ValueCompressionUtil;
+import org.apache.carbondata.core.util.ValueCompressionUtil.DataType;
+
+public class CompressionNoneDefault extends ValueCompressionHolder {
+  /**
+   * Attribute for Carbon LOGGER
+   */
+  private static final LogService LOGGER =
+  LogServiceFactory.getLogService(CompressionNoneDefault.class.getName());
+  /**
+   * doubleCompressor.
+   */
+  private static Compressor compressor = 
CompressorFactory.getInstance().getCompressor();
+  /**
+   * value.
+   */
+  private double[] value;
+
+  private DataType actualDataType;
+
+  private MeasureDataChunkStore measureChunkStore;
+
+  public CompressionNoneDefault(DataType actualDataType) {
+this.actualDataType = actualDataType;
+  }
+
+  @Override public void setValue(double[] value) {this.value = value; }
+
+  @Override
+  public void uncompress(DataType dataType, byte[] data, int offset, int 
length,
+  int decimalPlaces, Object maxValueObject) {
+super.unCompress(compressor, dataType, data, offset, length);
+setUncompressedValues(value);
+  }
+
+  @Override public double[] getValue() { return this.value; }
+
+  @Override public void compress() {
+compressedValue = super.compress(compressor, DataType.DATA_DOUBLE, value);
+  }
+
+  @Override public void setValueInBytes(byte[] value) {
+ByteBuffer buffer = ByteBuffer.wrap(value);
+this.value = ValueCompressionUtil.convertToDoubleArray(buffer, 
value.length);
+  }
+
+  @Override public long getLongValue(int index) {
+throw new UnsupportedOperationException(
+  "Long value is not defined for CompressionNonDefault");
+  }
+
+  @Override public double getDoubleValue(int index) {
+return measureChunkStore.getDouble(index);
+  }
+
+  @Override public BigDecimal getBigDecimalValue(int index) {
+throw new UnsupportedOperationException(
+  "Big decimal is not defined for CompressionNoneDefault");
+  }
+
+  private void setUncompressedValues(double[] data) {
+this.measureChunkStore = MeasureChunkStoreFactory.INSTANCE
+.getMeasureDataChunkStore(DataType.DATA_DOUBLE, data.length);
+this.measureChunkStore.putData(data);
+
+  }
+
+  @Override public void freeMemory() {
+this.measureChunkStore.freeMemory();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/6592cf4d/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/none/CompressionNoneInt.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/none/CompressionNoneInt.java
 
b/core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/none/CompressionNoneInt.java
new file mode 100644
index 000..b45deda
--- /

[2/2] incubator-carbondata git commit: [CARBONDATA-622]unify file header reader This closes #518

2017-01-11 Thread jackylk
[CARBONDATA-622]unify file header reader This closes #518


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/30033605
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/30033605
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/30033605

Branch: refs/heads/master
Commit: 30033605b7a34287ba3e74350e55b87a153c7be0
Parents: b1fe03e cbc33c4
Author: jackylk 
Authored: Wed Jan 11 17:47:33 2017 +0800
Committer: jackylk 
Committed: Wed Jan 11 17:47:33 2017 +0800

--
 .../TestLoadDataWithFileHeaderException.scala   |   6 +-
 .../TestLoadDataWithNotProperInputFile.scala|  19 ++--
 .../carbondata/spark/util/CommonUtil.scala  |  46 +++-
 .../spark/util/GlobalDictionaryUtil.scala   |  64 +--
 .../execution/command/carbonTableSchema.scala   |   3 +-
 .../spark/util/AllDictionaryTestCase.scala  |   1 +
 .../AutoHighCardinalityIdentifyTestCase.scala   |   1 +
 .../util/ExternalColumnDictionaryTestCase.scala |   5 +-
 ...GlobalDictionaryUtilConcurrentTestCase.scala |   1 +
 .../util/GlobalDictionaryUtilTestCase.scala |   1 +
 .../execution/command/carbonTableSchema.scala   |   3 +-
 .../processing/model/CarbonLoadModel.java   |  10 ++
 .../newflow/DataLoadProcessBuilder.java |  40 +--
 .../util/CarbonDataProcessorUtil.java   | 109 ---
 14 files changed, 99 insertions(+), 210 deletions(-)
--




[1/2] incubator-carbondata git commit: readfileheader

2017-01-11 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master b1fe03eeb -> 30033605b


readfileheader

fix comments


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/cbc33c4b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/cbc33c4b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/cbc33c4b

Branch: refs/heads/master
Commit: cbc33c4b3893f7e1d8d820f89c0df6d3ef5a
Parents: b1fe03e
Author: QiangCai 
Authored: Tue Jan 10 21:32:51 2017 +0800
Committer: jackylk 
Committed: Wed Jan 11 15:57:41 2017 +0800

--
 .../TestLoadDataWithFileHeaderException.scala   |   6 +-
 .../TestLoadDataWithNotProperInputFile.scala|  19 ++--
 .../carbondata/spark/util/CommonUtil.scala  |  46 +++-
 .../spark/util/GlobalDictionaryUtil.scala   |  64 +--
 .../execution/command/carbonTableSchema.scala   |   3 +-
 .../spark/util/AllDictionaryTestCase.scala  |   1 +
 .../AutoHighCardinalityIdentifyTestCase.scala   |   1 +
 .../util/ExternalColumnDictionaryTestCase.scala |   5 +-
 ...GlobalDictionaryUtilConcurrentTestCase.scala |   1 +
 .../util/GlobalDictionaryUtilTestCase.scala |   1 +
 .../execution/command/carbonTableSchema.scala   |   3 +-
 .../processing/model/CarbonLoadModel.java   |  10 ++
 .../newflow/DataLoadProcessBuilder.java |  40 +--
 .../util/CarbonDataProcessorUtil.java   | 109 ---
 14 files changed, 99 insertions(+), 210 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cbc33c4b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithFileHeaderException.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithFileHeaderException.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithFileHeaderException.scala
index 7717112..78b9a22 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithFileHeaderException.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithFileHeaderException.scala
@@ -41,8 +41,7 @@ class TestLoadDataWithFileHeaderException extends QueryTest 
with BeforeAndAfterA
   assert(false)
 } catch {
   case e: Exception =>
-assert(e.getMessage.equals("DataLoad failure: CSV File provided is not 
proper. " +
-  "Column names in schema and csv header are not same. CSVFile Name : 
windows.csv"))
+assert(e.getMessage.contains("CSV header in input file is not proper. 
Column names in schema and csv header are not the same."))
 }
   }
 
@@ -55,8 +54,7 @@ class TestLoadDataWithFileHeaderException extends QueryTest 
with BeforeAndAfterA
   assert(false)
 } catch {
   case e: Exception =>
-assert(e.getMessage.equals("DataLoad failure: CSV header provided in 
DDL is not proper. " +
-  "Column names in schema and CSV header are not the same."))
+assert(e.getMessage.contains("CSV header in DDL is not proper. Column 
names in schema and CSV header are not the same"))
 }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/cbc33c4b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNotProperInputFile.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNotProperInputFile.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNotProperInputFile.scala
index 5fd52dc..7fb194f 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNotProperInputFile.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithNotProperInputFile.scala
@@ -35,10 +35,9 @@ class TestLoadDataWithNotProperInputFile extends QueryTest {
 
   test("test loading data with input path exists but has nothing") {
 try {
-  val carbonLoadModel: CarbonLoadModel = new CarbonLoadModel
   val dataPath = s"$resourcesPath/nullSample.csv"
-  carbonLoadModel.setFactFilePath(FileUtils.getPaths(dataPath))
-  GlobalDictionaryUtil.loadDataFrame(sqlContext, carbonLoadModel)
+  FileUtils.getPaths(

[2/2] incubator-carbondata git commit: Update carbondata description and clean .pdf files This closes #522

2017-01-12 Thread jackylk
Update carbondata description and clean .pdf files This closes #522


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/20453497
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/20453497
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/20453497

Branch: refs/heads/master
Commit: 20453497bb03d3843799b7a5ea8d1baa604852a6
Parents: 3003360 a11a419
Author: jackylk 
Authored: Thu Jan 12 20:05:09 2017 +0800
Committer: jackylk 
Committed: Thu Jan 12 20:05:09 2017 +0800

--
 README.md  |   7 ++-
 docs/Apache-CarbonData-meetup-material.pdf | Bin 1021758 -> 0 bytes
 pom.xml|   7 +++
 3 files changed, 5 insertions(+), 9 deletions(-)
--




[1/2] incubator-carbondata git commit: update carbon description and clean .pdf files

2017-01-12 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 30033605b -> 20453497b


update carbon description and clean .pdf files

fix comments


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/a11a4198
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/a11a4198
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/a11a4198

Branch: refs/heads/master
Commit: a11a4198197fc53f406ac03d81b633eb913f8873
Parents: 3003360
Author: chenliang613 
Authored: Wed Jan 11 22:12:13 2017 +0800
Committer: jackylk 
Committed: Thu Jan 12 20:04:42 2017 +0800

--
 README.md  |   7 ++-
 docs/Apache-CarbonData-meetup-material.pdf | Bin 1021758 -> 0 bytes
 pom.xml|   7 +++
 3 files changed, 5 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a11a4198/README.md
--
diff --git a/README.md b/README.md
index c28f0b2..ce71666 100644
--- a/README.md
+++ b/README.md
@@ -19,10 +19,7 @@
 
 
 
-Apache CarbonData(incubating) is a new big data file format for faster
-interactive query using advanced columnar storage, index, compression
-and encoding techniques to improve computing efficiency, in turn it will 
-help speedup queries an order of magnitude faster over PetaBytes of data. 
+Apache CarbonData(incubating) is an indexed columnar data format for fast 
analytics on big data platform, e.g.Apache Hadoop, Apache Spark, etc.
 
 You can find the latest CarbonData document and learn more at:
 
[http://carbondata.incubator.apache.org](http://carbondata.incubator.apache.org/)
@@ -64,7 +61,7 @@ Please refer [CarbonData File 
Format](https://cwiki.apache.org/confluence/displa
 
(https://cwiki.apache.org/confluence/display/CARBONDATA/Suggestion+to+create+CarbonData+table)
 
 ## Other Technical Material
-[Apache CarbonData meetup material](docs/Apache-CarbonData-meetup-material.pdf)
+[Apache CarbonData meetup 
material](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=66850609)
 
 ## Fork and Contribute
 This is an active open source project for everyone, and we are always open to 
people who want to use this system or contribute to it. 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a11a4198/docs/Apache-CarbonData-meetup-material.pdf
--
diff --git a/docs/Apache-CarbonData-meetup-material.pdf 
b/docs/Apache-CarbonData-meetup-material.pdf
deleted file mode 100644
index 5f85fff..000
Binary files a/docs/Apache-CarbonData-meetup-material.pdf and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a11a4198/pom.xml
--
diff --git a/pom.xml b/pom.xml
index d6100ac..1e0c80d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -28,10 +28,9 @@
   org.apache.carbondata
   carbondata-parent
   Apache CarbonData :: Parent
-  CarbonData is a new Apache Hadoop native file format for faster
-interactive query using advanced columnar storage, index, compression
-and encoding techniques to improve computing efficiency, in turn it will
-help speedup queries an order of magnitude faster over PetaBytes of 
data.
+  Apache CarbonData(incubating) is an indexed columnar data 
format for fast analytics
+on big data platform, e.g.Apache Hadoop, Apache Spark, etc.
+  
   http://carbondata.incubator.apache.org
   2016
   pom



[1/2] incubator-carbondata git commit: fixUnionTestCase

2017-01-12 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 20453497b -> fe36dea66


fixUnionTestCase


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/b0d3d403
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/b0d3d403
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/b0d3d403

Branch: refs/heads/master
Commit: b0d3d403f1ba345de6cc49dc335778ffa150d375
Parents: 2045349
Author: QiangCai 
Authored: Wed Jan 11 23:47:25 2017 +0800
Committer: jackylk 
Committed: Thu Jan 12 20:11:32 2017 +0800

--
 .../AllDataTypesTestCaseAggregate.scala | 30 ++--
 .../AllDataTypesTestCaseAggregate.scala | 17 ---
 .../sql/optimizer/CarbonLateDecodeRule.scala|  2 +-
 3 files changed, 16 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/b0d3d403/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
index a0f12f5..d17a946 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
@@ -1087,21 +1087,21 @@ class AllDataTypesTestCaseAggregate extends QueryTest 
with BeforeAndAfterAll {
   Seq(Row(96981.54360516652)))
   })
 
-//  test("CARBONDATA-60-union-defect")({
-//sql("drop table if exists carbonunion")
-//import sqlContext.implicits._
-//val df = sqlContext.sparkContext.parallelize(1 to 1000).map(x => (x+"", 
(x+100)+"")).toDF("c1", "c2")
-//df.registerTempTable("sparkunion")
-//df.write
-//  .format("carbondata")
-//  .mode(SaveMode.Overwrite)
-//  .option("tableName", "carbonunion")
-//  .save()
-//checkAnswer(
-//  sql("select c1,count(c1) from (select c1 as c1,c2 as c2 from 
carbonunion union all select c2 as c1,c1 as c2 from carbonunion)t where 
c1='200' group by c1"),
-//  sql("select c1,count(c1) from (select c1 as c1,c2 as c2 from 
sparkunion union all select c2 as c1,c1 as c2 from sparkunion)t where c1='200' 
group by c1"))
-//sql("drop table if exists carbonunion")
-//  })
+  test("CARBONDATA-60-union-defect")({
+sql("drop table if exists carbonunion")
+import sqlContext.implicits._
+val df = sqlContext.sparkContext.parallelize(1 to 1000).map(x => (x+"", 
(x+100)+"")).toDF("c1", "c2")
+df.registerTempTable("sparkunion")
+df.write
+  .format("carbondata")
+  .mode(SaveMode.Overwrite)
+  .option("tableName", "carbonunion")
+  .save()
+checkAnswer(
+  sql("select c1,count(c1) from (select c1 as c1,c2 as c2 from carbonunion 
union all select c2 as c1,c1 as c2 from carbonunion)t where c1='200' group by 
c1"),
+  sql("select c1,count(c1) from (select c1 as c1,c2 as c2 from sparkunion 
union all select c2 as c1,c1 as c2 from sparkunion)t where c1='200' group by 
c1"))
+sql("drop table if exists carbonunion")
+  })
 
   test("select Min(imei) from (select imei from Carbon_automation_test order 
by imei) t")({
 checkAnswer(

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/b0d3d403/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
--
diff --git 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
index 3c52949..1629faf 100644
--- 
a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
+++ 
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
@@ -59,21 +59,4 @@ class AllDataTypesTestCaseAggregate extends QueryTest with 
B

[2/2] incubator-carbondata git commit: [CARBONDATA-627]fix union test case for spark2 This closes #524

2017-01-12 Thread jackylk
[CARBONDATA-627]fix union test case for spark2 This closes #524


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/fe36dea6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/fe36dea6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/fe36dea6

Branch: refs/heads/master
Commit: fe36dea669b98d7a286f24cf3810d9daa40f7987
Parents: 2045349 b0d3d40
Author: jackylk 
Authored: Thu Jan 12 20:12:01 2017 +0800
Committer: jackylk 
Committed: Thu Jan 12 20:12:01 2017 +0800

--
 .../AllDataTypesTestCaseAggregate.scala | 30 ++--
 .../AllDataTypesTestCaseAggregate.scala | 17 ---
 .../sql/optimizer/CarbonLateDecodeRule.scala|  2 +-
 3 files changed, 16 insertions(+), 33 deletions(-)
--




[1/2] incubator-carbondata git commit: Fixed measure selection with out table order gives wrong result

2017-01-12 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master fe36dea66 -> e705aadd9


Fixed measure selection with out table order gives wrong result

Fixed comment


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/7934d7b8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/7934d7b8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/7934d7b8

Branch: refs/heads/master
Commit: 7934d7b8ac03fd98064a213ee01b2b64bece6309
Parents: fe36dea
Author: ravipesala 
Authored: Wed Jan 11 22:47:42 2017 +0530
Committer: jackylk 
Committed: Thu Jan 12 20:16:56 2017 +0800

--
 .../impl/DictionaryBasedVectorResultCollector.java |  1 -
 .../scan/processor/AbstractDataBlockIterator.java  |  8 
 .../spark/sql/execution/CarbonLateDecodeStrategy.scala |  6 +++---
 .../carbondata/vectorreader/VectorReaderTestCase.scala | 13 -
 4 files changed, 23 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7934d7b8/core/src/main/java/org/apache/carbondata/scan/collector/impl/DictionaryBasedVectorResultCollector.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/scan/collector/impl/DictionaryBasedVectorResultCollector.java
 
b/core/src/main/java/org/apache/carbondata/scan/collector/impl/DictionaryBasedVectorResultCollector.java
index cab7caf..3ce54de 100644
--- 
a/core/src/main/java/org/apache/carbondata/scan/collector/impl/DictionaryBasedVectorResultCollector.java
+++ 
b/core/src/main/java/org/apache/carbondata/scan/collector/impl/DictionaryBasedVectorResultCollector.java
@@ -101,7 +101,6 @@ public class DictionaryBasedVectorResultCollector extends 
AbstractScannedResultC
 complexInfo = complexList.toArray(new 
ColumnVectorInfo[complexList.size()]);
 Arrays.sort(dictionaryInfo);
 Arrays.sort(noDictionaryInfo);
-Arrays.sort(measureInfo);
 Arrays.sort(complexInfo);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7934d7b8/core/src/main/java/org/apache/carbondata/scan/processor/AbstractDataBlockIterator.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/scan/processor/AbstractDataBlockIterator.java
 
b/core/src/main/java/org/apache/carbondata/scan/processor/AbstractDataBlockIterator.java
index 4fc74b9..f996c16 100644
--- 
a/core/src/main/java/org/apache/carbondata/scan/processor/AbstractDataBlockIterator.java
+++ 
b/core/src/main/java/org/apache/carbondata/scan/processor/AbstractDataBlockIterator.java
@@ -22,6 +22,8 @@ import java.io.IOException;
 import java.util.List;
 
 import org.apache.carbondata.common.CarbonIterator;
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.carbon.datastore.DataRefNode;
 import org.apache.carbondata.core.carbon.querystatistics.QueryStatisticsModel;
 import org.apache.carbondata.core.datastorage.store.FileHolder;
@@ -43,6 +45,9 @@ import 
org.apache.carbondata.scan.scanner.impl.NonFilterScanner;
  */
 public abstract class AbstractDataBlockIterator extends 
CarbonIterator> {
 
+  private static final LogService LOGGER =
+  
LogServiceFactory.getLogService(AbstractDataBlockIterator.class.getName());
+
   /**
* iterator which will be used to iterate over data blocks
*/
@@ -85,12 +90,15 @@ public abstract class AbstractDataBlockIterator extends 
CarbonIteratorhttp://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/7934d7b8/integration/spark2/src/main/scala/org/apache/spark/sql/execution/CarbonLateDecodeStrategy.scala
--
diff --git 
a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/CarbonLateDecodeStrategy.scala
 
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/CarbonLateDecodeStrategy.scala
index ace92fc..2e6989d 100644
--- 
a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/CarbonLateDecodeStrategy.scala
+++ 
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/CarbonLateDecodeStrategy.scala
@@ -463,13 +463,13 @@ private[sql] class CarbonLateDecodeStrategy extends 
SparkStrategy {
   def supportBatchedDataSource(sqlContext: SQLContext, cols: Seq[Attribute]): 
Boolean = {
 val enableReader = {
   if 
(sqlContext.sparkSession.conf.contains(CarbonCommonConstants.ENABLE_VECTOR_READER))
 {
-
sqlContext.sparkSession.conf.get(CarbonCommonConstants.ENABLE_VECTOR_READER).toBoolean
+
sqlContext.sparkSession.conf.get(CarbonCommonConstants.ENABLE_VECTOR_READER)
   

[2/2] incubator-carbondata git commit: [CARBONDATA-628] Fixed measure selection without table order gives wrong result with vectorized reader enabled This closes #525

2017-01-12 Thread jackylk
[CARBONDATA-628] Fixed measure selection without table order gives wrong 
result with vectorized reader enabled This closes #525


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/e705aadd
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/e705aadd
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/e705aadd

Branch: refs/heads/master
Commit: e705aadd90dca851c113aff50686710e9fc5511d
Parents: fe36dea 7934d7b
Author: jackylk 
Authored: Thu Jan 12 20:17:39 2017 +0800
Committer: jackylk 
Committed: Thu Jan 12 20:17:39 2017 +0800

--
 .../impl/DictionaryBasedVectorResultCollector.java |  1 -
 .../scan/processor/AbstractDataBlockIterator.java  |  8 
 .../spark/sql/execution/CarbonLateDecodeStrategy.scala |  6 +++---
 .../carbondata/vectorreader/VectorReaderTestCase.scala | 13 -
 4 files changed, 23 insertions(+), 5 deletions(-)
--




[1/2] incubator-carbondata git commit: Fixed compaction with multiple blocklet issue

2017-01-13 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 6a2afc179 -> e547fd00a


Fixed compaction with multiple blocklet issue


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/822f23b8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/822f23b8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/822f23b8

Branch: refs/heads/master
Commit: 822f23b8046f692b80f29832a1c58c97acf3b43f
Parents: 6a2afc1
Author: kumarvishal 
Authored: Tue Jan 10 21:37:15 2017 +0800
Committer: jackylk 
Committed: Fri Jan 13 16:31:46 2017 +0800

--
 .../datacompaction/DataCompactionBlockletBoundryTest.scala  | 2 +-
 .../scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala | 5 -
 2 files changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/822f23b8/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBlockletBoundryTest.scala
--
diff --git 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBlockletBoundryTest.scala
 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBlockletBoundryTest.scala
index 8b8b3c5..149ab6c 100644
--- 
a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBlockletBoundryTest.scala
+++ 
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBlockletBoundryTest.scala
@@ -36,7 +36,7 @@ class DataCompactionBlockletBoundryTest extends QueryTest 
with BeforeAndAfterAll
   .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "mm/dd/")
 CarbonProperties.getInstance()
   .addProperty(CarbonCommonConstants.BLOCKLET_SIZE,
-"55")
+"120")
 sql(
   "CREATE TABLE IF NOT EXISTS blocklettest (country String, ID String, 
date Timestamp, name " +
 "String, " +

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/822f23b8/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
--
diff --git 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
index 09c35ae..4923753 100644
--- 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
@@ -318,7 +318,10 @@ class CarbonMergerRDD[K, V](
 
 // prepare the details required to extract the segment properties using 
last segment.
 if (null != carbonInputSplits && carbonInputSplits.nonEmpty) {
-  val carbonInputSplit = carbonInputSplits.last
+  // taking head: since a Scala sequence is used and adding prepends elements,
+  // and we need to update the key of older segments with the latest keygenerator,
+  // we need to take the top (head) of the splits
+  val carbonInputSplit = carbonInputSplits.head
   var dataFileFooter: DataFileFooter = null
 
   try {



[2/2] incubator-carbondata git commit: [CARBONDATA-621]Fixed compaction with multiple blocklet issue This closes #517

2017-01-13 Thread jackylk
[CARBONDATA-621]Fixed compaction with multiple blocklet issue This closes #517


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/e547fd00
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/e547fd00
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/e547fd00

Branch: refs/heads/master
Commit: e547fd00a5248892a595e7db31213a1ef52a12bb
Parents: 6a2afc1 822f23b
Author: jackylk 
Authored: Fri Jan 13 16:45:36 2017 +0800
Committer: jackylk 
Committed: Fri Jan 13 16:45:36 2017 +0800

--
 .../datacompaction/DataCompactionBlockletBoundryTest.scala  | 2 +-
 .../scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala | 5 -
 2 files changed, 5 insertions(+), 2 deletions(-)
--




[2/2] incubator-carbondata git commit: [CARBONDATA-630]Fixed select query with expression issue in Spark 2.1 cluster mode This closes #527

2017-01-13 Thread jackylk
[CARBONDATA-630]Fixed select query with expression issue in Spark 2.1 cluster 
mode This closes #527


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/8f6ae29c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/8f6ae29c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/8f6ae29c

Branch: refs/heads/master
Commit: 8f6ae29c81410e182fb7f5823d7e5dec286aa622
Parents: e547fd0 559cac4
Author: jackylk 
Authored: Fri Jan 13 17:02:56 2017 +0800
Committer: jackylk 
Committed: Fri Jan 13 17:02:56 2017 +0800

--
 .../main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--




[1/2] incubator-carbondata git commit: Fixed select query with expression issue

2017-01-13 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master e547fd00a -> 8f6ae29c8


Fixed select query with expression issue

Fixed comment


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/559cac4c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/559cac4c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/559cac4c

Branch: refs/heads/master
Commit: 559cac4cb7b116656dba85af49131e18ceacb80c
Parents: e547fd0
Author: ravipesala 
Authored: Thu Jan 12 20:25:25 2017 +0530
Committer: jackylk 
Committed: Fri Jan 13 17:02:25 2017 +0800

--
 .../main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/559cac4c/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
--
diff --git 
a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
 
b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
index fbcfbc8..10454d6 100644
--- 
a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
+++ 
b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
@@ -237,6 +237,8 @@ class CarbonDecoderRDD(
 output: Seq[Attribute])
 extends RDD[InternalRow](prev) {
 
+  private val storepath = CarbonEnv.get.carbonMetastore.storePath
+
   def canBeDecoded(attr: Attribute): Boolean = {
 profile match {
   case ip: IncludeProfile if ip.attributes.nonEmpty =>
@@ -302,7 +304,6 @@ class CarbonDecoderRDD(
   }
 
   override def compute(split: Partition, context: TaskContext): 
Iterator[InternalRow] = {
-  val storepath = CarbonEnv.get.carbonMetastore.storePath
 val absoluteTableIdentifiers = relations.map { relation =>
   val carbonTable = 
relation.carbonRelation.carbonRelation.metaData.carbonTable
   (carbonTable.getFactTableName, carbonTable.getAbsoluteTableIdentifier)



[1/2] incubator-carbondata git commit: Fixed carbondata file version issue

2017-01-13 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 8f6ae29c8 -> 32354b37b


Fixed carbondata file version issue


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/51425d49
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/51425d49
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/51425d49

Branch: refs/heads/master
Commit: 51425d49b2ba210b505ecc6fd69cf69e763409cd
Parents: 8f6ae29
Author: kumarvishal 
Authored: Mon Jan 9 01:47:19 2017 +0800
Committer: kumarvishal 
Committed: Fri Jan 13 17:06:41 2017 +0800

--
 .../java/org/apache/carbondata/core/util/CarbonProperties.java| 3 ++-
 .../apache/carbondata/carbon/datastore/BlockIndexStoreTest.java   | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/51425d49/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java 
b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
index 8a5a1c6..0763f1b 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
@@ -375,7 +375,8 @@ public final class CarbonProperties {
   return getDefaultFormatVersion();
 } else {
   try {
-return ColumnarFormatVersion.valueOf(versionStr);
+short version = Short.parseShort(versionStr);
+return ColumnarFormatVersion.valueOf(version);
   } catch (IllegalArgumentException e) {
 return getDefaultFormatVersion();
   }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/51425d49/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
--
diff --git 
a/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
 
b/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
index a468c81..b43b690 100644
--- 
a/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
+++ 
b/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
@@ -62,7 +62,7 @@ public class BlockIndexStoreTest extends TestCase {
   @BeforeClass public void setUp() {
property = 
CarbonProperties.getInstance().getProperty(CarbonCommonConstants.CARBON_DATA_FILE_VERSION);

-   
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_DATA_FILE_VERSION,
 "V1");
+   
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_DATA_FILE_VERSION,
 "1");
 StoreCreator.createCarbonStore();
 CarbonProperties.getInstance().
 addProperty(CarbonCommonConstants.CARBON_MAX_DRIVER_LRU_CACHE_SIZE, 
"10");



[2/2] incubator-carbondata git commit: [CARBONDATA-609]Fixed carbondata file version issue in carbon.properties This closes #507

2017-01-13 Thread jackylk
[CARBONDATA-609]Fixed carbondata file version issue in carbon.properties This 
closes #507


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/32354b37
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/32354b37
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/32354b37

Branch: refs/heads/master
Commit: 32354b37ba4bba0d197c1af831b29f9cc5734e87
Parents: 8f6ae29 51425d4
Author: jackylk 
Authored: Fri Jan 13 23:26:02 2017 +0800
Committer: jackylk 
Committed: Fri Jan 13 23:26:02 2017 +0800

--
 .../java/org/apache/carbondata/core/util/CarbonProperties.java| 3 ++-
 .../apache/carbondata/carbon/datastore/BlockIndexStoreTest.java   | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)
--




[2/2] incubator-carbondata git commit: [CARBONDATA-636]Fixed testcase issues in spark 1.6 and 2.1 of no kettle. And also refactored of insert into flow in no kettle This closes #529

2017-01-13 Thread jackylk
[CARBONDATA-636]Fixed testcase issues in spark 1.6 and 2.1 of no kettle. And 
also refactored of insert into flow in no kettle This closes #529


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/6d29fa2f
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/6d29fa2f
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/6d29fa2f

Branch: refs/heads/master
Commit: 6d29fa2f004cfd721dff476c9a8218b2e34a9727
Parents: 32354b3 8100d94
Author: jackylk 
Authored: Fri Jan 13 23:52:37 2017 +0800
Committer: jackylk 
Committed: Fri Jan 13 23:52:37 2017 +0800

--
 .../core/constants/CarbonCommonConstants.java   | 10 +
 .../spark/rdd/CarbonDataLoadRDD.scala   | 18 
 .../spark/rdd/NewCarbonDataLoadRDD.scala| 30 -
 .../processing/csvreaderstep/CsvInput.java  | 11 ++---
 .../csvreaderstep/JavaRddIterator.java  | 32 --
 .../processing/csvreaderstep/RddInputUtils.java | 12 +++---
 .../processing/model/CarbonLoadModel.java   | 15 +++
 .../newflow/CarbonDataLoadConfiguration.java| 10 +
 .../newflow/DataLoadProcessBuilder.java |  1 +
 .../sort/impl/ParallelReadMergeSorterImpl.java  |  3 --
 .../newflow/steps/InputProcessorStepImpl.java   | 45 +++-
 .../sortandgroupby/sortdata/SortDataRows.java   |  7 ++-
 .../util/CarbonDataProcessorUtil.java   | 30 -
 13 files changed, 105 insertions(+), 119 deletions(-)
--




[1/2] incubator-carbondata git commit: Fixed testcase issues in spark 1.6 and 2.1 of no kettle. And also refactored insert into flow of no kettle

2017-01-13 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 32354b37b -> 6d29fa2f0


Fixed testcase issues in spark 1.6 and 2.1 of no kettle. And also refactored 
insert into flow of no kettle


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/8100d949
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/8100d949
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/8100d949

Branch: refs/heads/master
Commit: 8100d949e4264651b09b42a173aa17085369cd82
Parents: 32354b3
Author: ravipesala 
Authored: Fri Jan 13 00:16:33 2017 +0530
Committer: jackylk 
Committed: Fri Jan 13 23:52:01 2017 +0800

--
 .../core/constants/CarbonCommonConstants.java   | 10 +
 .../spark/rdd/CarbonDataLoadRDD.scala   | 18 
 .../spark/rdd/NewCarbonDataLoadRDD.scala| 30 -
 .../processing/csvreaderstep/CsvInput.java  | 11 ++---
 .../csvreaderstep/JavaRddIterator.java  | 32 --
 .../processing/csvreaderstep/RddInputUtils.java | 12 +++---
 .../processing/model/CarbonLoadModel.java   | 15 +++
 .../newflow/CarbonDataLoadConfiguration.java| 10 +
 .../newflow/DataLoadProcessBuilder.java |  1 +
 .../sort/impl/ParallelReadMergeSorterImpl.java  |  3 --
 .../newflow/steps/InputProcessorStepImpl.java   | 45 +++-
 .../sortandgroupby/sortdata/SortDataRows.java   |  7 ++-
 .../util/CarbonDataProcessorUtil.java   | 30 -
 13 files changed, 105 insertions(+), 119 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/8100d949/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
 
b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 041f5ed..664720e 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1115,6 +1115,16 @@ public final class CarbonCommonConstants {
*/
   public static final String USE_OFFHEAP_IN_QUERY_PROCSSING_DEFAULT = "true";
 
+  /**
+   * whether to prefetch data while loading.
+   */
+  public static final String USE_PREFETCH_WHILE_LOADING = 
"carbon.loading.prefetch";
+
+  /**
+   * default value for prefetch data while loading.
+   */
+  public static final String USE_PREFETCH_WHILE_LOADING_DEFAULT = "false";
+
   private CarbonCommonConstants() {
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/8100d949/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
--
diff --git 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
index 4392775..14a0930 100644
--- 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
@@ -33,21 +33,21 @@ import org.apache.spark.sql.Row
 import org.apache.spark.sql.execution.command.ExecutionErrors
 import org.apache.spark.util.SparkUtil
 
+import org.apache.carbondata.common.CarbonIterator
 import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.common.logging.impl.StandardLogService
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.load.{BlockDetails, LoadMetadataDetails}
 import org.apache.carbondata.core.util.{CarbonProperties, 
CarbonTimeStatisticsFactory}
 import org.apache.carbondata.processing.constants.DataProcessorConstants
-import org.apache.carbondata.processing.csvreaderstep.{JavaRddIterator, 
RddInputUtils}
+import org.apache.carbondata.processing.csvreaderstep.RddInputUtils
 import org.apache.carbondata.processing.etl.DataLoadingException
 import org.apache.carbondata.processing.graphgenerator.GraphGenerator
 import org.apache.carbondata.processing.model.CarbonLoadModel
 import org.apache.carbondata.spark.DataLoadResult
-import org.apache.carbondata.spark.load.{_}
+import org.apache.carbondata.spark.load._
 import org.apache.carbondata.spark.splits.TableSplit
-import org.apache.carbondata.spark.util.CarbonQueryUtil
-import org.apache.carbondata.spark.util.CarbonScalaUtil
+import org.apache.carbondata.spark.util.{CarbonQueryUtil, CarbonScalaUtil}
 
 /**
  * Thi

[2/2] incubator-carbondata git commit: [CARBONDATA-633]Fixed offheap Query Crash issue This closes #533

2017-01-13 Thread jackylk
[CARBONDATA-633]Fixed offheap Query Crash issue  This closes #533


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/b5814653
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/b5814653
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/b5814653

Branch: refs/heads/master
Commit: b58146531f1941bf5fd22e2a5380ad82b138ce36
Parents: 6d29fa2 a45ace2
Author: jackylk 
Authored: Sat Jan 14 12:23:55 2017 +0800
Committer: jackylk 
Committed: Sat Jan 14 12:23:55 2017 +0800

--
 .../chunk/store/MeasureChunkStoreFactory.java  |  2 +-
 .../scan/executor/impl/AbstractQueryExecutor.java  | 10 ++
 .../scan/executor/impl/DetailQueryExecutor.java|  3 ++-
 .../executor/impl/VectorDetailQueryExecutor.java   |  3 ++-
 .../scan/processor/AbstractDataBlockIterator.java  | 10 --
 .../scan/processor/impl/DataBlockIteratorImpl.java |  6 --
 .../scan/result/AbstractScannedResult.java |  1 -
 .../AbstractDetailQueryResultIterator.java | 17 -
 .../scan/scanner/impl/FilterScanner.java   |  2 ++
 9 files changed, 41 insertions(+), 13 deletions(-)
--




[1/2] incubator-carbondata git commit: fixed offheap crash issue

2017-01-13 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 6d29fa2f0 -> b58146531


fixed offheap crash issue


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/a45ace20
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/a45ace20
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/a45ace20

Branch: refs/heads/master
Commit: a45ace20368f939980d64ffedd1b08774ce61a81
Parents: 6d29fa2
Author: kumarvishal 
Authored: Fri Jan 13 12:50:26 2017 +0800
Committer: jackylk 
Committed: Sat Jan 14 12:23:38 2017 +0800

--
 .../chunk/store/MeasureChunkStoreFactory.java  |  2 +-
 .../scan/executor/impl/AbstractQueryExecutor.java  | 10 ++
 .../scan/executor/impl/DetailQueryExecutor.java|  3 ++-
 .../executor/impl/VectorDetailQueryExecutor.java   |  3 ++-
 .../scan/processor/AbstractDataBlockIterator.java  | 10 --
 .../scan/processor/impl/DataBlockIteratorImpl.java |  6 --
 .../scan/result/AbstractScannedResult.java |  1 -
 .../AbstractDetailQueryResultIterator.java | 17 -
 .../scan/scanner/impl/FilterScanner.java   |  2 ++
 9 files changed, 41 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a45ace20/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/MeasureChunkStoreFactory.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/MeasureChunkStoreFactory.java
 
b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/MeasureChunkStoreFactory.java
index e7068b0..87ef7b5 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/MeasureChunkStoreFactory.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/store/MeasureChunkStoreFactory.java
@@ -62,7 +62,7 @@ public class MeasureChunkStoreFactory {
* @return measure chunk store
*/
   public MeasureDataChunkStore getMeasureDataChunkStore(DataType dataType, int 
numberOfRows) {
-if (isUnsafe) {
+if (!isUnsafe) {
   switch (dataType) {
 case DATA_BYTE:
   return new SafeByteMeasureChunkStore(numberOfRows);

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a45ace20/core/src/main/java/org/apache/carbondata/scan/executor/impl/AbstractQueryExecutor.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/scan/executor/impl/AbstractQueryExecutor.java
 
b/core/src/main/java/org/apache/carbondata/scan/executor/impl/AbstractQueryExecutor.java
index 9976db3..8c9daaf 100644
--- 
a/core/src/main/java/org/apache/carbondata/scan/executor/impl/AbstractQueryExecutor.java
+++ 
b/core/src/main/java/org/apache/carbondata/scan/executor/impl/AbstractQueryExecutor.java
@@ -28,6 +28,7 @@ import java.util.Set;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.carbondata.common.CarbonIterator;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.common.logging.impl.StandardLogService;
@@ -80,6 +81,12 @@ public abstract class AbstractQueryExecutor implements 
QueryExecutor {
*/
   protected QueryExecutorProperties queryProperties;
 
+  /**
+   * query result iterator which will execute the query
+   * and give the result
+   */
+  protected CarbonIterator queryIterator;
+
   public AbstractQueryExecutor() {
 queryProperties = new QueryExecutorProperties();
   }
@@ -470,6 +477,9 @@ public abstract class AbstractQueryExecutor implements 
QueryExecutor {
*/
   @Override public void finish() throws QueryExecutionException {
 CarbonUtil.clearBlockCache(queryProperties.dataBlocks);
+if(null != queryIterator) {
+  queryIterator.close();
+}
 if (null != queryProperties.executorService) {
   queryProperties.executorService.shutdown();
   try {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/a45ace20/core/src/main/java/org/apache/carbondata/scan/executor/impl/DetailQueryExecutor.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/scan/executor/impl/DetailQueryExecutor.java
 
b/core/src/main/java/org/apache/carbondata/scan/executor/impl/DetailQueryExecutor.java
index 92e7fc8..6a34e68 100644
--- 
a/core/src/main/java/org/apache/carbondata/scan/executor/impl/DetailQueryExecutor.java
+++ 
b/core/src/main/java/org/apache/carbondata/scan/executor/impl/DetailQueryExecutor.java
@@ -39,11 +39,12 @@ public cl

[1/2] incubator-carbondata git commit: added validation that the table name is not empty and does not contain any spaces

2017-01-13 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master b58146531 -> 392bc290e


added validation that the table name is not empty and does not contain any spaces

reformatted the code

replace validation with string utils

removed java style error

removed unnecessary code

removed java style error

removed unused import

changed the exception message


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/bb0f83dd
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/bb0f83dd
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/bb0f83dd

Branch: refs/heads/master
Commit: bb0f83dd3861f62090d4b67f654ec54f1b288645
Parents: b581465
Author: anubhav100 
Authored: Mon Jan 9 21:28:26 2017 +0530
Committer: jackylk 
Committed: Sat Jan 14 15:26:11 2017 +0800

--
 .../org/apache/spark/sql/CarbonSource.scala | 33 +++-
 1 file changed, 19 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/bb0f83dd/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
--
diff --git 
a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala 
b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
index d03c90c..65fc266 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonSource.scala
@@ -17,10 +17,9 @@
 
 package org.apache.spark.sql
 
-import java.io.File
-
 import scala.language.implicitConversions
 
+import org.apache.commons.lang.StringUtils
 import org.apache.hadoop.fs.Path
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.execution.CarbonLateDecodeStrategy
@@ -32,38 +31,41 @@ import org.apache.spark.sql.types.{DecimalType, StructType}
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.spark.CarbonOption
+import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
 
 /**
  * Carbon relation provider compliant to data source api.
  * Creates carbon relations
  */
 class CarbonSource extends CreatableRelationProvider
-with SchemaRelationProvider with DataSourceRegister {
+  with SchemaRelationProvider with DataSourceRegister {
 
   override def shortName(): String = "carbondata"
 
   // called by any write operation like INSERT INTO DDL or DataFrame.write API
   override def createRelation(
-   sqlContext: SQLContext,
-   mode: SaveMode,
-   parameters: Map[String, String],
-   data: DataFrame): BaseRelation = {
+  sqlContext: SQLContext,
+  mode: SaveMode,
+  parameters: Map[String, String],
+  data: DataFrame): BaseRelation = {
 CarbonEnv.init(sqlContext.sparkSession)
 // User should not specify path since only one store is supported in 
carbon currently,
 // after we support multi-store, we can remove this limitation
 require(!parameters.contains("path"), "'path' should not be specified, " +
-"the path to store carbon file is the 'storePath' specified when 
creating CarbonContext")
+  "the path to store carbon file is 
the 'storePath' " +
+  "specified when creating 
CarbonContext")
 
 val options = new CarbonOption(parameters)
 val storePath = 
CarbonProperties.getInstance().getProperty(CarbonCommonConstants.STORE_LOCATION)
 val tablePath = new Path(storePath + "/" + options.dbName + "/" + 
options.tableName)
 val isExists = 
tablePath.getFileSystem(sqlContext.sparkContext.hadoopConfiguration)
-.exists(tablePath)
+  .exists(tablePath)
 val (doSave, doAppend) = (mode, isExists) match {
   case (SaveMode.ErrorIfExists, true) =>
 sys.error(s"ErrorIfExists mode, path $storePath already exists.")
   case (SaveMode.Overwrite, true) =>
-sqlContext.sparkSession.sql(s"DROP TABLE IF EXISTS 
${options.dbName}.${options.tableName}")
+sqlContext.sparkSession
+  .sql(s"DROP TABLE IF EXISTS ${ options.dbName }.${ options.tableName 
}")
 (true, false)
   case (SaveMode.Overwrite, false) | (SaveMode.ErrorIfExists, false) =>
 (true, false)
@@ -85,9 +87,9 @@ class CarbonSource extends CreatableRelationProvider
 
   // called by DDL operation with a USI

[2/2] incubator-carbondata git commit: [CARBONDATA-584]added validation for table is not empty This closes #511

2017-01-13 Thread jackylk
[CARBONDATA-584]added validation for table is not empty This closes #511


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/392bc290
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/392bc290
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/392bc290

Branch: refs/heads/master
Commit: 392bc290e944962d12d4f6f8b7fb96d3349a1846
Parents: b581465 bb0f83d
Author: jackylk 
Authored: Sat Jan 14 15:26:46 2017 +0800
Committer: jackylk 
Committed: Sat Jan 14 15:26:46 2017 +0800

--
 .../org/apache/spark/sql/CarbonSource.scala | 33 +++-
 1 file changed, 19 insertions(+), 14 deletions(-)
--




[16/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/scan/expression/conditional/GreaterThanEqualToExpression.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/scan/expression/conditional/GreaterThanEqualToExpression.java
 
b/core/src/main/java/org/apache/carbondata/scan/expression/conditional/GreaterThanEqualToExpression.java
deleted file mode 100644
index cf73e4b..000
--- 
a/core/src/main/java/org/apache/carbondata/scan/expression/conditional/GreaterThanEqualToExpression.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.scan.expression.conditional;
-
-import org.apache.carbondata.core.metadata.DataType;
-import org.apache.carbondata.scan.expression.Expression;
-import org.apache.carbondata.scan.expression.ExpressionResult;
-import 
org.apache.carbondata.scan.expression.exception.FilterIllegalMemberException;
-import 
org.apache.carbondata.scan.expression.exception.FilterUnsupportedException;
-import org.apache.carbondata.scan.filter.intf.ExpressionType;
-import org.apache.carbondata.scan.filter.intf.RowIntf;
-
-public class GreaterThanEqualToExpression extends BinaryConditionalExpression {
-  private static final long serialVersionUID = 4185317066280688984L;
-
-  public GreaterThanEqualToExpression(Expression left, Expression right) {
-super(left, right);
-  }
-
-  public ExpressionResult evaluate(RowIntf value)
-  throws FilterUnsupportedException, FilterIllegalMemberException {
-ExpressionResult elRes = left.evaluate(value);
-ExpressionResult erRes = right.evaluate(value);
-ExpressionResult exprResVal1 = elRes;
-if (elRes.isNull() || erRes.isNull()) {
-  elRes.set(DataType.BOOLEAN, false);
-  return elRes;
-}
-if (elRes.getDataType() != erRes.getDataType()) {
-  if (elRes.getDataType().getPrecedenceOrder() < 
erRes.getDataType().getPrecedenceOrder()) {
-exprResVal1 = erRes;
-  }
-
-}
-boolean result = false;
-switch (exprResVal1.getDataType()) {
-  case STRING:
-result = elRes.getString().compareTo(erRes.getString()) >= 0;
-break;
-  case SHORT:
-result = elRes.getShort() >= (erRes.getShort());
-break;
-  case INT:
-result = elRes.getInt() >= (erRes.getInt());
-break;
-  case DOUBLE:
-result = elRes.getDouble() >= (erRes.getDouble());
-break;
-  case DATE:
-  case TIMESTAMP:
-result = elRes.getTime() >= (erRes.getTime());
-break;
-  case LONG:
-result = elRes.getLong() >= (erRes.getLong());
-break;
-  case DECIMAL:
-result = elRes.getDecimal().compareTo(erRes.getDecimal()) >= 0;
-break;
-  default:
-throw new FilterUnsupportedException(
-"DataType: " + exprResVal1.getDataType() + " not supported for the 
filter expression");
-}
-exprResVal1.set(DataType.BOOLEAN, result);
-return exprResVal1;
-  }
-
-  @Override public ExpressionType getFilterExpressionType() {
-return ExpressionType.GREATERTHAN_EQUALTO;
-  }
-
-  @Override public String getString() {
-return "GreaterThanEqualTo(" + left.getString() + ',' + right.getString() 
+ ')';
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/scan/expression/conditional/GreaterThanExpression.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/scan/expression/conditional/GreaterThanExpression.java
 
b/core/src/main/java/org/apache/carbondata/scan/expression/conditional/GreaterThanExpression.java
deleted file mode 100644
index 583b334..000
--- 
a/core/src/main/java/org/apache/carbondata/scan/expression/conditional/GreaterThanExpression.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright owners

[19/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/update/data/BlockletLevelDeleteDeltaDataCache.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/update/data/BlockletLevelDeleteDeltaDataCache.java
 
b/core/src/main/java/org/apache/carbondata/core/update/data/BlockletLevelDeleteDeltaDataCache.java
new file mode 100644
index 000..5b453cc
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/update/data/BlockletLevelDeleteDeltaDataCache.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.update.data;
+
+import org.roaringbitmap.RoaringBitmap;
+
+/**
+ * This class maintains delete delta data cache of each blocklet along with 
the block timestamp
+ */
+public class BlockletLevelDeleteDeltaDataCache {
+  private RoaringBitmap deleteDelataDataCache;
+  private String timeStamp;
+
+  public BlockletLevelDeleteDeltaDataCache(int[] deleteDeltaFileData, String 
timeStamp) {
+deleteDelataDataCache = RoaringBitmap.bitmapOf(deleteDeltaFileData);
+this.timeStamp=timeStamp;
+  }
+
+  public boolean contains(int key) {
+return deleteDelataDataCache.contains(key);
+  }
+
+  public int getSize() {
+return deleteDelataDataCache.getCardinality();
+  }
+
+  public String getCacheTimeStamp() {
+return timeStamp;
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/update/data/DeleteDeltaCacheLoaderIntf.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/update/data/DeleteDeltaCacheLoaderIntf.java
 
b/core/src/main/java/org/apache/carbondata/core/update/data/DeleteDeltaCacheLoaderIntf.java
new file mode 100644
index 000..813d843
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/update/data/DeleteDeltaCacheLoaderIntf.java
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.update.data;
+
+/**
+ * This interface holds all methods required to load delete delta file data to 
cache
+ */
+public interface DeleteDeltaCacheLoaderIntf {
+
+  void loadDeleteDeltaFileDataToCache();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/update/data/RowCountDetailsVO.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/update/data/RowCountDetailsVO.java
 
b/core/src/main/java/org/apache/carbondata/core/update/data/RowCountDetailsVO.java
new file mode 100644
index 000..15e33ad
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/update/data/RowCountDetailsVO.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable la

[13/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/scan/filter/resolver/ConditionalFilterResolverImpl.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/scan/filter/resolver/ConditionalFilterResolverImpl.java
 
b/core/src/main/java/org/apache/carbondata/scan/filter/resolver/ConditionalFilterResolverImpl.java
deleted file mode 100644
index 82c6b84..000
--- 
a/core/src/main/java/org/apache/carbondata/scan/filter/resolver/ConditionalFilterResolverImpl.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.scan.filter.resolver;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.SortedMap;
-
-import org.apache.carbondata.core.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.metadata.DataType;
-import org.apache.carbondata.core.metadata.Encoding;
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
-import org.apache.carbondata.scan.expression.ColumnExpression;
-import org.apache.carbondata.scan.expression.Expression;
-import 
org.apache.carbondata.scan.expression.conditional.BinaryConditionalExpression;
-import org.apache.carbondata.scan.expression.conditional.ConditionalExpression;
-import 
org.apache.carbondata.scan.expression.exception.FilterUnsupportedException;
-import org.apache.carbondata.scan.filter.FilterUtil;
-import org.apache.carbondata.scan.filter.intf.FilterExecuterType;
-import 
org.apache.carbondata.scan.filter.resolver.metadata.FilterResolverMetadata;
-import 
org.apache.carbondata.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
-import 
org.apache.carbondata.scan.filter.resolver.resolverinfo.visitor.FilterInfoTypeVisitorFactory;
-
-public class ConditionalFilterResolverImpl implements FilterResolverIntf {
-
-  private static final long serialVersionUID = 1838955268462201691L;
-  protected Expression exp;
-  protected boolean isExpressionResolve;
-  protected boolean isIncludeFilter;
-  private DimColumnResolvedFilterInfo dimColResolvedFilterInfo;
-
-  public ConditionalFilterResolverImpl(Expression exp, boolean 
isExpressionResolve,
-  boolean isIncludeFilter) {
-this.exp = exp;
-this.isExpressionResolve = isExpressionResolve;
-this.isIncludeFilter = isIncludeFilter;
-this.dimColResolvedFilterInfo = new DimColumnResolvedFilterInfo();
-  }
-
-  /**
-   * This API will resolve the filter expression and generates the
-   * dictionaries for executing/evaluating the filter expressions in the
-   * executer layer.
-   *
-   * @throws FilterUnsupportedException
-   */
-  @Override public void resolve(AbsoluteTableIdentifier 
absoluteTableIdentifier)
-  throws FilterUnsupportedException, IOException {
-FilterResolverMetadata metadata = new FilterResolverMetadata();
-metadata.setTableIdentifier(absoluteTableIdentifier);
-if ((!isExpressionResolve) && exp instanceof BinaryConditionalExpression) {
-  BinaryConditionalExpression binaryConditionalExpression = 
(BinaryConditionalExpression) exp;
-  Expression leftExp = binaryConditionalExpression.getLeft();
-  Expression rightExp = binaryConditionalExpression.getRight();
-  if (leftExp instanceof ColumnExpression) {
-ColumnExpression columnExpression = (ColumnExpression) leftExp;
-metadata.setColumnExpression(columnExpression);
-metadata.setExpression(rightExp);
-metadata.setIncludeFilter(isIncludeFilter);
-// If imei=imei comes in filter condition then we need to
-// skip processing of right expression.
-// This flow has reached here assuming that this is a single
-// column expression.
-// we need to check if the other expression contains column
-// expression or not in depth.
-CarbonDimension dimension = columnExpression.getDimension();
-if (FilterUtil.checkIfExpressionContainsColumn(rightExp)
-|| FilterUtil.isExpressionNeedsToResolved(rightExp, 
isIncludeFilter) &&
-dimens

[40/57] [abbrv] incubator-carbondata git commit: move load

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ae17158a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
--
diff --git 
a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
 
b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
index 0fecaaf..142796a 100644
--- 
a/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
+++ 
b/integration/spark/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
@@ -41,14 +41,14 @@ import 
org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.block.{Distributable, 
TableBlockInfo}
 import org.apache.carbondata.core.dictionary.server.DictionaryServer
-import org.apache.carbondata.core.load.{BlockDetails, LoadMetadataDetails}
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable
 import org.apache.carbondata.core.metadata.{CarbonTableIdentifier, 
ColumnarFormatVersion}
 import org.apache.carbondata.core.path.CarbonStorePath
 import org.apache.carbondata.core.update.CarbonUpdateUtil
+import org.apache.carbondata.core.updatestatus.LoadMetadataDetails
 import org.apache.carbondata.core.updatestatus.locks.{CarbonLockFactory, 
ICarbonLock, LockUsage}
 import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.processing.csvreaderstep.RddInpututilsForUpdate
+import org.apache.carbondata.processing.csvreaderstep.{BlockDetails, 
RddInpututilsForUpdate}
 import org.apache.carbondata.processing.etl.DataLoadingException
 import org.apache.carbondata.processing.model.CarbonLoadModel
 import 
org.apache.carbondata.processing.newflow.exception.CarbonDataLoadingException

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ae17158a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
--
diff --git 
a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
 
b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
index 441500b..db67940 100644
--- 
a/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
+++ 
b/integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
@@ -40,14 +40,14 @@ import org.apache.spark.{SparkEnv, SparkException}
 import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.block.{Distributable, 
TableBlockInfo}
-import org.apache.carbondata.core.load.{BlockDetails, LoadMetadataDetails}
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable
 import org.apache.carbondata.core.metadata.{CarbonTableIdentifier, 
ColumnarFormatVersion}
 import org.apache.carbondata.core.path.CarbonStorePath
 import org.apache.carbondata.core.update.CarbonUpdateUtil
+import org.apache.carbondata.core.updatestatus.LoadMetadataDetails
 import org.apache.carbondata.core.updatestatus.locks.{CarbonLockFactory, 
ICarbonLock, LockUsage}
 import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.processing.csvreaderstep.RddInpututilsForUpdate
+import org.apache.carbondata.processing.csvreaderstep.{BlockDetails, 
RddInpututilsForUpdate}
 import org.apache.carbondata.processing.etl.DataLoadingException
 import org.apache.carbondata.processing.model.CarbonLoadModel
 import 
org.apache.carbondata.processing.newflow.exception.CarbonDataLoadingException

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ae17158a/processing/src/main/java/org/apache/carbondata/processing/csvreaderstep/BlockDetails.java
--
diff --git 
a/processing/src/main/java/org/apache/carbondata/processing/csvreaderstep/BlockDetails.java
 
b/processing/src/main/java/org/apache/carbondata/processing/csvreaderstep/BlockDetails.java
new file mode 100644
index 000..cc6ae5f
--- /dev/null
+++ 
b/processing/src/main/java/org/apache/carbondata/processing/csvreaderstep/BlockDetails.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or ag

[04/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/test/java/org/apache/carbondata/scan/expression/conditional/GreaterThanExpressionUnitTest.java
--
diff --git 
a/core/src/test/java/org/apache/carbondata/scan/expression/conditional/GreaterThanExpressionUnitTest.java
 
b/core/src/test/java/org/apache/carbondata/scan/expression/conditional/GreaterThanExpressionUnitTest.java
deleted file mode 100644
index e0805a6..000
--- 
a/core/src/test/java/org/apache/carbondata/scan/expression/conditional/GreaterThanExpressionUnitTest.java
+++ /dev/null
@@ -1,364 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.scan.expression.conditional;
-
-import java.math.BigDecimal;
-import java.sql.Timestamp;
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.apache.carbondata.core.metadata.DataType;
-import org.apache.carbondata.scan.expression.ColumnExpression;
-import org.apache.carbondata.scan.expression.ExpressionResult;
-import 
org.apache.carbondata.scan.expression.exception.FilterIllegalMemberException;
-import 
org.apache.carbondata.scan.expression.exception.FilterUnsupportedException;
-import org.apache.carbondata.scan.filter.intf.RowImpl;
-
-import mockit.Mock;
-import mockit.MockUp;
-import org.apache.spark.sql.types.Decimal;
-import org.junit.Test;
-
-import static junit.framework.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-public class GreaterThanExpressionUnitTest {
-  static GreaterThanExpression greaterThanExpression;
-
-  @Test public void testEvaluateForGreaterThanExpressionWithStringDataType()
-  throws FilterUnsupportedException, FilterIllegalMemberException {
-ColumnExpression left = new ColumnExpression("left_name", DataType.STRING);
-left.setColIndex(0);
-ColumnExpression right = new ColumnExpression("right_name", 
DataType.STRING);
-right.setColIndex(1);
-greaterThanExpression = new GreaterThanExpression(left, right);
-RowImpl value = new RowImpl();
-String[] row = { "string1" };
-String[] row1 = { "String's Value" };
-Object objectRow[] = { row, row1 };
-new MockUp() {
-  Boolean returnMockFlag = true;
-
-  @Mock public String getString() {
-if (returnMockFlag) {
-  returnMockFlag = false;
-  return "string1";
-
-} else {
-  return "String's Value";
-
-}
-
-  }
-};
-value.setValues(objectRow);
-ExpressionResult result = greaterThanExpression.evaluate(value);
-assertTrue(result.getBoolean());
-  }
-
-  @Test public void testEvaluateForGreaterThanExpressionWithShortDataType()
-  throws FilterUnsupportedException, FilterIllegalMemberException {
-ColumnExpression right = new ColumnExpression("id", DataType.SHORT);
-right.setColIndex(0);
-ColumnExpression left = new ColumnExpression("id", DataType.SHORT);
-left.setColIndex(1);
-greaterThanExpression = new GreaterThanExpression(left, right);
-RowImpl value = new RowImpl();
-Short[] row = { 170 };
-Short[] row1 = { 70 };
-Object objectRow[] = { row, row1 };
-value.setValues(objectRow);
-
-new MockUp() {
-  Boolean returnMockFlag = true;
-
-  @Mock public Short getShort() {
-if (returnMockFlag) {
-  returnMockFlag = false;
-  return 170;
-
-} else {
-  return 70;
-
-}
-
-  }
-};
-
-ExpressionResult result = greaterThanExpression.evaluate(value);
-assertTrue(result.getBoolean());
-
-  }
-
-  @Test public void testEvaluateForGreaterThanExpressionWithDoubleDataType()
-  throws FilterUnsupportedException, FilterIllegalMemberException {
-ColumnExpression right = new ColumnExpression("right_contact", 
DataType.DOUBLE);
-right.setColIndex(0);
-ColumnExpression left = new ColumnExpression("left_contact", 
DataType.DOUBLE);
-left.setColIndex(1);
-greaterThanExpression = new GreaterThanExpression(left, right);
-RowImpl value = new RowImpl();
-Double[] 

[28/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileFactory.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileFactory.java
 
b/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileFactory.java
deleted file mode 100644
index f0c424b..000
--- 
a/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileFactory.java
+++ /dev/null
@@ -1,485 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.datastorage.store.impl;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.zip.GZIPInputStream;
-
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-import 
org.apache.carbondata.core.datastorage.store.filesystem.AlluxioCarbonFile;
-import org.apache.carbondata.core.datastorage.store.filesystem.CarbonFile;
-import org.apache.carbondata.core.datastorage.store.filesystem.HDFSCarbonFile;
-import org.apache.carbondata.core.datastorage.store.filesystem.LocalCarbonFile;
-import 
org.apache.carbondata.core.datastorage.store.filesystem.ViewFSCarbonFile;
-import org.apache.carbondata.core.util.CarbonUtil;
-
-import 
org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.compress.BZip2Codec;
-import org.apache.hadoop.io.compress.GzipCodec;
-
-public final class FileFactory {
-  private static Configuration configuration = null;
-
-  static {
-configuration = new Configuration();
-configuration.addResource(new Path("../core-default.xml"));
-  }
-
-  private FileFactory() {
-
-  }
-
-  public static Configuration getConfiguration() {
-return configuration;
-  }
-
-  public static FileHolder getFileHolder(FileType fileType) {
-switch (fileType) {
-  case LOCAL:
-return new FileHolderImpl();
-  case HDFS:
-  case ALLUXIO:
-  case VIEWFS:
-return new DFSFileHolderImpl();
-  default:
-return new FileHolderImpl();
-}
-  }
-
-  public static FileType getFileType(String path) {
-if (path.startsWith(CarbonUtil.HDFS_PREFIX)) {
-  return FileType.HDFS;
-}
-else if (path.startsWith(CarbonUtil.ALLUXIO_PREFIX)) {
-  return FileType.ALLUXIO;
-}
-else if (path.startsWith(CarbonUtil.VIEWFS_PREFIX)) {
-  return FileType.VIEWFS;
-}
-return FileType.LOCAL;
-  }
-
-  public static CarbonFile getCarbonFile(String path, FileType fileType) {
-switch (fileType) {
-  case LOCAL:
-return new LocalCarbonFile(path);
-  case HDFS:
-return new HDFSCarbonFile(path);
-  case ALLUXIO:
-return new AlluxioCarbonFile(path);
-  case VIEWFS:
-return new ViewFSCarbonFile(path);
-  default:
-return new LocalCarbonFile(path);
-}
-  }
-
-  public static DataInputStream getDataInputStream(String path, FileType 
fileType)
-  throws IOException {
-return getDataInputStream(path, fileType, -1);
-  }
-
-  public static DataInputStream getDataInputStream(String path, FileType 
fileType, int bufferSize)
-  throws IOException {
-path = path.replace("\\", "/");
-boolean gzip = path.endsWith(".gz");
-boolean bzip2 = path.endsWith(".bz2");
-InputStream stream;
-switch (fileType) {
-  case LOCAL:
-if (gzip) {
-  stream = new GZIPInputStream(new FileInputStream(path));
-} else if (bzip2) {
-  stream = new BZip2CompressorInputStream(new FileInputStream(path));
-} else {
-  stream = new FileInputStream(path);
-}
- 

[02/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/RowResultMerger.java
--
diff --git 
a/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/RowResultMerger.java
 
b/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/RowResultMerger.java
index 2bbc72d..825c98d 100644
--- 
a/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/RowResultMerger.java
+++ 
b/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/RowResultMerger.java
@@ -46,8 +46,8 @@ import 
org.apache.carbondata.processing.store.CarbonFactDataHandlerColumnar;
 import org.apache.carbondata.processing.store.CarbonFactDataHandlerModel;
 import org.apache.carbondata.processing.store.CarbonFactHandler;
 import 
org.apache.carbondata.processing.store.writer.exception.CarbonDataWriterException;
-import org.apache.carbondata.scan.result.iterator.RawResultIterator;
-import org.apache.carbondata.scan.wrappers.ByteArrayWrapper;
+import org.apache.carbondata.core.scan.result.iterator.RawResultIterator;
+import org.apache.carbondata.core.scan.wrappers.ByteArrayWrapper;
 
 /**
  * This is the Merger class responsible for the merging of the segments.

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/TupleConversionAdapter.java
--
diff --git 
a/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/TupleConversionAdapter.java
 
b/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/TupleConversionAdapter.java
index 2a48bab..497605c 100644
--- 
a/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/TupleConversionAdapter.java
+++ 
b/integration/spark-common/src/main/java/org/apache/carbondata/spark/merger/TupleConversionAdapter.java
@@ -23,7 +23,7 @@ import java.util.List;
 
 import org.apache.carbondata.core.datastore.block.SegmentProperties;
 import org.apache.carbondata.processing.util.RemoveDictionaryUtil;
-import org.apache.carbondata.scan.wrappers.ByteArrayWrapper;
+import org.apache.carbondata.core.scan.wrappers.ByteArrayWrapper;
 
 /**
  * This class will be used to convert the Result into the format used in data 
writer.

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/integration/spark-common/src/main/java/org/apache/carbondata/spark/partition/api/impl/QueryPartitionHelper.java
--
diff --git 
a/integration/spark-common/src/main/java/org/apache/carbondata/spark/partition/api/impl/QueryPartitionHelper.java
 
b/integration/spark-common/src/main/java/org/apache/carbondata/spark/partition/api/impl/QueryPartitionHelper.java
index a864ebf..dd8556c 100644
--- 
a/integration/spark-common/src/main/java/org/apache/carbondata/spark/partition/api/impl/QueryPartitionHelper.java
+++ 
b/integration/spark-common/src/main/java/org/apache/carbondata/spark/partition/api/impl/QueryPartitionHelper.java
@@ -24,7 +24,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.scan.model.CarbonQueryPlan;
+import org.apache.carbondata.core.scan.model.CarbonQueryPlan;
 import org.apache.carbondata.spark.partition.api.DataPartitioner;
 import org.apache.carbondata.spark.partition.api.Partition;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/integration/spark-common/src/main/java/org/apache/carbondata/spark/util/CarbonQueryUtil.java
--
diff --git 
a/integration/spark-common/src/main/java/org/apache/carbondata/spark/util/CarbonQueryUtil.java
 
b/integration/spark-common/src/main/java/org/apache/carbondata/spark/util/CarbonQueryUtil.java
index 94e475e..ffce6cb 100644
--- 
a/integration/spark-common/src/main/java/org/apache/carbondata/spark/util/CarbonQueryUtil.java
+++ 
b/integration/spark-common/src/main/java/org/apache/carbondata/spark/util/CarbonQueryUtil.java
@@ -27,7 +27,7 @@ import java.util.Map;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.load.LoadMetadataDetails;
-import org.apache.carbondata.scan.model.CarbonQueryPlan;
+import org.apache.carbondata.core.scan.model.CarbonQueryPlan;
 import org.apache.carbondata.spark.partition.api.Partition;
 import org.apache.carbondata.spark.partition.api.impl.DefaultLoadBalancer;
 import org.apache.carbondata.spark.partition.api.impl.PartitionMultiFileImpl;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/integration/spark-common/src/main/scala/org/apache/carbondata/spark/CarbonFilters.scala
-

[08/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/test/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryTypeTest.java
--
diff --git 
a/core/src/test/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryTypeTest.java
 
b/core/src/test/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryTypeTest.java
new file mode 100644
index 000..b8f484f
--- /dev/null
+++ 
b/core/src/test/java/org/apache/carbondata/core/scan/complextypes/PrimitiveQueryTypeTest.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.core.scan.complextypes;
+
+import java.nio.ByteBuffer;
+
+import org.apache.carbondata.core.cache.dictionary.ColumnDictionaryInfo;
+import org.apache.carbondata.core.cache.dictionary.Dictionary;
+import org.apache.carbondata.core.cache.dictionary.ForwardDictionary;
+import org.apache.carbondata.core.metadata.DataType;
+import 
org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
+import 
org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
+import org.apache.carbondata.core.keygenerator.mdkey.Bits;
+import org.apache.carbondata.core.util.DataTypeUtil;
+
+import mockit.Mock;
+import mockit.MockUp;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class PrimitiveQueryTypeTest {
+  private static PrimitiveQueryType primitiveQueryType, 
primitiveQueryTypeForInt,
+  primitiveQueryTypeForLong, primitiveQueryTypeForDouble, 
primitiveQueryTypeForBoolean,
+  primitiveQueryTypeForTimeStamp, 
primitiveQueryTypeForTimeStampForIsDictionaryFalse;
+  private static Dictionary dictionary;
+  private boolean isDirectDictionary = false;
+
+  @BeforeClass public static void setUp() {
+String name = "test";
+String parentName = "testParent";
+int blockIndex = 1;
+int keySize = 1;
+boolean isDirectDictionary = true;
+primitiveQueryType =
+new PrimitiveQueryType(name, parentName, blockIndex, DataType.STRING, 
keySize, dictionary,
+isDirectDictionary);
+primitiveQueryTypeForInt =
+new PrimitiveQueryType(name, parentName, blockIndex, DataType.INT, 
keySize, dictionary,
+isDirectDictionary);
+primitiveQueryTypeForDouble =
+new PrimitiveQueryType(name, parentName, blockIndex, DataType.DOUBLE, 
keySize, dictionary,
+isDirectDictionary);
+primitiveQueryTypeForLong =
+new PrimitiveQueryType(name, parentName, blockIndex, DataType.LONG, 
keySize, dictionary,
+isDirectDictionary);
+primitiveQueryTypeForBoolean =
+new PrimitiveQueryType(name, parentName, blockIndex, DataType.BOOLEAN, 
keySize, dictionary,
+isDirectDictionary);
+primitiveQueryTypeForTimeStamp =
+new PrimitiveQueryType(name, parentName, blockIndex, 
DataType.TIMESTAMP, keySize,
+dictionary, isDirectDictionary);
+ColumnDictionaryInfo columnDictionaryInfo = new 
ColumnDictionaryInfo(DataType.STRING);
+ForwardDictionary forwardDictionary = new 
ForwardDictionary(columnDictionaryInfo);
+primitiveQueryTypeForTimeStampForIsDictionaryFalse =
+new PrimitiveQueryType(name, parentName, blockIndex, 
DataType.TIMESTAMP, keySize,
+forwardDictionary, false);
+
+  }
+
+  @Test public void testGetDataBasedOnDataTypeFromSurrogates() {
+ByteBuffer surrogateData = ByteBuffer.allocate(10);
+surrogateData.put(3, (byte) 1);
+new MockUp() {
+  @Mock public long[] getKeyArray(byte[] key, int offset) {
+return new long[] { 1313045L };
+  }
+};
+Object expectedValue = 131304300L;
+
+Object actualValue =
+
primitiveQueryTypeForTimeStamp.getDataBasedOnDataTypeFromSurrogates(surrogateData);
+assertEquals(expectedValue, actualValue);
+  }
+
+  @Test public void 
testGetDataBasedOnDataTypeFromSurrogatesWhenIsDictionaryFalse() {
+ByteBuffer surrogateData = ByteBuffer.allocate(10);
+surrogateData.put(3, (byte) 1);
+new MockUp() {
+  @Mock public String getDict

[07/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpressionUnitTest.java
--
diff --git 
a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpressionUnitTest.java
 
b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpressionUnitTest.java
new file mode 100644
index 000..bfc4f9a
--- /dev/null
+++ 
b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpressionUnitTest.java
@@ -0,0 +1,364 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.core.scan.expression.conditional;
+
+import java.math.BigDecimal;
+import java.sql.Timestamp;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.apache.carbondata.core.metadata.DataType;
+import org.apache.carbondata.core.scan.expression.ColumnExpression;
+import org.apache.carbondata.core.scan.expression.ExpressionResult;
+import 
org.apache.carbondata.core.scan.expression.exception.FilterIllegalMemberException;
+import 
org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedException;
+import org.apache.carbondata.core.scan.filter.intf.RowImpl;
+
+import mockit.Mock;
+import mockit.MockUp;
+import org.apache.spark.sql.types.Decimal;
+import org.junit.Test;
+
+import static junit.framework.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class GreaterThanExpressionUnitTest {
+  static GreaterThanExpression greaterThanExpression;
+
+  @Test public void testEvaluateForGreaterThanExpressionWithStringDataType()
+  throws FilterUnsupportedException, FilterIllegalMemberException {
+ColumnExpression left = new ColumnExpression("left_name", DataType.STRING);
+left.setColIndex(0);
+ColumnExpression right = new ColumnExpression("right_name", 
DataType.STRING);
+right.setColIndex(1);
+greaterThanExpression = new GreaterThanExpression(left, right);
+RowImpl value = new RowImpl();
+String[] row = { "string1" };
+String[] row1 = { "String's Value" };
+Object objectRow[] = { row, row1 };
+new MockUp() {
+  Boolean returnMockFlag = true;
+
+  @Mock public String getString() {
+if (returnMockFlag) {
+  returnMockFlag = false;
+  return "string1";
+
+} else {
+  return "String's Value";
+
+}
+
+  }
+};
+value.setValues(objectRow);
+ExpressionResult result = greaterThanExpression.evaluate(value);
+assertTrue(result.getBoolean());
+  }
+
+  @Test public void testEvaluateForGreaterThanExpressionWithShortDataType()
+  throws FilterUnsupportedException, FilterIllegalMemberException {
+ColumnExpression right = new ColumnExpression("id", DataType.SHORT);
+right.setColIndex(0);
+ColumnExpression left = new ColumnExpression("id", DataType.SHORT);
+left.setColIndex(1);
+greaterThanExpression = new GreaterThanExpression(left, right);
+RowImpl value = new RowImpl();
+Short[] row = { 170 };
+Short[] row1 = { 70 };
+Object objectRow[] = { row, row1 };
+value.setValues(objectRow);
+
+new MockUp() {
+  Boolean returnMockFlag = true;
+
+  @Mock public Short getShort() {
+if (returnMockFlag) {
+  returnMockFlag = false;
+  return 170;
+
+} else {
+  return 70;
+
+}
+
+  }
+};
+
+ExpressionResult result = greaterThanExpression.evaluate(value);
+assertTrue(result.getBoolean());
+
+  }
+
+  @Test public void testEvaluateForGreaterThanExpressionWithDoubleDataType()
+  throws FilterUnsupportedException, FilterIllegalMemberException {
+ColumnExpression right = new ColumnExpression("right_contact", 
DataType.DOUBLE);
+right.setColIndex(0);
+ColumnExpression left = new ColumnExpression("left_contact", 
DataType.DOUBLE);
+left.setColIndex(1);
+greaterThanExpression = new GreaterThanExpression(left, right);
+   

[24/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
 
b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
new file mode 100644
index 000..0b82b53
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/scan/filter/FilterExpressionProcessor.java
@@ -0,0 +1,338 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.core.scan.filter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.BitSet;
+import java.util.List;
+
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.core.AbsoluteTableIdentifier;
+import org.apache.carbondata.core.datastore.DataRefNode;
+import org.apache.carbondata.core.datastore.DataRefNodeFinder;
+import org.apache.carbondata.core.datastore.IndexKey;
+import org.apache.carbondata.core.datastore.block.AbstractIndex;
+import org.apache.carbondata.core.datastore.impl.btree.BTreeDataRefNodeFinder;
+import org.apache.carbondata.core.metadata.DataType;
+import org.apache.carbondata.core.metadata.Encoding;
+import org.apache.carbondata.core.keygenerator.KeyGenException;
+import org.apache.carbondata.core.scan.expression.BinaryExpression;
+import org.apache.carbondata.core.scan.expression.Expression;
+import 
org.apache.carbondata.core.scan.expression.conditional.BinaryConditionalExpression;
+import 
org.apache.carbondata.core.scan.expression.conditional.ConditionalExpression;
+import 
org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedException;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.filter.intf.ExpressionType;
+import 
org.apache.carbondata.core.scan.filter.resolver.ConditionalFilterResolverImpl;
+import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
+import 
org.apache.carbondata.core.scan.filter.resolver.LogicalFilterResolverImpl;
+import 
org.apache.carbondata.core.scan.filter.resolver.RowLevelFilterResolverImpl;
+import 
org.apache.carbondata.core.scan.filter.resolver.RowLevelRangeFilterResolverImpl;
+
+public class FilterExpressionProcessor implements FilterProcessor {
+
+  private static final LogService LOGGER =
+  
LogServiceFactory.getLogService(FilterExpressionProcessor.class.getName());
+
+  /**
+   * Implementation will provide the resolved form of filters based on the
+   * filter expression tree which is been passed in Expression instance.
+   *
+   * @param expressionTree  , filter expression tree
+   * @param tableIdentifier ,contains carbon store informations
+   * @return a filter resolver tree
+   */
+  public FilterResolverIntf getFilterResolver(Expression expressionTree,
+  AbsoluteTableIdentifier tableIdentifier) throws 
FilterUnsupportedException, IOException {
+if (null != expressionTree && null != tableIdentifier) {
+  return getFilterResolvertree(expressionTree, tableIdentifier);
+}
+return null;
+  }
+
+  /**
+   * This API will scan the Segment level all btrees and selects the required
+   * block reference  nodes inorder to push the same to executer for applying 
filters
+   * on the respective data reference node.
+   * Following Algorithm is followed in below API
+   * Step:1 Get the start end key based on the filter tree resolver information
+   * Step:2 Prepare the IndexKeys inorder to scan the tree and get the start 
and end reference
+   * node(block)
+   * Step:3 Once data reference node ranges retrieved traverse the node within 
this range
+   * and select the node based on the block min and max value and the filter 
value.
+   * Step:4 The selected blocks will be send to executers for applying the 
filters with the help
+   * of Filter executers.
+   *
+   */
+  public List getFilterredBlocks(DataRefNode btreeNode,
+  FilterResolverIntf filterResolver, AbstractIndex tableS

[50/57] [abbrv] incubator-carbondata git commit: fix style

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/48316190/core/src/main/java/org/apache/carbondata/core/datastorage/impl/data/compressed/AbstractHeavyCompressedDoubleArrayDataStore.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastorage/impl/data/compressed/AbstractHeavyCompressedDoubleArrayDataStore.java
 
b/core/src/main/java/org/apache/carbondata/core/datastorage/impl/data/compressed/AbstractHeavyCompressedDoubleArrayDataStore.java
deleted file mode 100644
index e47cf42..000
--- 
a/core/src/main/java/org/apache/carbondata/core/datastorage/impl/data/compressed/AbstractHeavyCompressedDoubleArrayDataStore.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.datastorage.impl.data.compressed;
-
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.NodeMeasureDataStore;
-import 
org.apache.carbondata.core.datastorage.compression.ValueCompressionHolder;
-import org.apache.carbondata.core.datastorage.compression.WriterCompressModel;
-import org.apache.carbondata.core.datastorage.dataholder.CarbonWriteDataHolder;
-import org.apache.carbondata.core.util.ValueCompressionUtil;
-
-public abstract class AbstractHeavyCompressedDoubleArrayDataStore
-implements NodeMeasureDataStore //NodeMeasureDataStore
-{
-
-  private LogService LOGGER =
-  
LogServiceFactory.getLogService(AbstractHeavyCompressedDoubleArrayDataStore.class.getName());
-
-  /**
-   * values.
-   */
-  protected ValueCompressionHolder[] values;
-
-  /**
-   * compressionModel.
-   */
-  protected WriterCompressModel compressionModel;
-
-  /**
-   * type
-   */
-  private char[] type;
-
-  /**
-   * AbstractHeavyCompressedDoubleArrayDataStore constructor.
-   *
-   * @param compressionModel
-   */
-  public AbstractHeavyCompressedDoubleArrayDataStore(WriterCompressModel 
compressionModel) {
-this.compressionModel = compressionModel;
-if (null != compressionModel) {
-  this.type = compressionModel.getType();
-  values =
-  new 
ValueCompressionHolder[compressionModel.getValueCompressionHolder().length];
-}
-  }
-
-  // this method first invokes encoding routine to encode the data chunk,
-  // followed by invoking compression routine for preparing the data chunk for 
writing.
-  @Override public byte[][] 
getWritableMeasureDataArray(CarbonWriteDataHolder[] dataHolder) {
-byte[][] returnValue = new byte[values.length][];
-for (int i = 0; i < compressionModel.getValueCompressionHolder().length; 
i++) {
-  values[i] = compressionModel.getValueCompressionHolder()[i];
-  if (type[i] != CarbonCommonConstants.BYTE_VALUE_MEASURE) {
-// first perform encoding of the data chunk
-values[i].setValue(
-
ValueCompressionUtil.getValueCompressor(compressionModel.getCompressionFinders()[i])
-
.getCompressedValues(compressionModel.getCompressionFinders()[i], dataHolder[i],
-compressionModel.getMaxValue()[i],
-compressionModel.getMantissa()[i]));
-  } else {
-values[i].setValue(dataHolder[i].getWritableByteArrayValues());
-  }
-  values[i].compress();
-  returnValue[i] = values[i].getCompressedData();
-}
-
-return returnValue;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/48316190/core/src/main/java/org/apache/carbondata/core/datastorage/impl/data/compressed/HeavyCompressedDoubleArrayDataInMemoryStore.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastorage/impl/data/compressed/HeavyCompressedDoubleArrayDataInMemoryStore.java
 
b/core/src/main/java/org/apache/carbondata/core/datastorage/impl/data/compressed/HeavyCompressedDoubleArrayDataInMemoryStore.java
deleted file mode 100644
index ea42beb..000
--- 
a/core/src/main/java/org/apache/carbondata/core/datastorage/impl/d

[48/57] [abbrv] incubator-carbondata git commit: fix style

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/48316190/core/src/main/java/org/apache/carbondata/core/datastore/compression/none/CompressionNoneLong.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastore/compression/none/CompressionNoneLong.java
 
b/core/src/main/java/org/apache/carbondata/core/datastore/compression/none/CompressionNoneLong.java
new file mode 100644
index 000..04bab54
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/datastore/compression/none/CompressionNoneLong.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.datastore.compression.none;
+
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
+import 
org.apache.carbondata.core.datastore.chunk.store.MeasureChunkStoreFactory;
+import org.apache.carbondata.core.datastore.chunk.store.MeasureDataChunkStore;
+import org.apache.carbondata.core.datastore.compression.Compressor;
+import org.apache.carbondata.core.datastore.compression.CompressorFactory;
+import org.apache.carbondata.core.datastore.compression.ValueCompressionHolder;
+import org.apache.carbondata.core.util.ValueCompressionUtil;
+import org.apache.carbondata.core.util.ValueCompressionUtil.DataType;
+
+public class CompressionNoneLong extends ValueCompressionHolder {
+  /**
+   * Attribute for Carbon LOGGER
+   */
+  private static final LogService LOGGER =
+  LogServiceFactory.getLogService(CompressionNoneLong.class.getName());
+  /**
+   * longCompressor.
+   */
+  private static Compressor compressor = 
CompressorFactory.getInstance().getCompressor();
+  /**
+   * value.
+   */
+  protected long[] value;
+
+  private DataType actualDataType;
+
+  private MeasureDataChunkStore measureChunkStore;
+
+  public CompressionNoneLong(DataType actualDataType) {
+this.actualDataType = actualDataType;
+  }
+
+  @Override public void setValue(long[] value) { this.value = value;  }
+
+  @Override public long[] getValue() { return this.value; }
+
+  @Override public void compress() {
+compressedValue = super.compress(compressor, DataType.DATA_LONG, value);
+  }
+
+  @Override
+  public void uncompress(DataType dataType, byte[] data, int offset, int 
length,
+  int decimalPlaces, Object maxValueObject) {
+super.unCompress(compressor, dataType, data, offset, length);
+setUncompressedValues(value);
+  }
+
+  @Override public void setValueInBytes(byte[] byteValue) {
+ByteBuffer buffer = ByteBuffer.wrap(byteValue);
+this.value = ValueCompressionUtil.convertToLongArray(buffer, 
byteValue.length);
+  }
+
+  @Override public long getLongValue(int index) {
+return measureChunkStore.getLong(index);
+  }
+
+  @Override public double getDoubleValue(int index) {
+return measureChunkStore.getLong(index);
+  }
+
+  @Override public BigDecimal getBigDecimalValue(int index) {
+throw new UnsupportedOperationException("Get big decimal is not 
supported");
+  }
+
+  private void setUncompressedValues(long[] data) {
+this.measureChunkStore =
+  
MeasureChunkStoreFactory.INSTANCE.getMeasureDataChunkStore(DataType.DATA_LONG, 
data.length);
+this.measureChunkStore.putData(data);
+  }
+
+  @Override public void freeMemory() {
+this.measureChunkStore.freeMemory();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/48316190/core/src/main/java/org/apache/carbondata/core/datastore/compression/none/CompressionNoneShort.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastore/compression/none/CompressionNoneShort.java
 
b/core/src/main/java/org/apache/carbondata/core/datastore/compression/none/CompressionNoneShort.java
new file mode 100644
index 000..b84c562
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/datastore/compression/none/CompressionNoneShort.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contr

[52/57] [abbrv] incubator-carbondata git commit: fix style

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/48316190/core/src/main/java/org/apache/carbondata/core/datastorage/compression/WriterCompressModel.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastorage/compression/WriterCompressModel.java
 
b/core/src/main/java/org/apache/carbondata/core/datastorage/compression/WriterCompressModel.java
deleted file mode 100644
index d833c61..000
--- 
a/core/src/main/java/org/apache/carbondata/core/datastorage/compression/WriterCompressModel.java
+++ /dev/null
@@ -1,220 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.datastorage.compression;
-
-import org.apache.carbondata.core.util.CompressionFinder;
-import org.apache.carbondata.core.util.ValueCompressionUtil;
-
-public class WriterCompressModel {
-
-  /**
-   * DataType[]  variable.
-   */
-  private ValueCompressionUtil.DataType[] convertedDataType;
-  /**
-   * DataType[]  variable.
-   */
-  private ValueCompressionUtil.DataType[] actualDataType;
-
-  /**
-   * maxValue
-   */
-  private Object[] maxValue;
-  /**
-   * minValue.
-   */
-  private Object[] minValue;
-
-  /**
-   * uniqueValue
-   */
-  private Object[] uniqueValue;
-  /**
-   * mantissa.
-   */
-  private int[] mantissa;
-
-  /**
-   * aggType
-   */
-  private char[] type;
-
-  /**
-   * dataTypeSelected
-   */
-  private byte[] dataTypeSelected;
-  /**
-   * unCompressValues.
-   */
-  private ValueCompressionHolder[] valueHolder;
-
-  private CompressionFinder[] compressionFinders;
-
-  /**
-   * @return the convertedDataType
-   */
-  public ValueCompressionUtil.DataType[] getConvertedDataType() {
-return convertedDataType;
-  }
-
-  /**
-   * @param convertedDataType the convertedDataType to set
-   */
-  public void setConvertedDataType(ValueCompressionUtil.DataType[] 
convertedDataType) {
-this.convertedDataType = convertedDataType;
-  }
-
-  /**
-   * @return the actualDataType
-   */
-  public ValueCompressionUtil.DataType[] getActualDataType() {
-return actualDataType;
-  }
-
-  /**
-   * @param actualDataType
-   */
-  public void setActualDataType(ValueCompressionUtil.DataType[] 
actualDataType) {
-this.actualDataType = actualDataType;
-  }
-
-  /**
-   * @return the maxValue
-   */
-  public Object[] getMaxValue() {
-return maxValue;
-  }
-
-  /**
-   * @param maxValue the maxValue to set
-   */
-  public void setMaxValue(Object[] maxValue) {
-this.maxValue = maxValue;
-  }
-
-  /**
-   * @return the mantissa
-   */
-  public int[] getMantissa() {
-return mantissa;
-  }
-
-  /**
-   * @param mantissa the mantissa to set
-   */
-  public void setMantissa(int[] mantissa) {
-this.mantissa = mantissa;
-  }
-
-  /**
-   * getUnCompressValues().
-   *
-   * @return the unCompressValues
-   */
-  public ValueCompressionHolder[] getValueCompressionHolder() {
-return valueHolder;
-  }
-
-  /**
-   * @param valueHolder set the ValueCompressionHolder
-   */
-  public void setValueCompressionHolder(ValueCompressionHolder[] valueHolder) {
-this.valueHolder = valueHolder;
-  }
-
-  /**
-   * getMinValue
-   *
-   * @return
-   */
-  public Object[] getMinValue() {
-return minValue;
-  }
-
-  /**
-   * setMinValue.
-   *
-   * @param minValue
-   */
-  public void setMinValue(Object[] minValue) {
-this.minValue = minValue;
-  }
-
-  /**
-   * @return the aggType
-   */
-  public char[] getType() {
-return type;
-  }
-
-  /**
-   * @param type the type to set
-   */
-  public void setType(char[] type) {
-this.type = type;
-  }
-
-  /**
-   * @return the dataTypeSelected
-   */
-  public byte[] getDataTypeSelected() {
-return dataTypeSelected;
-  }
-
-  /**
-   * @param dataTypeSelected the dataTypeSelected to set
-   */
-  public void setDataTypeSelected(byte[] dataTypeSelected) {
-this.dataTypeSelected = dataTypeSelected;
-  }
-
-  /**
-   * getUniqueValue
-   *
-   * @return
-   */
-  public Object[] getUniqueValue() {
-return uniqueValue;
-  }
-
-  /**
-   * setUniqueValue
-   *
-   * @param uniqueValue
-   */
-  public void setUniqueValue(Object[] uniqueValue) {
-

[35/57] [abbrv] incubator-carbondata git commit: move identifier to metadata package

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/d0d25c9b/processing/src/test/java/org/apache/carbondata/lcm/locks/LocalFileLockTest.java
--
diff --git 
a/processing/src/test/java/org/apache/carbondata/lcm/locks/LocalFileLockTest.java
 
b/processing/src/test/java/org/apache/carbondata/lcm/locks/LocalFileLockTest.java
deleted file mode 100644
index 5f7190a..000
--- 
a/processing/src/test/java/org/apache/carbondata/lcm/locks/LocalFileLockTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.lcm.locks;
-
-import org.apache.carbondata.core.CarbonTableIdentifier;
-import org.apache.carbondata.locks.LocalFileLock;
-import org.apache.carbondata.locks.LockUsage;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.pentaho.di.core.util.Assert;
-
-/**
- * Test class to test the functionality of the local file locking.
- *
- * @author Administrator
- */
-public class LocalFileLockTest {
-
-  /**
-   * @throws java.lang.Exception
-   */
-  @Before public void setUp() throws Exception {
-  }
-
-  /**
-   * @throws java.lang.Exception
-   */
-  @After public void tearDown() throws Exception {
-  }
-
-  @Test public void testingLocalFileLockingByAcquiring2Locks() {
-
-   CarbonTableIdentifier carbonTableIdentifier = new 
CarbonTableIdentifier("databaseName", "tableName", "tableId");
-LocalFileLock localLock1 =
-new LocalFileLock(carbonTableIdentifier,
-LockUsage.METADATA_LOCK);
-Assert.assertTrue(localLock1.lock());
-LocalFileLock localLock2 =
-new LocalFileLock(carbonTableIdentifier,
-LockUsage.METADATA_LOCK);
-Assert.assertTrue(!localLock2.lock());
-
-Assert.assertTrue(localLock1.unlock());
-Assert.assertTrue(localLock2.lock());
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/d0d25c9b/processing/src/test/java/org/apache/carbondata/lcm/locks/ZooKeeperLockingTest.java
--
diff --git 
a/processing/src/test/java/org/apache/carbondata/lcm/locks/ZooKeeperLockingTest.java
 
b/processing/src/test/java/org/apache/carbondata/lcm/locks/ZooKeeperLockingTest.java
deleted file mode 100644
index 6c21543..000
--- 
a/processing/src/test/java/org/apache/carbondata/lcm/locks/ZooKeeperLockingTest.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.lcm.locks;
-
-import mockit.NonStrictExpectations;
-import org.apache.carbondata.core.CarbonTableIdentifier;
-import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.locks.LockUsage;
-import org.apache.carbondata.locks.ZooKeeperLocking;
-import org.apache.carbondata.locks.ZookeeperInit;
-import org.apache.zookeeper.server.ServerConfig;
-import org.apache.zookeeper.server.ZooKeeperServerMain;
-import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.ServerSocket;
-import java.util.Properties;
-
-/**
- * @author Administrator
- */
-public class ZooKeeperLockingTest {
-
-  int freePort;
-
-  /**
-   * @throws java.lang.Exception
-   */
-  @Before public voi

[06/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/test/java/org/apache/carbondata/core/scan/filter/FilterUtilTest.java
--
diff --git 
a/core/src/test/java/org/apache/carbondata/core/scan/filter/FilterUtilTest.java 
b/core/src/test/java/org/apache/carbondata/core/scan/filter/FilterUtilTest.java
new file mode 100644
index 000..1da4415
--- /dev/null
+++ 
b/core/src/test/java/org/apache/carbondata/core/scan/filter/FilterUtilTest.java
@@ -0,0 +1,392 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.core.scan.filter;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+
+import org.apache.carbondata.core.cache.dictionary.AbstractDictionaryCacheTest;
+import org.apache.carbondata.core.AbsoluteTableIdentifier;
+import org.apache.carbondata.core.CarbonTableIdentifier;
+import org.apache.carbondata.core.datastore.IndexKey;
+import org.apache.carbondata.core.datastore.block.SegmentProperties;
+import org.apache.carbondata.core.metadata.DataType;
+import org.apache.carbondata.core.metadata.Encoding;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.keygenerator.KeyGenException;
+import 
org.apache.carbondata.core.keygenerator.mdkey.MultiDimKeyVarLengthGenerator;
+import org.apache.carbondata.core.scan.expression.ColumnExpression;
+import org.apache.carbondata.core.scan.expression.Expression;
+import org.apache.carbondata.core.scan.expression.LiteralExpression;
+import org.apache.carbondata.core.scan.expression.conditional.ListExpression;
+import org.apache.carbondata.core.scan.filter.intf.RowImpl;
+
+import mockit.Mock;
+import mockit.MockUp;
+import org.junit.Before;
+import org.junit.Test;
+
+import static junit.framework.TestCase.assertFalse;
+import static junit.framework.TestCase.assertTrue;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+public class FilterUtilTest extends AbstractDictionaryCacheTest {
+
+  private ColumnSchema columnSchema;
+
+  @Before public void setUp() throws Exception {
+init();
+this.databaseName = props.getProperty("database", "testSchema");
+this.tableName = props.getProperty("tableName", "carbon");
+this.carbonStorePath = props.getProperty("storePath", "carbonStore");
+carbonTableIdentifier =
+new CarbonTableIdentifier(databaseName, tableName, 
UUID.randomUUID().toString());
+this.carbonStorePath = props.getProperty("storePath", "carbonStore");
+columnSchema = new ColumnSchema();
+columnSchema.setColumnar(true);
+columnSchema.setColumnName("IMEI");
+columnSchema.setColumnUniqueId(UUID.randomUUID().toString());
+columnSchema.setDataType(DataType.STRING);
+columnSchema.setDimensionColumn(true);
+  }
+
+  @Test public void testCheckIfLeftExpressionRequireEvaluation() {
+List children = new ArrayList<>();
+ListExpression expression = new ListExpression(children);
+boolean result = 
FilterUtil.checkIfLeftExpressionRequireEvaluation(expression);
+assertTrue(result);
+  }
+
+  @Test
+  public void 
testCheckIfLeftExpressionRequireEvaluationWithExpressionNotInstanceOfColumnExpression()
 {
+ColumnExpression expression = new ColumnExpression("test", 
DataType.STRING);
+boolean result = 
FilterUtil.checkIfLeftExpressionRequireEvaluation(expression);
+assertFalse(result);
+  }
+
+  @Test public void testNanSafeEqualsDoublesWithUnEqualValues() {
+Double d1 = new Double(60.67);
+Double d2 = new Double(60.69);
+boolean result = FilterUtil.nanSafeEqualsDoubles(d1, d2);
+assertFalse(result);
+  }
+
+  @Test public void testNanSafeEqualsDoublesWithEqualValues() {
+Double d1 = new Double(60.67);
+Double d2 = new Double(60.67);
+boolean result = FilterUtil.nanSafeEqualsDoubles(d1, d2);
+assertTrue(result);
+  }
+
+  @Test public vo

[49/57] [abbrv] incubator-carbondata git commit: fix style

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/48316190/core/src/main/java/org/apache/carbondata/core/datastore/compression/WriterCompressModel.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastore/compression/WriterCompressModel.java
 
b/core/src/main/java/org/apache/carbondata/core/datastore/compression/WriterCompressModel.java
new file mode 100644
index 000..368209f
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/datastore/compression/WriterCompressModel.java
@@ -0,0 +1,220 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.datastore.compression;
+
+import org.apache.carbondata.core.util.CompressionFinder;
+import org.apache.carbondata.core.util.ValueCompressionUtil;
+
+public class WriterCompressModel {
+
+  /**
+   * DataType[]  variable.
+   */
+  private ValueCompressionUtil.DataType[] convertedDataType;
+  /**
+   * DataType[]  variable.
+   */
+  private ValueCompressionUtil.DataType[] actualDataType;
+
+  /**
+   * maxValue
+   */
+  private Object[] maxValue;
+  /**
+   * minValue.
+   */
+  private Object[] minValue;
+
+  /**
+   * uniqueValue
+   */
+  private Object[] uniqueValue;
+  /**
+   * mantissa.
+   */
+  private int[] mantissa;
+
+  /**
+   * aggType
+   */
+  private char[] type;
+
+  /**
+   * dataTypeSelected
+   */
+  private byte[] dataTypeSelected;
+  /**
+   * unCompressValues.
+   */
+  private ValueCompressionHolder[] valueHolder;
+
+  private CompressionFinder[] compressionFinders;
+
+  /**
+   * @return the convertedDataType
+   */
+  public ValueCompressionUtil.DataType[] getConvertedDataType() {
+return convertedDataType;
+  }
+
+  /**
+   * @param convertedDataType the convertedDataType to set
+   */
+  public void setConvertedDataType(ValueCompressionUtil.DataType[] 
convertedDataType) {
+this.convertedDataType = convertedDataType;
+  }
+
+  /**
+   * @return the actualDataType
+   */
+  public ValueCompressionUtil.DataType[] getActualDataType() {
+return actualDataType;
+  }
+
+  /**
+   * @param actualDataType
+   */
+  public void setActualDataType(ValueCompressionUtil.DataType[] 
actualDataType) {
+this.actualDataType = actualDataType;
+  }
+
+  /**
+   * @return the maxValue
+   */
+  public Object[] getMaxValue() {
+return maxValue;
+  }
+
+  /**
+   * @param maxValue the maxValue to set
+   */
+  public void setMaxValue(Object[] maxValue) {
+this.maxValue = maxValue;
+  }
+
+  /**
+   * @return the mantissa
+   */
+  public int[] getMantissa() {
+return mantissa;
+  }
+
+  /**
+   * @param mantissa the mantissa to set
+   */
+  public void setMantissa(int[] mantissa) {
+this.mantissa = mantissa;
+  }
+
+  /**
+   * getUnCompressValues().
+   *
+   * @return the unCompressValues
+   */
+  public ValueCompressionHolder[] getValueCompressionHolder() {
+return valueHolder;
+  }
+
+  /**
+   * @param valueHolder set the ValueCompressionHolder
+   */
+  public void setValueCompressionHolder(ValueCompressionHolder[] valueHolder) {
+this.valueHolder = valueHolder;
+  }
+
+  /**
+   * getMinValue
+   *
+   * @return
+   */
+  public Object[] getMinValue() {
+return minValue;
+  }
+
+  /**
+   * setMinValue.
+   *
+   * @param minValue
+   */
+  public void setMinValue(Object[] minValue) {
+this.minValue = minValue;
+  }
+
+  /**
+   * @return the aggType
+   */
+  public char[] getType() {
+return type;
+  }
+
+  /**
+   * @param type the type to set
+   */
+  public void setType(char[] type) {
+this.type = type;
+  }
+
+  /**
+   * @return the dataTypeSelected
+   */
+  public byte[] getDataTypeSelected() {
+return dataTypeSelected;
+  }
+
+  /**
+   * @param dataTypeSelected the dataTypeSelected to set
+   */
+  public void setDataTypeSelected(byte[] dataTypeSelected) {
+this.dataTypeSelected = dataTypeSelected;
+  }
+
+  /**
+   * getUniqueValue
+   *
+   * @return
+   */
+  public Object[] getUniqueValue() {
+return uniqueValue;
+  }
+
+  /**
+   * setUniqueValue
+   *
+   * @param uniqueValue
+   */
+  public void setUniqueValue(Object[] uniqueValue) {
+this.uniqu

[01/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
Repository: incubator-carbondata
Updated Branches:
  refs/heads/movecorepackage [created] afa88ee65


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
--
diff --git 
a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
 
b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
index 1f7f443..326bde7 100644
--- 
a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
+++ 
b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
@@ -45,10 +45,10 @@ import 
org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.path.CarbonStorePath;
 import org.apache.carbondata.core.path.CarbonTablePath;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.filesystem.CarbonFile;
-import 
org.apache.carbondata.core.datastorage.store.filesystem.CarbonFileFilter;
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory.FileType;
+import org.apache.carbondata.core.datastorage.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastorage.filesystem.CarbonFileFilter;
+import org.apache.carbondata.core.datastorage.impl.FileFactory;
+import org.apache.carbondata.core.datastorage.impl.FileFactory.FileType;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.core.util.DataTypeUtil;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/processing/src/test/java/org/apache/carbondata/test/util/StoreCreator.java
--
diff --git 
a/processing/src/test/java/org/apache/carbondata/test/util/StoreCreator.java 
b/processing/src/test/java/org/apache/carbondata/test/util/StoreCreator.java
index 3d77e11..3afc0ae 100644
--- a/processing/src/test/java/org/apache/carbondata/test/util/StoreCreator.java
+++ b/processing/src/test/java/org/apache/carbondata/test/util/StoreCreator.java
@@ -45,7 +45,7 @@ import 
org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.path.CarbonStorePath;
 import org.apache.carbondata.core.path.CarbonTablePath;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
+import org.apache.carbondata.core.datastorage.impl.FileFactory;
 import org.apache.carbondata.core.load.BlockDetails;
 import org.apache.carbondata.core.load.LoadMetadataDetails;
 import org.apache.carbondata.core.util.CarbonProperties;



[46/57] [abbrv] incubator-carbondata git commit: fix style

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/48316190/core/src/test/java/org/apache/carbondata/core/datastorage/filesystem/LocalCarbonFileTest.java
--
diff --git 
a/core/src/test/java/org/apache/carbondata/core/datastorage/filesystem/LocalCarbonFileTest.java
 
b/core/src/test/java/org/apache/carbondata/core/datastorage/filesystem/LocalCarbonFileTest.java
deleted file mode 100644
index 9d1ab42..000
--- 
a/core/src/test/java/org/apache/carbondata/core/datastorage/filesystem/LocalCarbonFileTest.java
+++ /dev/null
@@ -1,471 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.datastorage.filesystem;
-
-import mockit.Mock;
-import mockit.MockUp;
-
-import org.apache.carbondata.core.datastorage.impl.FileFactory;
-
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import sun.nio.ch.FileChannelImpl;
-
-import java.io.*;
-import java.nio.channels.ReadableByteChannel;
-import java.util.Objects;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-
-public class LocalCarbonFileTest {
-
-private static LocalCarbonFile localCarbonFile;
-private static File file;
-private static File dir;
-private static FileOutputStream oFile;
-
-@BeforeClass
-static public void setUp() {
-file = new File("Test.carbondata");
-dir = new File("Testdir.carbondata");
-if (!file.exists())
-try {
-file.createNewFile();
-dir.mkdir();
-} catch (IOException e) {
-e.printStackTrace();
-}
-try {
-oFile = new FileOutputStream(file, true);
-
-
-byte[] bytes = "core java api".getBytes();
-
-oFile.write(bytes);
-} catch (FileNotFoundException e) {
-e.printStackTrace();
-localCarbonFile = new LocalCarbonFile(file);
-} catch (IOException e) {
-e.printStackTrace();
-}
-}
-
-@AfterClass
-static public void cleanUp() {
-file.delete();
-dir.delete();
-
-}
-
-@Test
-public void 
testListFilesWithCarbonFileFilterAndWithOutOutDirectoryPermission() {
-CarbonFileFilter carbonFileFilter = new CarbonFileFilter() {
-@Override
-public boolean accept(CarbonFile file) {
-return false;
-}
-};
-new MockUp() {
-@Mock
-public boolean isDirectory() {
-return false;
-}
-
-
-};
-assertTrue(localCarbonFile.listFiles(carbonFileFilter) == null);
-}
-
-@Test
-public void testListFilesWithOutDirPermission() {
-localCarbonFile = new LocalCarbonFile(file);
-new MockUp() {
-@Mock
-public boolean isDirectory() {
-return false;
-}
-};
-assertTrue(localCarbonFile.listFiles() == null);
-}
-
-@Test
-public void testCreateNewFileForException() throws IOException {
-localCarbonFile = new LocalCarbonFile(new File(""));
-assertTrue(!localCarbonFile.createNewFile());
-}
-
-@Test
-public void testCheckIfFileExists() throws IOException {
-localCarbonFile = new LocalCarbonFile(new File(""));
-assertTrue(!localCarbonFile.exists());
-}
-
-@Test
-public void testRenameForce() {
-localCarbonFile = new LocalCarbonFile(file);
-assertTrue(localCarbonFile.renameForce("Testdb.carbon"));
-File file1 = new File("Testdb.carbon");
-if (file1.exists()) {
-file1.delete();
-}
-}
-
-@Test
-public void testRenameTo() {
-localCarbonFile = new LocalCarbonFile(file);
-assertTrue(!localCarbonFile.renameTo("Testdb.carbon"));
-}
-
-@Test
-public void testsetLastModifiedTime() {
-localCarbonFile = new LocalCarbonFile(file);
-assertTrue(!localCarbonFile.setLastModifiedTime(50L));
-}
-
-@Test
-public void testtruncate() {
-localCarbonFile = new LocalCa

[53/57] [abbrv] incubator-carbondata git commit: fix style

2017-01-13 Thread jackylk
fix style


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/48316190
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/48316190
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/48316190

Branch: refs/heads/movecorepackage
Commit: 4831619042155b9988f103f3bca0519229c3d8f3
Parents: ae17158
Author: jackylk 
Authored: Fri Jan 13 16:13:46 2017 +0800
Committer: jackylk 
Committed: Fri Jan 13 16:13:46 2017 +0800

--
 .../core/atomic/AtomicFileOperationsImpl.java   |   6 +-
 .../dictionary/AbstractDictionaryCache.java |   4 +-
 .../core/compression/BigDecimalCompressor.java  |   2 +-
 .../core/compression/BigIntCompressor.java  |   2 +-
 .../core/compression/DoubleCompressor.java  |   2 +-
 .../core/compression/ValueCompressor.java   |   2 +-
 .../carbondata/core/datastorage/FileHolder.java |  88 
 .../core/datastorage/MeasureDataWrapper.java|  30 --
 .../core/datastorage/NodeMeasureDataStore.java  |  34 --
 .../columnar/BlockIndexerStorageForInt.java | 226 -
 .../BlockIndexerStorageForNoInvertedIndex.java  | 159 --
 .../datastorage/columnar/ColumnGroupModel.java  | 116 -
 .../columnar/ColumnWithIntIndex.java|  82 
 .../columnar/ColumnWithIntIndexForHighCard.java |  49 --
 .../columnar/ColumnarKeyStoreDataHolder.java|  54 ---
 .../columnar/ColumnarKeyStoreMetadata.java  |  53 --
 .../core/datastorage/columnar/IndexStorage.java |  44 --
 .../datastorage/columnar/UnBlockIndexer.java|  78 ---
 .../datastorage/compression/Compressor.java |  60 ---
 .../compression/CompressorFactory.java  |  52 --
 .../compression/MeasureMetaDataModel.java   | 128 -
 .../compression/ReaderCompressModel.java|  61 ---
 .../compression/SnappyCompressor.java   | 222 -
 .../compression/ValueCompressionHolder.java | 115 -
 .../compression/WriterCompressModel.java| 220 -
 .../decimal/CompressionMaxMinByte.java  | 115 -
 .../decimal/CompressionMaxMinDefault.java   | 117 -
 .../decimal/CompressionMaxMinInt.java   | 113 -
 .../decimal/CompressionMaxMinLong.java  | 113 -
 .../decimal/CompressionMaxMinShort.java | 115 -
 .../nondecimal/CompressionNonDecimalByte.java   |  99 
 .../CompressionNonDecimalDefault.java   | 101 
 .../nondecimal/CompressionNonDecimalInt.java| 100 
 .../nondecimal/CompressionNonDecimalLong.java   | 101 
 .../CompressionNonDecimalMaxMinByte.java| 105 
 .../CompressionNonDecimalMaxMinDefault.java | 107 
 .../CompressionNonDecimalMaxMinInt.java | 105 
 .../CompressionNonDecimalMaxMinLong.java| 106 
 .../CompressionNonDecimalMaxMinShort.java   | 104 
 .../nondecimal/CompressionNonDecimalShort.java  | 100 
 .../compression/none/CompressionNoneByte.java   | 104 
 .../none/CompressionNoneDefault.java| 102 
 .../compression/none/CompressionNoneInt.java| 102 
 .../compression/none/CompressionNoneLong.java   |  99 
 .../compression/none/CompressionNoneShort.java  | 104 
 .../compression/type/CompressionBigDecimal.java | 146 --
 .../dataholder/CarbonReadDataHolder.java|  50 --
 .../dataholder/CarbonWriteDataHolder.java   | 280 ---
 .../filesystem/AbstractDFSCarbonFile.java   | 217 -
 .../filesystem/AlluxioCarbonFile.java   | 130 -
 .../core/datastorage/filesystem/CarbonFile.java |  66 ---
 .../filesystem/CarbonFileFilter.java|  24 -
 .../datastorage/filesystem/HDFSCarbonFile.java  | 128 -
 .../datastorage/filesystem/LocalCarbonFile.java | 229 -
 .../filesystem/ViewFSCarbonFile.java| 126 -
 .../impl/CompressedDataMeasureDataWrapper.java  |  37 --
 .../datastorage/impl/DFSFileHolderImpl.java | 134 -
 .../core/datastorage/impl/FileFactory.java  | 485 ---
 .../core/datastorage/impl/FileHolderImpl.java   | 198 
 ...ractHeavyCompressedDoubleArrayDataStore.java |  90 
 ...yCompressedDoubleArrayDataInMemoryStore.java |  30 --
 .../carbondata/core/datastore/DataRefNode.java  |   1 -
 .../carbondata/core/datastore/FileHolder.java   |  88 
 .../core/datastore/MeasureDataWrapper.java  |  30 ++
 .../core/datastore/NodeMeasureDataStore.java|  34 ++
 .../core/datastore/block/SegmentProperties.java |   5 +-
 .../core/datastore/block/TableBlockInfo.java|   2 +-
 .../datastore/chunk/MeasureColumnDataChunk.java |   2 +-
 .../impl/FixedLengthDimensionDataChunk.java |   3 +-
 .../impl/VariableLengthDimensionDataChunk.java  |   3 +-
 .../chunk/reader/CarbonDataReaderFactory.java   |  12 +-
 .../reader/DimensionColumnChunkReader.java  |   2 +-
 .../chunk/reader

[57/57] [abbrv] incubator-carbondata git commit: fix style

2017-01-13 Thread jackylk
fix style


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/afa88ee6
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/afa88ee6
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/afa88ee6

Branch: refs/heads/movecorepackage
Commit: afa88ee654fc76fcf59c70d51d378a0e3fa75e71
Parents: f75ddfb
Author: jackylk 
Authored: Sat Jan 14 15:56:10 2017 +0800
Committer: jackylk 
Committed: Sat Jan 14 15:56:10 2017 +0800

--
 .../core/locks/AbstractCarbonLock.java  |  77 
 .../core/locks/CarbonLockFactory.java   |  94 +
 .../carbondata/core/locks/CarbonLockUtil.java   |  64 ++
 .../carbondata/core/locks/HdfsFileLock.java | 129 
 .../carbondata/core/locks/ICarbonLock.java  |  40 
 .../carbondata/core/locks/LocalFileLock.java| 164 
 .../apache/carbondata/core/locks/LockUsage.java |  36 
 .../carbondata/core/locks/ZooKeeperLocking.java | 195 +++
 .../carbondata/core/locks/ZookeeperInit.java|  82 
 .../core/update/CarbonUpdateUtil.java   |   2 +-
 .../core/updatestatus/SegmentStatusManager.java |   8 +-
 .../SegmentUpdateStatusManager.java |   6 +-
 .../updatestatus/locks/AbstractCarbonLock.java  |  77 
 .../updatestatus/locks/CarbonLockFactory.java   |  94 -
 .../core/updatestatus/locks/CarbonLockUtil.java |  64 --
 .../core/updatestatus/locks/HdfsFileLock.java   | 129 
 .../core/updatestatus/locks/ICarbonLock.java|  40 
 .../core/updatestatus/locks/LocalFileLock.java  | 164 
 .../core/updatestatus/locks/LockUsage.java  |  36 
 .../updatestatus/locks/ZooKeeperLocking.java| 195 ---
 .../core/updatestatus/locks/ZookeeperInit.java  |  82 
 .../core/locks/LocalFileLockTest.java   |  64 ++
 .../core/locks/ZooKeeperLockingTest.java| 142 ++
 .../updatestatus/locks/LocalFileLockTest.java   |  64 --
 .../locks/ZooKeeperLockingTest.java | 142 --
 .../datacompaction/DataCompactionLockTest.scala |   2 +-
 .../dataretention/DataRetentionTestCase.scala   |   2 +-
 .../carbondata/spark/load/CarbonLoaderUtil.java |   2 +-
 .../spark/merger/CarbonDataMergerUtil.java  |   6 +-
 .../spark/rdd/CarbonGlobalDictionaryRDD.scala   |   2 +-
 .../spark/rdd/DataManagementFunc.scala  |   2 +-
 .../spark/rdd/CarbonDataRDDFactory.scala|   2 +-
 .../sql/execution/command/IUDCommands.scala |   2 +-
 .../execution/command/carbonTableSchema.scala   |   2 +-
 .../apache/spark/sql/hive/CarbonMetastore.scala |   2 +-
 .../spark/rdd/CarbonDataRDDFactory.scala|   8 +-
 .../spark/sql/CarbonDataFrameWriter.scala   |   2 +-
 .../scala/org/apache/spark/sql/CarbonScan.scala |   2 -
 .../org/apache/spark/sql/TableCreator.scala |   2 +-
 .../execution/CarbonLateDecodeStrategy.scala|   2 +-
 .../execution/command/carbonTableSchema.scala   |   4 +-
 .../apache/spark/sql/hive/CarbonMetastore.scala |   8 +-
 42 files changed, 1120 insertions(+), 1122 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/afa88ee6/core/src/main/java/org/apache/carbondata/core/locks/AbstractCarbonLock.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/locks/AbstractCarbonLock.java 
b/core/src/main/java/org/apache/carbondata/core/locks/AbstractCarbonLock.java
new file mode 100644
index 000..7ced442
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/locks/AbstractCarbonLock.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.locks;
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.util.CarbonProperties;
+
+/**
+ * This is the abstract class of the lock implementati

[51/57] [abbrv] incubator-carbondata git commit: fix style

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/48316190/core/src/main/java/org/apache/carbondata/core/datastorage/compression/none/CompressionNoneLong.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastorage/compression/none/CompressionNoneLong.java
 
b/core/src/main/java/org/apache/carbondata/core/datastorage/compression/none/CompressionNoneLong.java
deleted file mode 100644
index c22464d..000
--- 
a/core/src/main/java/org/apache/carbondata/core/datastorage/compression/none/CompressionNoneLong.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.datastorage.compression.none;
-
-import java.math.BigDecimal;
-import java.nio.ByteBuffer;
-
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.datastorage.compression.Compressor;
-import org.apache.carbondata.core.datastorage.compression.CompressorFactory;
-import 
org.apache.carbondata.core.datastorage.compression.ValueCompressionHolder;
-import 
org.apache.carbondata.core.datastore.chunk.store.MeasureChunkStoreFactory;
-import org.apache.carbondata.core.datastore.chunk.store.MeasureDataChunkStore;
-import org.apache.carbondata.core.util.ValueCompressionUtil;
-import org.apache.carbondata.core.util.ValueCompressionUtil.DataType;
-
-public class CompressionNoneLong extends ValueCompressionHolder {
-  /**
-   * Attribute for Carbon LOGGER
-   */
-  private static final LogService LOGGER =
-  LogServiceFactory.getLogService(CompressionNoneLong.class.getName());
-  /**
-   * longCompressor.
-   */
-  private static Compressor compressor = 
CompressorFactory.getInstance().getCompressor();
-  /**
-   * value.
-   */
-  protected long[] value;
-
-  private DataType actualDataType;
-
-  private MeasureDataChunkStore measureChunkStore;
-
-  public CompressionNoneLong(DataType actualDataType) {
-this.actualDataType = actualDataType;
-  }
-
-  @Override public void setValue(long[] value) { this.value = value;  }
-
-  @Override public long[] getValue() { return this.value; }
-
-  @Override public void compress() {
-compressedValue = super.compress(compressor, DataType.DATA_LONG, value);
-  }
-
-  @Override
-  public void uncompress(DataType dataType, byte[] data, int offset, int 
length,
-  int decimalPlaces, Object maxValueObject) {
-super.unCompress(compressor, dataType, data, offset, length);
-setUncompressedValues(value);
-  }
-
-  @Override public void setValueInBytes(byte[] byteValue) {
-ByteBuffer buffer = ByteBuffer.wrap(byteValue);
-this.value = ValueCompressionUtil.convertToLongArray(buffer, 
byteValue.length);
-  }
-
-  @Override public long getLongValue(int index) {
-return measureChunkStore.getLong(index);
-  }
-
-  @Override public double getDoubleValue(int index) {
-return measureChunkStore.getLong(index);
-  }
-
-  @Override public BigDecimal getBigDecimalValue(int index) {
-throw new UnsupportedOperationException("Get big decimal is not 
supported");
-  }
-
-  private void setUncompressedValues(long[] data) {
-this.measureChunkStore =
-  
MeasureChunkStoreFactory.INSTANCE.getMeasureDataChunkStore(DataType.DATA_LONG, 
data.length);
-this.measureChunkStore.putData(data);
-  }
-
-  @Override public void freeMemory() {
-this.measureChunkStore.freeMemory();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/48316190/core/src/main/java/org/apache/carbondata/core/datastorage/compression/none/CompressionNoneShort.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastorage/compression/none/CompressionNoneShort.java
 
b/core/src/main/java/org/apache/carbondata/core/datastorage/compression/none/CompressionNoneShort.java
deleted file mode 100644
index f0b1c99..000
--- 
a/core/src/main/java/org/apache/carbondata/core/datastorage/compression/none/CompressionNoneShort.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation 

[11/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/scan/result/iterator/VectorDetailQueryResultIterator.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/scan/result/iterator/VectorDetailQueryResultIterator.java
 
b/core/src/main/java/org/apache/carbondata/scan/result/iterator/VectorDetailQueryResultIterator.java
deleted file mode 100644
index 417f597..000
--- 
a/core/src/main/java/org/apache/carbondata/scan/result/iterator/VectorDetailQueryResultIterator.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.scan.result.iterator;
-
-import java.util.List;
-import java.util.concurrent.ExecutorService;
-
-import org.apache.carbondata.scan.executor.infos.BlockExecutionInfo;
-import org.apache.carbondata.scan.model.QueryModel;
-import org.apache.carbondata.scan.result.vector.CarbonColumnarBatch;
-
-/**
- * It reads the data vector batch format
- */
-public class VectorDetailQueryResultIterator extends 
AbstractDetailQueryResultIterator {
-
-  private final Object lock = new Object();
-
-  public VectorDetailQueryResultIterator(List infos, 
QueryModel queryModel,
-  ExecutorService execService) {
-super(infos, queryModel, execService);
-  }
-
-  @Override public Object next() {
-throw new UnsupportedOperationException("call processNextBatch instaed");
-  }
-
-  public void processNextBatch(CarbonColumnarBatch columnarBatch) {
-synchronized (lock) {
-  updateDataBlockIterator();
-  if (dataBlockIterator != null) {
-dataBlockIterator.processNextBatch(columnarBatch);
-  }
-}
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/scan/result/vector/CarbonColumnVector.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/scan/result/vector/CarbonColumnVector.java
 
b/core/src/main/java/org/apache/carbondata/scan/result/vector/CarbonColumnVector.java
deleted file mode 100644
index 0af1857..000
--- 
a/core/src/main/java/org/apache/carbondata/scan/result/vector/CarbonColumnVector.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.scan.result.vector;
-
-import org.apache.spark.sql.types.Decimal;
-
-public interface CarbonColumnVector {
-
-  void putShort(int rowId, short value);
-
-  void putInt(int rowId, int value);
-
-  void putLong(int rowId, long value);
-
-  void putDecimal(int rowId, Decimal value, int precision);
-
-  void putDouble(int rowId, double value);
-
-  void putBytes(int rowId, byte[] value);
-
-  void putBytes(int rowId, int offset, int length, byte[] value);
-
-  void putNull(int rowId);
-
-  boolean isNull(int rowId);
-
-  void putObject(int rowId, Object obj);
-
-  Object getData(int rowId);
-
-  void reset();
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/scan/result/vector/CarbonColumnarBatch.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/scan/result/vector/CarbonColumnarBatch.java
 
b/core/src/main/java/org/apache/carbondata/scan/result/vector/CarbonColumnarBatch.java

[54/57] [abbrv] incubator-carbondata git commit: move package in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f75ddfb6/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
--
diff --cc 
integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
index 5ebb124,14a0930..ea52697
--- 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataLoadRDD.scala
@@@ -36,10 -37,10 +37,11 @@@ import org.apache.carbondata.common.Car
  import org.apache.carbondata.common.logging.LogServiceFactory
  import org.apache.carbondata.common.logging.impl.StandardLogService
  import org.apache.carbondata.core.constants.CarbonCommonConstants
 -import org.apache.carbondata.core.load.{BlockDetails, LoadMetadataDetails}
 +import org.apache.carbondata.core.updatestatus.LoadMetadataDetails
  import org.apache.carbondata.core.util.{CarbonProperties, 
CarbonTimeStatisticsFactory}
  import org.apache.carbondata.processing.constants.DataProcessorConstants
- import org.apache.carbondata.processing.csvreaderstep.{BlockDetails, 
JavaRddIterator, RddInputUtils}
++import org.apache.carbondata.processing.csvreaderstep.{BlockDetails, 
RddInputUtils}
+ import org.apache.carbondata.processing.csvreaderstep.RddInputUtils
  import org.apache.carbondata.processing.etl.DataLoadingException
  import org.apache.carbondata.processing.graphgenerator.GraphGenerator
  import org.apache.carbondata.processing.model.CarbonLoadModel

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f75ddfb6/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/CarbonMergerRDD.scala
--

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f75ddfb6/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
--
diff --cc 
integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
index 645294f,46e83a5..77dc78a
--- 
a/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
+++ 
b/integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/NewCarbonDataLoadRDD.scala
@@@ -45,7 -43,6 +45,7 @@@ import org.apache.carbondata.core.updat
  import org.apache.carbondata.core.util.{CarbonProperties, 
CarbonTimeStatisticsFactory}
  import org.apache.carbondata.hadoop.csv.CSVInputFormat
  import org.apache.carbondata.hadoop.csv.recorditerator.RecordReaderIterator
- import org.apache.carbondata.processing.csvreaderstep.{BlockDetails, 
JavaRddIterator}
++import org.apache.carbondata.processing.csvreaderstep.BlockDetails
  import org.apache.carbondata.processing.model.CarbonLoadModel
  import org.apache.carbondata.processing.newflow.DataLoadExecutor
  import 
org.apache.carbondata.processing.newflow.exception.BadRecordFoundException

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f75ddfb6/integration/spark-common/src/main/scala/org/apache/spark/sql/catalyst/CarbonDDLSqlParser.scala
--

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f75ddfb6/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
--

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f75ddfb6/processing/src/main/java/org/apache/carbondata/processing/csvreaderstep/CsvInput.java
--

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f75ddfb6/processing/src/main/java/org/apache/carbondata/processing/model/CarbonLoadModel.java
--

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f75ddfb6/processing/src/main/java/org/apache/carbondata/processing/newflow/CarbonDataLoadConfiguration.java
--

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f75ddfb6/processing/src/main/java/org/apache/carbondata/processing/newflow/DataLoadProcessBuilder.java
--

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f75ddfb6/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
--

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/f75ddfb6/processing/src/test/java/org/apache/carbondata/carbon/datastore/BlockIndexStoreTest.java
---

[12/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/scan/model/QueryModel.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/scan/model/QueryModel.java 
b/core/src/main/java/org/apache/carbondata/scan/model/QueryModel.java
deleted file mode 100644
index 8665463..000
--- a/core/src/main/java/org/apache/carbondata/scan/model/QueryModel.java
+++ /dev/null
@@ -1,365 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.scan.model;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.carbondata.core.cache.dictionary.Dictionary;
-import org.apache.carbondata.core.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.datastore.block.TableBlockInfo;
-import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
-import org.apache.carbondata.core.stats.QueryStatisticsRecorder;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.update.UpdateVO;
-import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.scan.expression.ColumnExpression;
-import org.apache.carbondata.scan.expression.Expression;
-import org.apache.carbondata.scan.expression.UnknownExpression;
-import org.apache.carbondata.scan.expression.conditional.ConditionalExpression;
-import org.apache.carbondata.scan.filter.resolver.FilterResolverIntf;
-
-/**
- * Query model which will have all the detail
- * about the query, This will be sent from driver to executor '
- * This will be refereed to executing the query.
- */
-public class QueryModel implements Serializable {
-
-  /**
-   * serialization version
-   */
-  private static final long serialVersionUID = -4674677234007089052L;
-  /**
-   * this will hold the information about the dictionary dimension
-   * which to
-   */
-  public transient Map columnToDictionaryMapping;
-  /**
-   * list of dimension selected for in query
-   */
-  private List queryDimension;
-  /**
-   * list of measure selected in query
-   */
-  private List queryMeasures;
-  /**
-   * query id
-   */
-  private String queryId;
-  /**
-   * filter tree
-   */
-  private FilterResolverIntf filterExpressionResolverTree;
-
-  /**
-   * table block information in which query will be executed
-   */
-  private List tableBlockInfos;
-  /**
-   * absolute table identifier
-   */
-  private AbsoluteTableIdentifier absoluteTableIdentifier;
-  /**
-   * To handle most of the computation in query engines like spark and hive, 
carbon should give
-   * raw detailed records to it.
-   */
-  private boolean forcedDetailRawQuery;
-  /**
-   * table on which query will be executed
-   * TODO need to remove this ad pass only the path
-   * and carbon metadata will load the table from metadata file
-   */
-  private CarbonTable table;
-
-  private QueryStatisticsRecorder statisticsRecorder;
-
-  private boolean vectorReader;
-
-  /**
-   * Invalid table blocks, which need to be removed from
-   * memory, invalid blocks can be segment which are deleted
-   * or compacted
-   */
-  private List invalidSegmentIds;
-  private Map invalidSegmentBlockIdMap =
-  new HashMap<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-
-  public QueryModel() {
-tableBlockInfos = new ArrayList();
-queryDimension = new ArrayList();
-queryMeasures = new ArrayList();
-invalidSegmentIds = new ArrayList<>();
-  }
-
-  public static QueryModel createModel(AbsoluteTableIdentifier 
absoluteTableIdentifier,
-  CarbonQueryPlan queryPlan, CarbonTable carbonTable) {
-QueryModel queryModel = new QueryModel();
-String factTableName = carbonTable.getFactTableName();
-queryModel.setAbsoluteTableIdentifier(absoluteTableIdentifier);
-
-fillQueryModel(queryPlan, carbonTable, queryModel

[26/57] [abbrv] incubator-carbondata git commit: move org.apache.carbon.common in core

2017-01-13 Thread jackylk
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
--
diff --git 
a/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
 
b/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
new file mode 100644
index 000..7b29c8b
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
@@ -0,0 +1,604 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.core.scan.executor.infos;
+
+import java.util.Map;
+
+import org.apache.carbondata.core.cache.dictionary.Dictionary;
+import org.apache.carbondata.core.AbsoluteTableIdentifier;
+import org.apache.carbondata.core.datastore.DataRefNode;
+import org.apache.carbondata.core.datastore.IndexKey;
+import org.apache.carbondata.core.datastore.block.AbstractIndex;
+import org.apache.carbondata.core.keygenerator.KeyGenerator;
+import org.apache.carbondata.core.scan.filter.GenericQueryType;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.model.QueryDimension;
+import org.apache.carbondata.core.scan.model.QueryMeasure;
+
+/**
+ * Below class will have all the properties which needed during query execution
+ * for one block
+ */
+public class BlockExecutionInfo {
+
+  /**
+   * block on which query will be executed
+   */
+  private AbstractIndex blockIndex;
+
+  /**
+   * each segment key size can be different and in that case we need to update
+   * the fixed key with latest segment key generator. so this property will
+   * tell whether this is required or not if key size is same then it is not
+   * required
+   */
+  private boolean isFixedKeyUpdateRequired;
+
+  /**
+   * below to store all the information required for aggregation during query
+   * execution
+   */
+  private AggregatorInfo aggregatorInfo;
+
+  /**
+   * this will be used to get the first tentative block from which query
+   * execution start, this will be useful in case of filter query to get the
+   * start block based on filter values
+   */
+  private IndexKey startKey;
+
+  /**
+   * this will be used to get the last tentative block till which scanning
+   * will be done, this will be useful in case of filter query to get the last
+   * block based on filter values
+   */
+  private IndexKey endKey;
+
+  private String blockId;
+
+  /**
+   * masked byte for block which will be used to unpack the fixed length key,
+   * this will be used for updating the older block key with new block key
+   * generator
+   */
+  private int[] maskedByteForBlock;
+
+  /**
+   * total number of dimension in block
+   */
+  private int totalNumberDimensionBlock;
+
+  /**
+   * total number of measure in block
+   */
+  private int totalNumberOfMeasureBlock;
+
+  /**
+   * will be used to read the dimension block from file
+   */
+  private int[][] allSelectedDimensionBlocksIndexes;
+
+  /**
+   * will be used to read the measure block from file
+   */
+  private int[][] allSelectedMeasureBlocksIndexes;
+
+  /**
+   * this will be used to update the older block fixed length keys with the
+   * new block fixed length key
+   */
+  private KeyStructureInfo keyStructureInfo;
+
+  /**
+   * first block from which query execution will start
+   */
+  private DataRefNode firstDataBlock;
+
+  /**
+   * number of block to be scanned in the query
+   */
+  private long numberOfBlockToScan;
+
+  /**
+   * key size of the fixed length dimension column
+   */
+  private int fixedLengthKeySize;
+
+  /**
+   * dictionary column block indexes based on query
+   */
+  private int[] dictionaryColumnBlockIndex;
+  /**
+   * no dictionary column block indexes in based on the query order
+   */
+  private int[] noDictionaryBlockIndexes;
+
+  /**
+   * key generator used for generating the table block fixed length key
+   */
+  private KeyGenerator blockKeyGenerator;
+
+  /**
+   * each column value size
+   */
+  private int[] eachColumnValueSize;
+
+  /**
+   * column group block i

<    1   2   3   4   5   6   7   >