[carbondata] branch master updated: [CARBONDATA-3467] Fix count(*) with filter on string column

2019-07-12 Thread ravipesala
This is an automated email from the ASF dual-hosted git repository.

ravipesala pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
 new ebe78dc  [CARBONDATA-3467] Fix count(*) with filter on string column
ebe78dc is described below

commit ebe78dca170773a5f4a37e8146a923b2dc6604a4
Author: Indhumathi27 
AuthorDate: Tue Jul 9 09:10:24 2019 +0530

[CARBONDATA-3467] Fix count(*) with filter on string column

Problem:
count(*) with filter on string column throws Unresolved Exception
Solution:
Added check for UnresolvedAlias in MVAnalyzer

This closes #3319
---
 .../org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala   |  9 -
 .../carbondata/mv/rewrite/TestAllOperationsOnMV.scala   | 13 -
 2 files changed, 20 insertions(+), 2 deletions(-)

diff --git 
a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
 
b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
index 04bcfbb..edd9c81 100644
--- 
a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
+++ 
b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
@@ -70,7 +70,14 @@ class MVAnalyzerRule(sparkSession: SparkSession) extends 
Rule[LogicalPlan] {
 plan.transform {
   case aggregate@Aggregate(grp, aExp, child) =>
 // check for if plan is for dataload for preaggregate table, then skip 
applying mv
-if (aExp.exists(p => p.name.equals("preAggLoad") || 
p.name.equals("preAgg"))) {
+val isPreAggLoad = aExp.exists { p =>
+  if (p.isInstanceOf[UnresolvedAlias]) {
+false
+  } else {
+p.name.equals("preAggLoad") || p.name.equals("preAgg")
+  }
+}
+if (isPreAggLoad) {
   needAnalysis = false
 }
 Aggregate(grp, aExp, child)
diff --git 
a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
 
b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
index 839a2e6..81ddf38 100644
--- 
a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
+++ 
b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
@@ -540,6 +540,17 @@ class TestAllOperationsOnMV extends QueryTest with 
BeforeAndAfterEach {
 }.getMessage.contains("Operation not allowed on child table.")
   }
 
+  test("test count(*) with filter") {
+sql("drop table if exists maintable")
+sql("create table maintable(id int, name string, id1 string, id2 string, 
dob timestamp, doj " +
+"timestamp, v1 bigint, v2 bigint, v3 decimal(30,10), v4 
decimal(20,10), v5 double, v6 " +
+"double ) stored by 'carbondata'")
+sql("insert into maintable values(1, 'abc', 'id001', 'id002', '2017-01-01 
00:00:00','2017-01-01 " +
+"00:00:00', 234, 2242,12.4,23.4,2323,455 )")
+checkAnswer(sql("select count(*) from maintable where  id1 < id2"), 
Seq(Row(1)))
+sql("drop table if exists maintable")
+  }
+
   test("drop meta cache on mv datamap table") {
 sql("drop table IF EXISTS maintable")
 sql("create table maintable(name string, c_code int, price int) stored by 
'carbondata'")
@@ -580,6 +591,6 @@ class TestAllOperationsOnMV extends QueryTest with 
BeforeAndAfterEach {
 newSet.addAll(oldSet)
 newSet
   }
-
+  
 }
 



[carbondata] branch master updated: [CARBONDATA-3457][MV] Fix Column not found issue with Query having Cast Expression

2019-07-12 Thread ravipesala
This is an automated email from the ASF dual-hosted git repository.

ravipesala pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
 new 771d436  [CARBONDATA-3457][MV] Fix Column not found issue with Query 
having Cast Expression
771d436 is described below

commit 771d436fe2ed2d34ccf0ee1d8f555af30c382345
Author: Indhumathi27 
AuthorDate: Thu Jun 27 17:09:20 2019 +0530

[CARBONDATA-3457][MV] Fix Column not found issue with Query having Cast 
Expression

Problem:
For Cast(exp), alias reference is not included, hence throws column not 
found exception for column given inside cast expression.

Solution:
AliasMap has to be created for CAST[EXP] also and should be replaced with 
subsumer alias map references.

This closes #3312
---
 .../carbondata/mv/rewrite/DefaultMatchMaker.scala  | 16 ++
 .../carbondata/mv/rewrite/MVCreateTestCase.scala   | 58 ++
 2 files changed, 74 insertions(+)

diff --git 
a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/DefaultMatchMaker.scala
 
b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/DefaultMatchMaker.scala
index 9a9a2a6..5329608 100644
--- 
a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/DefaultMatchMaker.scala
+++ 
b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/DefaultMatchMaker.scala
@@ -53,6 +53,14 @@ abstract class DefaultMatchPattern extends 
MatchPattern[ModularPlan] {
 (a.child.asInstanceOf[Attribute], a.toAttribute)
   })
 
+// Create aliasMap with Expression to alias reference attribute
+val aliasMapExp =
+  subsumer.outputList.collect {
+case a: Alias if a.child.isInstanceOf[Expression] &&
+ !a.child.isInstanceOf[AggregateExpression] =>
+  a.child -> a.toAttribute
+  }.toMap
+
 // Check and replace all alias references with subsumer alias map 
references.
 val compensation1 = compensation.transform {
   case plan if !plan.skip && plan != subsumer =>
@@ -66,6 +74,14 @@ abstract class DefaultMatchPattern extends 
MatchPattern[ModularPlan] {
   exprId = ref.exprId,
   qualifier = a.qualifier)
   }.getOrElse(a)
+  case a: Expression =>
+aliasMapExp
+  .get(a)
+  .map { ref =>
+AttributeReference(
+  ref.name, ref.dataType)(
+  exprId = ref.exprId)
+  }.getOrElse(a)
   }
 }
 
diff --git 
a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
 
b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
index 1d259c8..ca6c0c5 100644
--- 
a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
+++ 
b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
@@ -1169,6 +1169,64 @@ class MVCreateTestCase extends QueryTest with 
BeforeAndAfterAll {
 assert(TestUtil.verifyMVDataMap(analyzed1, "da_cast"))
   }
 
+  test("test cast of expression with mv") {
+sql("drop table IF EXISTS maintable")
+sql("create table maintable (m_month bigint, c_code string, " +
+"c_country smallint, d_dollar_value double, q_quantity double, u_unit 
smallint, b_country smallint, i_id int, y_year smallint) stored by 
'carbondata'")
+sql("insert into maintable select 10, 'xxx', 123, 456, 45, 5, 23, 1, 2000")
+sql("drop datamap if exists da_cast")
+sql(
+  "create datamap da_cast using 'mv' as select cast(floor((m_month +1000) 
/ 900) * 900 - 2000 AS INT) as a, c_code as abc from maintable")
+val df1 = sql(
+  " select cast(floor((m_month +1000) / 900) * 900 - 2000 AS INT) as a 
,c_code as abc  from maintable")
+val df2 = sql(
+  " select cast(floor((m_month +1000) / 900) * 900 - 2000 AS INT),c_code 
as abc  from maintable")
+val analyzed1 = df1.queryExecution.analyzed
+assert(TestUtil.verifyMVDataMap(analyzed1, "da_cast"))
+  }
+
+  test("test cast with & without alias") {
+sql("drop table IF EXISTS maintable")
+sql("create table maintable (m_month bigint, c_code string, " +
+"c_country smallint, d_dollar_value double, q_quantity double, u_unit 
smallint, b_country smallint, i_id int, y_year smallint) stored by 
'carbondata'")
+sql("insert into maintable select 10, 'xxx', 123, 456, 45, 5, 23, 1, 2000")
+sql("drop datamap if exists da_cast")
+sql(
+  "create datamap da_cast using 'mv' as select cast(m_month + 1000 AS INT) 
as a, c_code as abc from maintable")
+checkAnswer(sql("select cast(m_month + 1000 AS INT) as a, c_code as abc 
from maintable"), Seq(Row(1010, "xxx")))
+var df1 = sql("select cast(m_month + 1000 AS INT) as a, c_code as abc from 
maintable")
+var analyzed1 = 

Jenkins build is still unstable: carbondata-master-spark-2.1 » Apache CarbonData :: Spark2 #3638

2019-07-12 Thread Apache Jenkins Server
See 




Jenkins build is still unstable: carbondata-master-spark-2.1 » Apache CarbonData :: Spark Common Test #3638

2019-07-12 Thread Apache Jenkins Server
See 




Jenkins build is still unstable: carbondata-master-spark-2.1 » Apache CarbonData :: Store SDK #3638

2019-07-12 Thread Apache Jenkins Server
See 




Jenkins build is unstable: carbondata-master-spark-2.2 #1834

2019-07-12 Thread Apache Jenkins Server
See 




Jenkins build is still unstable: carbondata-master-spark-2.2 » Apache CarbonData :: Spark Common Test #1834

2019-07-12 Thread Apache Jenkins Server
See 




Jenkins build is still unstable: carbondata-master-spark-2.2 » Apache CarbonData :: Store SDK #1834

2019-07-12 Thread Apache Jenkins Server
See 




[carbondata] branch master updated: [CARBONDATA-3456] Fix DataLoading to MV table when Yarn-Application is killed

2019-07-12 Thread ravipesala
This is an automated email from the ASF dual-hosted git repository.

ravipesala pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
 new cdf0594  [CARBONDATA-3456] Fix DataLoading to MV table when 
Yarn-Application is killed
cdf0594 is described below

commit cdf0594cb4fefcec6a892692daca2d73f40ccd19
Author: Indhumathi27 
AuthorDate: Thu Jun 27 18:16:04 2019 +0530

[CARBONDATA-3456] Fix DataLoading to MV table when Yarn-Application is 
killed

Problem:
When a data load is triggered on the datamap table, a new LoadMetaDetail with 
SegmentStatus as InsertInProgress and its segment-mapping info is created, and then 
the yarn application is killed. On the next load, the stale LoadMetaDetail is still in 
the InsertInProgress state, and the main-table segments mapped to that LoadMetaDetail are 
not considered for the next load, resulting in a data mismatch between the main table and the 
datamap table

Solution:
Clean up the old invalid segment before creating a new entry for new Load.

This closes #3310
---
 .../carbondata/core/datamap/DataMapProvider.java   | 25 
 .../carbondata/core/datamap/DataMapUtil.java   | 18 ++-
 .../core/datamap/dev/DataMapSyncStatus.java| 19 ---
 .../carbondata/core/metadata/SegmentFileStore.java |  2 +-
 .../core/statusmanager/SegmentStatusManager.java   | 27 ++
 .../apache/carbondata/core/util/CarbonUtil.java|  2 +-
 .../bloom/BloomCoarseGrainDataMapFactory.java  |  3 ++-
 .../datamap/lucene/LuceneDataMapFactoryBase.java   |  3 ++-
 .../carbondata/mv/datamap/MVDataMapProvider.scala  |  8 ++-
 .../mv/rewrite/MVIncrementalLoadingTestcase.scala  |  6 +++--
 .../hadoop/api/CarbonOutputCommitter.java  |  5 ++--
 .../hadoop/api/CarbonTableInputFormat.java |  6 +++--
 .../carbondata/datamap/IndexDataMapProvider.java   |  4 ++--
 .../datamap/PreAggregateDataMapProvider.java   |  4 ++--
 .../datamap/IndexDataMapRebuildRDD.scala   |  3 ++-
 .../spark/rdd/CarbonDataRDDFactory.scala   |  1 +
 .../spark/sql/events/MergeIndexEventListener.scala |  2 +-
 .../sql/execution/command/cache/CacheUtil.scala|  4 ++--
 .../command/cache/DropCacheEventListeners.scala|  3 ++-
 .../command/datamap/CarbonDataMapShowCommand.scala |  5 ++--
 .../command/mutation/HorizontalCompaction.scala|  6 +++--
 .../CarbonAlterTableDropHivePartitionCommand.scala |  2 +-
 .../CarbonAlterTableDropPartitionCommand.scala |  3 ++-
 .../CarbonAlterTableSplitPartitionCommand.scala|  3 ++-
 .../org/apache/spark/sql/hive/CarbonRelation.scala |  4 ++--
 .../org/apache/spark/util/MergeIndexUtil.scala |  2 +-
 .../processing/merger/CarbonDataMergerUtil.java|  7 +++---
 27 files changed, 120 insertions(+), 57 deletions(-)

diff --git 
a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapProvider.java 
b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapProvider.java
index d0b66f3..c320226 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapProvider.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapProvider.java
@@ -129,10 +129,15 @@ public abstract class DataMapProvider {
 }
 String newLoadName = "";
 String segmentMap = "";
-AbsoluteTableIdentifier dataMapTableAbsoluteTableIdentifier = 
AbsoluteTableIdentifier
-.from(dataMapSchema.getRelationIdentifier().getTablePath(),
+CarbonTable dataMapTable = CarbonTable
+
.buildFromTablePath(dataMapSchema.getRelationIdentifier().getTableName(),
 dataMapSchema.getRelationIdentifier().getDatabaseName(),
-dataMapSchema.getRelationIdentifier().getTableName());
+dataMapSchema.getRelationIdentifier().getTablePath(),
+dataMapSchema.getRelationIdentifier().getTableId());
+AbsoluteTableIdentifier dataMapTableAbsoluteTableIdentifier =
+dataMapTable.getAbsoluteTableIdentifier();
+// Clean up the old invalid segment data before creating a new entry for 
new load.
+SegmentStatusManager.deleteLoadsAndUpdateMetadata(dataMapTable, false, 
null);
 SegmentStatusManager segmentStatusManager =
 new SegmentStatusManager(dataMapTableAbsoluteTableIdentifier);
 Map> segmentMapping = new HashMap<>();
@@ -148,6 +153,15 @@ public abstract class DataMapProvider {
 
CarbonTablePath.getMetadataPath(dataMapSchema.getRelationIdentifier().getTablePath());
 LoadMetadataDetails[] loadMetaDataDetails =
 SegmentStatusManager.readLoadMetadata(dataMapTableMetadataPath);
+// Mark for delete all stale loadMetadetail
+for (LoadMetadataDetails loadMetadataDetail : loadMetaDataDetails) {
+  if ((loadMetadataDetail.getSegmentStatus() == 
SegmentStatus.INSERT_IN_PROGRESS
+  || loadMetadataDetail.getSegmentStatus()
+  == 

Jenkins build is still unstable: carbondata-master-spark-2.2 » Apache CarbonData :: Store SDK #1833

2019-07-12 Thread Apache Jenkins Server
See 




Jenkins build is still unstable: carbondata-master-spark-2.2 » Apache CarbonData :: Spark Common Test #1833

2019-07-12 Thread Apache Jenkins Server
See 




Build failed in Jenkins: carbondata-master-spark-2.1 » Apache CarbonData :: Examples #3637

2019-07-12 Thread Apache Jenkins Server
See 


--
[...truncated 280.37 KB...]
|count(1)|
++
|  10|
++

++
|count(1)|
++
|  10|
++

time for query on table with lucene datamap table:0.254
time for query on table without lucene datamap table:0.144
+-+-+
|   id| name|
+-+-+
|which test1 good7|who and name7|
|which test1 good7|who and name5|
|which test1 good8|who and name3|
|which test1 good0|who and name7|
|which test1 good7|who and name1|
|which test1 good7|who and name7|
|which test1 good7|who and name5|
|which test1 good8|who and name3|
|which test1 good0|who and name7|
|which test1 good7|who and name1|
+-+-+

+-+-+
|   id| name|
+-+-+
|which test1 good7|who and name7|
|which test1 good7|who and name5|
|which test1 good8|who and name3|
|which test1 good0|who and name7|
|which test1 good7|who and name1|
|which test1 good7|who and name7|
|which test1 good7|who and name5|
|which test1 good8|who and name3|
|which test1 good0|who and name7|
|which test1 good7|who and name1|
+-+-+

2019-07-12 09:37:40 AUDIT audit:72 - {"time":"July 12, 2019 2:37:40 AM 
PDT","username":"jenkins","opName":"DROP 
TABLE","opId":"22915665683914215","opStatus":"START"}
2019-07-12 09:37:40 AUDIT audit:93 - {"time":"July 12, 2019 2:37:40 AM 
PDT","username":"jenkins","opName":"DROP 
TABLE","opId":"22915665683914215","opStatus":"SUCCESS","opTime":"645 
ms","table":"default.persontable","extraInfo":{}}
- LuceneDataMapExample
2019-07-12 09:37:40 AUDIT audit:72 - {"time":"July 12, 2019 2:37:40 AM 
PDT","username":"jenkins","opName":"CREATE 
TABLE","opId":"22915666340557758","opStatus":"START"}
2019-07-12 09:37:40 AUDIT audit:93 - {"time":"July 12, 2019 2:37:40 AM 
PDT","username":"jenkins","opName":"CREATE 
TABLE","opId":"22915666340557758","opStatus":"SUCCESS","opTime":"71 
ms","table":"default.origin_table","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-07-12 09:37:40 AUDIT audit:72 - {"time":"July 12, 2019 2:37:40 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915666415724339","opStatus":"START"}
2019-07-12 09:37:41 ERROR DataLoadExecutor:55 - Data Load is partially success 
for table origin_table
2019-07-12 09:37:41 AUDIT audit:93 - {"time":"July 12, 2019 2:37:41 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915666415724339","opStatus":"SUCCESS","opTime":"131 
ms","table":"default.origin_table","extraInfo":{"SegmentId":"0","DataSize":"2.85KB","IndexSize":"1.43KB"}}
2019-07-12 09:37:41 AUDIT audit:72 - {"time":"July 12, 2019 2:37:41 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915666551206344","opStatus":"START"}
2019-07-12 09:37:41 ERROR DataLoadExecutor:55 - Data Load is partially success 
for table origin_table
2019-07-12 09:37:41 AUDIT audit:93 - {"time":"July 12, 2019 2:37:41 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915666551206344","opStatus":"SUCCESS","opTime":"126 
ms","table":"default.origin_table","extraInfo":{"SegmentId":"1","DataSize":"2.85KB","IndexSize":"1.43KB"}}
2019-07-12 09:37:41 AUDIT audit:72 - {"time":"July 12, 2019 2:37:41 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"2291581495846","opStatus":"START"}
2019-07-12 09:37:41 ERROR DataLoadExecutor:55 - Data Load is partially success 
for table origin_table
2019-07-12 09:37:41 AUDIT audit:93 - {"time":"July 12, 2019 2:37:41 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"2291581495846","opStatus":"SUCCESS","opTime":"119 
ms","table":"default.origin_table","extraInfo":{"SegmentId":"2","DataSize":"2.85KB","IndexSize":"1.43KB"}}
2019-07-12 09:37:41 AUDIT audit:72 - {"time":"July 12, 2019 2:37:41 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915666804407180","opStatus":"START"}
2019-07-12 09:37:41 ERROR DataLoadExecutor:55 - Data Load is partially success 
for table origin_table
2019-07-12 09:37:41 AUDIT audit:93 - {"time":"July 12, 2019 2:37:41 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915666804407180","opStatus":"SUCCESS","opTime":"126 
ms","table":"default.origin_table","extraInfo":{"SegmentId":"3","DataSize":"2.85KB","IndexSize":"1.43KB"}}
++
|count(1)|
++
|  40|
++

2019-07-12 09:37:41 AUDIT audit:72 - {"time":"July 12, 2019 2:37:41 AM 
PDT","username":"jenkins","opName":"CREATE 
TABLE","opId":"22915666988068822","opStatus":"START"}
2019-07-12 09:37:41 AUDIT audit:93 - {"time":"July 12, 2019 2:37:41 AM 
PDT","username":"jenkins","opName":"CREATE 
TABLE","opId":"22915666988068822","opStatus":"SUCCESS","opTime":"50 

Jenkins build is still unstable: carbondata-master-spark-2.1 » Apache CarbonData :: Store SDK #3637

2019-07-12 Thread Apache Jenkins Server
See 




Build failed in Jenkins: carbondata-master-spark-2.1 #3637

2019-07-12 Thread Apache Jenkins Server
See 


--
[...truncated 12.31 MB...]
- CarbonReaderExample
Running SQL on carbon files directly
+--+---+--+
|  name|age|height|
+--+---+--+
|robot0|  0|   0.0|
|robot1|  1|   0.5|
|robot2|  2|   1.0|
|robot3|  3|   1.5|
|robot4|  4|   2.0|
|robot5|  5|   2.5|
|robot6|  6|   3.0|
|robot7|  7|   3.5|
|robot8|  8|   4.0|
|robot9|  9|   4.5|
+--+---+--+

- DirectSQLExample
2019-07-12 09:37:42 AUDIT audit:72 - {"time":"July 12, 2019 2:37:42 AM 
PDT","username":"jenkins","opName":"CREATE 
TABLE","opId":"22915667974597035","opStatus":"START"}
2019-07-12 09:37:42 AUDIT audit:93 - {"time":"July 12, 2019 2:37:42 AM 
PDT","username":"jenkins","opName":"CREATE 
TABLE","opId":"22915667974597035","opStatus":"SUCCESS","opTime":"63 
ms","table":"default.hive_carbon_example","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":""}}
2019-07-12 09:37:42 AUDIT audit:72 - {"time":"July 12, 2019 2:37:42 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915668056321747","opStatus":"START"}
2019-07-12 09:37:42 AUDIT audit:93 - {"time":"July 12, 2019 2:37:42 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915668056321747","opStatus":"SUCCESS","opTime":"167 
ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"0","DataSize":"924.0B","IndexSize":"551.0B"}}
2019-07-12 09:37:42 AUDIT audit:72 - {"time":"July 12, 2019 2:37:42 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915668230686715","opStatus":"START"}
2019-07-12 09:37:42 AUDIT audit:93 - {"time":"July 12, 2019 2:37:42 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915668230686715","opStatus":"SUCCESS","opTime":"70 
ms","table":"default.hive_carbon_example","extraInfo":{"SegmentId":"1","DataSize":"924.0B","IndexSize":"551.0B"}}
+---+-++
| id| name|  salary|
+---+-++
|  1|  'liang'|20.0|
|  2|'anubhav'| 2.0|
|  1|  'liang'|20.0|
|  2|'anubhav'| 2.0|
+---+-++

2019-07-12 09:37:42 AUDIT audit:72 - {"time":"July 12, 2019 2:37:42 AM 
PDT","username":"jenkins","opName":"CREATE 
TABLE","opId":"22915668388372260","opStatus":"START"}
2019-07-12 09:37:43 AUDIT audit:93 - {"time":"July 12, 2019 2:37:43 AM 
PDT","username":"jenkins","opName":"CREATE 
TABLE","opId":"22915668388372260","opStatus":"SUCCESS","opTime":"58 
ms","table":"default.test_boundary","extraInfo":{"bad_record_path":"","local_dictionary_enable":"true","external":"false","sort_columns":"","comment":"","dictionary_include":"c6_timestamp"}}
2019-07-12 09:37:43 AUDIT audit:72 - {"time":"July 12, 2019 2:37:43 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915668450006838","opStatus":"START"}
2019-07-12 09:37:43 ERROR DataLoadExecutor:55 - Data Load is partially success 
for table test_boundary
2019-07-12 09:37:43 AUDIT audit:93 - {"time":"July 12, 2019 2:37:43 AM 
PDT","username":"jenkins","opName":"LOAD 
DATA","opId":"22915668450006838","opStatus":"SUCCESS","opTime":"159 
ms","table":"default.test_boundary","extraInfo":{"SegmentId":"0","DataSize":"4.03KB","IndexSize":"1.06KB"}}
OK
**Total Number Of Rows Fetched ** 0
- HiveExample *** FAILED ***
  java.lang.AssertionError: assertion failed
  at scala.Predef$.assert(Predef.scala:156)
  at 
org.apache.carbondata.examples.HiveExample$.readFromHive(HiveExample.scala:171)
  at 
org.apache.carbondata.examplesCI.RunExamples$$anonfun$21.apply$mcV$sp(RunExamples.scala:135)
  at 
org.apache.carbondata.examplesCI.RunExamples$$anonfun$21.apply(RunExamples.scala:133)
  at 
org.apache.carbondata.examplesCI.RunExamples$$anonfun$21.apply(RunExamples.scala:133)
  at 
org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
  at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
  at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
  at org.scalatest.Transformer.apply(Transformer.scala:22)
  at org.scalatest.Transformer.apply(Transformer.scala:20)
  ...
Exception encountered when invoking run on a nested suite - Cannot call 
methods on a stopped SparkContext.
This stopped SparkContext was created at:

org.apache.spark.sql.test.util.QueryTest.(QueryTest.scala:115)
org.apache.carbondata.examplesCI.RunExamples.(RunExamples.scala:35)
sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
java.lang.reflect.Constructor.newInstance(Constructor.java:423)

Jenkins build is still unstable: carbondata-master-spark-2.1 » Apache CarbonData :: Spark Common Test #3637

2019-07-12 Thread Apache Jenkins Server
See 




Jenkins build is still unstable: carbondata-master-spark-2.1 » Apache CarbonData :: Spark2 #3637

2019-07-12 Thread Apache Jenkins Server
See