[phoenix] branch master updated: PHOENIX-5059 Use the Datasource v2 api in the spark connector

2019-01-07 Thread tdsilva
This is an automated email from the ASF dual-hosted git repository.

tdsilva pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/master by this push:
 new 583cdd3  PHOENIX-5059 Use the Datasource v2 api in the spark connector
583cdd3 is described below

commit 583cdd3e27b80f16ac758f2b0d53d8814a19bdc2
Author: Thomas D'Silva 
AuthorDate: Tue Dec 11 14:59:39 2018 -0800

PHOENIX-5059 Use the Datasource v2 api in the spark connector
---
 .../phoenix/end2end/salted/BaseSaltedTableIT.java  |   6 +-
 phoenix-spark/pom.xml  |   8 +
 .../java/org/apache/phoenix/spark/OrderByIT.java   |  92 ++--
 .../java/org/apache/phoenix/spark/SparkUtil.java   |  25 +-
 phoenix-spark/src/it/resources/globalSetup.sql |   6 +-
 .../phoenix/spark/AbstractPhoenixSparkIT.scala |  12 +-
 .../org/apache/phoenix/spark/PhoenixSparkIT.scala  | 541 +++--
 .../spark/PhoenixSparkITTenantSpecific.scala   |  18 +-
 .../spark/datasource/v2/PhoenixDataSource.java |  82 
 .../v2/reader/PhoenixDataSourceReadOptions.java|  51 ++
 .../v2/reader/PhoenixDataSourceReader.java | 201 
 .../v2/reader/PhoenixInputPartition.java   |  44 ++
 .../v2/reader/PhoenixInputPartitionReader.java | 168 +++
 .../v2/writer/PhoenixDataSourceWriteOptions.java   | 109 +
 .../datasource/v2/writer/PhoenixDataWriter.java| 100 
 .../v2/writer/PhoenixDataWriterFactory.java|  19 +
 .../v2/writer/PhoenixDatasourceWriter.java |  34 ++
 ...org.apache.spark.sql.sources.DataSourceRegister |   1 +
 .../apache/phoenix/spark/ConfigurationUtil.scala   |   1 +
 .../apache/phoenix/spark/DataFrameFunctions.scala  |   2 +-
 .../org/apache/phoenix/spark/DefaultSource.scala   |   1 +
 ...lation.scala => FilterExpressionCompiler.scala} | 109 ++---
 .../org/apache/phoenix/spark/PhoenixRDD.scala  |  61 +--
 .../phoenix/spark/PhoenixRecordWritable.scala  |   2 +-
 .../org/apache/phoenix/spark/PhoenixRelation.scala |  70 +--
 .../apache/phoenix/spark/ProductRDDFunctions.scala |   1 +
 .../phoenix/spark/SparkContextFunctions.scala  |   1 +
 .../org/apache/phoenix/spark/SparkSchemaUtil.scala |  84 
 .../phoenix/spark/SparkSqlContextFunctions.scala   |   1 +
 .../datasources/jdbc/PhoenixJdbcDialect.scala  |  21 +
 .../execution/datasources/jdbc/SparkJdbcUtil.scala | 309 
 pom.xml|   2 +-
 32 files changed, 1655 insertions(+), 527 deletions(-)

diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
index 3051cd6..ef127ac 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
@@ -194,7 +194,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 .setSelectColumns(
 Lists.newArrayList("A_INTEGER", "A_STRING", "A_ID", 
"B_STRING", "B_INTEGER"))
 .setFullTableName(tableName)
-.setWhereClause("a_integer = 1 AND a_string >= 'ab' AND 
a_string < 'de' AND a_id = '123'");
+.setWhereClause("A_INTEGER = 1 AND A_STRING >= 'ab' AND 
A_STRING < 'de' AND A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 assertTrue(rs.next());
 assertEquals(1, rs.getInt(1));
@@ -205,7 +205,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 assertFalse(rs.next());
 
 // all single slots with one value.
-queryBuilder.setWhereClause("a_integer = 1 AND a_string = 'ab' AND 
a_id = '123'");
+queryBuilder.setWhereClause("A_INTEGER = 1 AND A_STRING = 'ab' AND 
A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 assertTrue(rs.next());
 assertEquals(1, rs.getInt(1));
@@ -216,7 +216,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 assertFalse(rs.next());
 
 // all single slots with multiple values.
-queryBuilder.setWhereClause("a_integer in (2, 4) AND a_string = 
'abc' AND a_id = '123'");
+queryBuilder.setWhereClause("A_INTEGER in (2, 4) AND A_STRING = 
'abc' AND A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 
 assertTrue(rs.next());
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index db511b5..c71c92a 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -496,6 +496,14 @@
 src/it/scala
 
src/it/resources
 
+            <plugin>
+              <groupId>org.apache.maven.plugins</groupId>
+              <artifactId>maven-compiler-plugin</artifactId>
+              <configuration>
+                <source>1.8</source>
+                <target>1.8</target>
+              </configuration>
+            </plugin>
   

[phoenix] branch 4.x-HBase-1.3 updated: PHOENIX-5059 Use the Datasource v2 api in the spark connector

2019-01-07 Thread tdsilva
This is an automated email from the ASF dual-hosted git repository.

tdsilva pushed a commit to branch 4.x-HBase-1.3
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/4.x-HBase-1.3 by this push:
 new f86b97b  PHOENIX-5059 Use the Datasource v2 api in the spark connector
f86b97b is described below

commit f86b97b00d3935a699eaa8fd122463e468b42cd4
Author: Thomas D'Silva 
AuthorDate: Tue Dec 11 14:59:39 2018 -0800

PHOENIX-5059 Use the Datasource v2 api in the spark connector
---
 .../phoenix/end2end/salted/BaseSaltedTableIT.java  |   6 +-
 phoenix-spark/pom.xml  |   8 +
 .../java/org/apache/phoenix/spark/OrderByIT.java   |  92 ++--
 .../java/org/apache/phoenix/spark/SparkUtil.java   |  25 +-
 phoenix-spark/src/it/resources/globalSetup.sql |   6 +-
 .../phoenix/spark/AbstractPhoenixSparkIT.scala |  12 +-
 .../org/apache/phoenix/spark/PhoenixSparkIT.scala  | 541 +++--
 .../spark/PhoenixSparkITTenantSpecific.scala   |  18 +-
 .../spark/datasource/v2/PhoenixDataSource.java |  82 
 .../v2/reader/PhoenixDataSourceReadOptions.java|  51 ++
 .../v2/reader/PhoenixDataSourceReader.java | 201 
 .../v2/reader/PhoenixInputPartition.java   |  44 ++
 .../v2/reader/PhoenixInputPartitionReader.java | 168 +++
 .../v2/writer/PhoenixDataSourceWriteOptions.java   | 109 +
 .../datasource/v2/writer/PhoenixDataWriter.java| 100 
 .../v2/writer/PhoenixDataWriterFactory.java|  19 +
 .../v2/writer/PhoenixDatasourceWriter.java |  34 ++
 ...org.apache.spark.sql.sources.DataSourceRegister |   1 +
 .../apache/phoenix/spark/ConfigurationUtil.scala   |   1 +
 .../apache/phoenix/spark/DataFrameFunctions.scala  |   2 +-
 .../org/apache/phoenix/spark/DefaultSource.scala   |   1 +
 ...lation.scala => FilterExpressionCompiler.scala} | 109 ++---
 .../org/apache/phoenix/spark/PhoenixRDD.scala  |  61 +--
 .../phoenix/spark/PhoenixRecordWritable.scala  |   2 +-
 .../org/apache/phoenix/spark/PhoenixRelation.scala |  70 +--
 .../apache/phoenix/spark/ProductRDDFunctions.scala |   1 +
 .../phoenix/spark/SparkContextFunctions.scala  |   1 +
 .../org/apache/phoenix/spark/SparkSchemaUtil.scala |  84 
 .../phoenix/spark/SparkSqlContextFunctions.scala   |   1 +
 .../datasources/jdbc/PhoenixJdbcDialect.scala  |  21 +
 .../execution/datasources/jdbc/SparkJdbcUtil.scala | 309 
 pom.xml|   2 +-
 32 files changed, 1655 insertions(+), 527 deletions(-)

diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
index 3051cd6..ef127ac 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
@@ -194,7 +194,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 .setSelectColumns(
 Lists.newArrayList("A_INTEGER", "A_STRING", "A_ID", 
"B_STRING", "B_INTEGER"))
 .setFullTableName(tableName)
-.setWhereClause("a_integer = 1 AND a_string >= 'ab' AND 
a_string < 'de' AND a_id = '123'");
+.setWhereClause("A_INTEGER = 1 AND A_STRING >= 'ab' AND 
A_STRING < 'de' AND A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 assertTrue(rs.next());
 assertEquals(1, rs.getInt(1));
@@ -205,7 +205,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 assertFalse(rs.next());
 
 // all single slots with one value.
-queryBuilder.setWhereClause("a_integer = 1 AND a_string = 'ab' AND 
a_id = '123'");
+queryBuilder.setWhereClause("A_INTEGER = 1 AND A_STRING = 'ab' AND 
A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 assertTrue(rs.next());
 assertEquals(1, rs.getInt(1));
@@ -216,7 +216,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 assertFalse(rs.next());
 
 // all single slots with multiple values.
-queryBuilder.setWhereClause("a_integer in (2, 4) AND a_string = 
'abc' AND a_id = '123'");
+queryBuilder.setWhereClause("A_INTEGER in (2, 4) AND A_STRING = 
'abc' AND A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 
 assertTrue(rs.next());
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index 038e314..f426c83 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -487,6 +487,14 @@
 src/it/scala
 
src/it/resources
 
+            <plugin>
+              <groupId>org.apache.maven.plugins</groupId>
+              <artifactId>maven-compiler-plugin</artifactId>
+              <configuration>
+                <source>1.8</source>
+                <target>1.8</target>
+              </configuration>
+            </plugin>

[phoenix] branch 4.x-HBase-1.4 updated: PHOENIX-5059 Use the Datasource v2 api in the spark connector

2019-01-07 Thread tdsilva
This is an automated email from the ASF dual-hosted git repository.

tdsilva pushed a commit to branch 4.x-HBase-1.4
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/4.x-HBase-1.4 by this push:
 new 7138572  PHOENIX-5059 Use the Datasource v2 api in the spark connector
7138572 is described below

commit 71385723ee08f1cd68269f9770e60847b6f8a1fc
Author: Thomas D'Silva 
AuthorDate: Tue Dec 11 14:59:39 2018 -0800

PHOENIX-5059 Use the Datasource v2 api in the spark connector
---
 .../phoenix/end2end/salted/BaseSaltedTableIT.java  |   6 +-
 phoenix-spark/pom.xml  |   8 +
 .../java/org/apache/phoenix/spark/OrderByIT.java   |  92 ++--
 .../java/org/apache/phoenix/spark/SparkUtil.java   |  25 +-
 phoenix-spark/src/it/resources/globalSetup.sql |   6 +-
 .../phoenix/spark/AbstractPhoenixSparkIT.scala |  12 +-
 .../org/apache/phoenix/spark/PhoenixSparkIT.scala  | 541 +++--
 .../spark/PhoenixSparkITTenantSpecific.scala   |  18 +-
 .../spark/datasource/v2/PhoenixDataSource.java |  82 
 .../v2/reader/PhoenixDataSourceReadOptions.java|  51 ++
 .../v2/reader/PhoenixDataSourceReader.java | 201 
 .../v2/reader/PhoenixInputPartition.java   |  44 ++
 .../v2/reader/PhoenixInputPartitionReader.java | 168 +++
 .../v2/writer/PhoenixDataSourceWriteOptions.java   | 109 +
 .../datasource/v2/writer/PhoenixDataWriter.java| 100 
 .../v2/writer/PhoenixDataWriterFactory.java|  19 +
 .../v2/writer/PhoenixDatasourceWriter.java |  34 ++
 ...org.apache.spark.sql.sources.DataSourceRegister |   1 +
 .../apache/phoenix/spark/ConfigurationUtil.scala   |   1 +
 .../apache/phoenix/spark/DataFrameFunctions.scala  |   2 +-
 .../org/apache/phoenix/spark/DefaultSource.scala   |   1 +
 ...lation.scala => FilterExpressionCompiler.scala} | 109 ++---
 .../org/apache/phoenix/spark/PhoenixRDD.scala  |  61 +--
 .../phoenix/spark/PhoenixRecordWritable.scala  |   2 +-
 .../org/apache/phoenix/spark/PhoenixRelation.scala |  70 +--
 .../apache/phoenix/spark/ProductRDDFunctions.scala |   1 +
 .../phoenix/spark/SparkContextFunctions.scala  |   1 +
 .../org/apache/phoenix/spark/SparkSchemaUtil.scala |  84 
 .../phoenix/spark/SparkSqlContextFunctions.scala   |   1 +
 .../datasources/jdbc/PhoenixJdbcDialect.scala  |  21 +
 .../execution/datasources/jdbc/SparkJdbcUtil.scala | 309 
 pom.xml|   2 +-
 32 files changed, 1655 insertions(+), 527 deletions(-)

diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
index 3051cd6..ef127ac 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
@@ -194,7 +194,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 .setSelectColumns(
 Lists.newArrayList("A_INTEGER", "A_STRING", "A_ID", 
"B_STRING", "B_INTEGER"))
 .setFullTableName(tableName)
-.setWhereClause("a_integer = 1 AND a_string >= 'ab' AND 
a_string < 'de' AND a_id = '123'");
+.setWhereClause("A_INTEGER = 1 AND A_STRING >= 'ab' AND 
A_STRING < 'de' AND A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 assertTrue(rs.next());
 assertEquals(1, rs.getInt(1));
@@ -205,7 +205,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 assertFalse(rs.next());
 
 // all single slots with one value.
-queryBuilder.setWhereClause("a_integer = 1 AND a_string = 'ab' AND 
a_id = '123'");
+queryBuilder.setWhereClause("A_INTEGER = 1 AND A_STRING = 'ab' AND 
A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 assertTrue(rs.next());
 assertEquals(1, rs.getInt(1));
@@ -216,7 +216,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 assertFalse(rs.next());
 
 // all single slots with multiple values.
-queryBuilder.setWhereClause("a_integer in (2, 4) AND a_string = 
'abc' AND a_id = '123'");
+queryBuilder.setWhereClause("A_INTEGER in (2, 4) AND A_STRING = 
'abc' AND A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 
 assertTrue(rs.next());
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index 789f688..a7c1a4f 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -487,6 +487,14 @@
 src/it/scala
 
src/it/resources
 
+            <plugin>
+              <groupId>org.apache.maven.plugins</groupId>
+              <artifactId>maven-compiler-plugin</artifactId>
+              <configuration>
+                <source>1.8</source>
+                <target>1.8</target>
+              </configuration>
+            </plugin>

[phoenix] branch 4.x-HBase-1.2 updated: PHOENIX-5059 Use the Datasource v2 api in the spark connector

2019-01-07 Thread tdsilva
This is an automated email from the ASF dual-hosted git repository.

tdsilva pushed a commit to branch 4.x-HBase-1.2
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/4.x-HBase-1.2 by this push:
 new 4eaa1b8  PHOENIX-5059 Use the Datasource v2 api in the spark connector
4eaa1b8 is described below

commit 4eaa1b84c5a88385e831f9c4136995e8e2e959a2
Author: Thomas D'Silva 
AuthorDate: Tue Dec 11 14:59:39 2018 -0800

PHOENIX-5059 Use the Datasource v2 api in the spark connector
---
 .../phoenix/end2end/salted/BaseSaltedTableIT.java  |   6 +-
 phoenix-spark/pom.xml  |   8 +
 .../java/org/apache/phoenix/spark/OrderByIT.java   |  92 ++--
 .../java/org/apache/phoenix/spark/SparkUtil.java   |  25 +-
 phoenix-spark/src/it/resources/globalSetup.sql |   6 +-
 .../phoenix/spark/AbstractPhoenixSparkIT.scala |  12 +-
 .../org/apache/phoenix/spark/PhoenixSparkIT.scala  | 541 +++--
 .../spark/PhoenixSparkITTenantSpecific.scala   |  18 +-
 .../spark/datasource/v2/PhoenixDataSource.java |  82 
 .../v2/reader/PhoenixDataSourceReadOptions.java|  51 ++
 .../v2/reader/PhoenixDataSourceReader.java | 201 
 .../v2/reader/PhoenixInputPartition.java   |  44 ++
 .../v2/reader/PhoenixInputPartitionReader.java | 168 +++
 .../v2/writer/PhoenixDataSourceWriteOptions.java   | 109 +
 .../datasource/v2/writer/PhoenixDataWriter.java| 100 
 .../v2/writer/PhoenixDataWriterFactory.java|  19 +
 .../v2/writer/PhoenixDatasourceWriter.java |  34 ++
 ...org.apache.spark.sql.sources.DataSourceRegister |   1 +
 .../apache/phoenix/spark/ConfigurationUtil.scala   |   1 +
 .../apache/phoenix/spark/DataFrameFunctions.scala  |   2 +-
 .../org/apache/phoenix/spark/DefaultSource.scala   |   1 +
 ...lation.scala => FilterExpressionCompiler.scala} | 109 ++---
 .../org/apache/phoenix/spark/PhoenixRDD.scala  |  61 +--
 .../phoenix/spark/PhoenixRecordWritable.scala  |   2 +-
 .../org/apache/phoenix/spark/PhoenixRelation.scala |  70 +--
 .../apache/phoenix/spark/ProductRDDFunctions.scala |   1 +
 .../phoenix/spark/SparkContextFunctions.scala  |   1 +
 .../org/apache/phoenix/spark/SparkSchemaUtil.scala |  84 
 .../phoenix/spark/SparkSqlContextFunctions.scala   |   1 +
 .../datasources/jdbc/PhoenixJdbcDialect.scala  |  21 +
 .../execution/datasources/jdbc/SparkJdbcUtil.scala | 309 
 pom.xml|   2 +-
 32 files changed, 1655 insertions(+), 527 deletions(-)

diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
index 3051cd6..ef127ac 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/salted/BaseSaltedTableIT.java
@@ -194,7 +194,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 .setSelectColumns(
 Lists.newArrayList("A_INTEGER", "A_STRING", "A_ID", 
"B_STRING", "B_INTEGER"))
 .setFullTableName(tableName)
-.setWhereClause("a_integer = 1 AND a_string >= 'ab' AND 
a_string < 'de' AND a_id = '123'");
+.setWhereClause("A_INTEGER = 1 AND A_STRING >= 'ab' AND 
A_STRING < 'de' AND A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 assertTrue(rs.next());
 assertEquals(1, rs.getInt(1));
@@ -205,7 +205,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 assertFalse(rs.next());
 
 // all single slots with one value.
-queryBuilder.setWhereClause("a_integer = 1 AND a_string = 'ab' AND 
a_id = '123'");
+queryBuilder.setWhereClause("A_INTEGER = 1 AND A_STRING = 'ab' AND 
A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 assertTrue(rs.next());
 assertEquals(1, rs.getInt(1));
@@ -216,7 +216,7 @@ public abstract class BaseSaltedTableIT extends 
ParallelStatsDisabledIT  {
 assertFalse(rs.next());
 
 // all single slots with multiple values.
-queryBuilder.setWhereClause("a_integer in (2, 4) AND a_string = 
'abc' AND a_id = '123'");
+queryBuilder.setWhereClause("A_INTEGER in (2, 4) AND A_STRING = 
'abc' AND A_ID = '123'");
 rs = executeQuery(conn, queryBuilder);
 
 assertTrue(rs.next());
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index c85fe76..08b3806 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -487,6 +487,14 @@
 src/it/scala
 
src/it/resources
 
+            <plugin>
+              <groupId>org.apache.maven.plugins</groupId>
+              <artifactId>maven-compiler-plugin</artifactId>
+              <configuration>
+                <source>1.8</source>
+                <target>1.8</target>
+              </configuration>
+            </plugin>

[phoenix-connectors] branch 4.14-HBase-1.4 created (now 362d0da)

2019-01-07 Thread vincentpoon
This is an automated email from the ASF dual-hosted git repository.

vincentpoon pushed a change to branch 4.14-HBase-1.4
in repository https://gitbox.apache.org/repos/asf/phoenix-connectors.git.


  at 362d0da  Remove logging implementations to use Presto's log framework

No new revisions were added by this update.



[phoenix-connectors] branch 4.x-HBase-1.4 updated: Remove logging implementations to use Presto's log framework

2019-01-07 Thread vincentpoon
This is an automated email from the ASF dual-hosted git repository.

vincentpoon pushed a commit to branch 4.x-HBase-1.4
in repository https://gitbox.apache.org/repos/asf/phoenix-connectors.git


The following commit(s) were added to refs/heads/4.x-HBase-1.4 by this push:
 new 362d0da  Remove logging implementations to use Presto's log framework
362d0da is described below

commit 362d0da62a80644c822e6a6a74ba12c0dd5aff33
Author: Vincent Poon 
AuthorDate: Mon Jan 7 14:21:37 2019 -0800

Remove logging implementations to use Presto's log framework
---
 presto-phoenix-shaded/pom.xml  |   2 +-
 .../presto-phoenix-client-shaded/pom.xml   | 257 +++--
 2 files changed, 236 insertions(+), 23 deletions(-)

diff --git a/presto-phoenix-shaded/pom.xml b/presto-phoenix-shaded/pom.xml
index 03ce587..60acdc3 100644
--- a/presto-phoenix-shaded/pom.xml
+++ b/presto-phoenix-shaded/pom.xml
@@ -25,7 +25,7 @@
 
 
 true
-com.facebook.presto.phoenix.shaded
+org.apache.phoenix.shaded
 
 
 
diff --git a/presto-phoenix-shaded/presto-phoenix-client-shaded/pom.xml 
b/presto-phoenix-shaded/presto-phoenix-client-shaded/pom.xml
index d24735b..94171f6 100644
--- a/presto-phoenix-shaded/presto-phoenix-client-shaded/pom.xml
+++ b/presto-phoenix-shaded/presto-phoenix-client-shaded/pom.xml
@@ -26,12 +26,26 @@
 
 
 org.apache.phoenix
-phoenix-client
+phoenix-core
 ${project.version}
 
+
 
-*
-*
+commons-logging
+commons-logging
+
+
+log4j
+log4j
+
+
+org.slf4j
+slf4j-log4j12
+
+
+org.slf4j
+slf4j-api
 
 
 
@@ -52,18 +66,53 @@
 true
 true
 
${project.build.directory}/pom.xml
+
true
 
 
 
+
+
+*:*
+
+
+xom:xom
+log4j:log4j
+org.slf4j:slf4j-log4j12
+
commons-logging:commons-logging
+
+
+
 
+
 
-com.google.common
-
${shadeBase}.com.google.common
+com.beust.jcommander
+
${shadeBase}.com.beust.jcommander
 
 
-com.google.inject
-
${shadeBase}.com.google.inject
+com.codahale
+
${shadeBase}.com.codahale
+
+
+com.fasterxml
+
${shadeBase}.com.fasterxml
+
+
+com.google
+
${shadeBase}.com.google
+
+
+
+com.jamesmurty
+
${shadeBase}.com.jamesmurty
+
+
+com.jcraft
+
${shadeBase}.com.jcraft
+
+
+com.lmax
+
${shadeBase}.com.lmax
 
 
 com.sun.xml
@@ -78,14 +127,183 @@
 
${shadeBase}.com.sun.research
 
 
-com.sun.xml
-
${shadeBase}.com.sun.xml
-
-
 com.sun.activation
 
${shadeBase}.com.sun.activation
 
 
+  

Build failed in Jenkins: Phoenix Compile Compatibility with HBase #870

2019-01-07 Thread Apache Jenkins Server
See 


--
Started by timer
[EnvInject] - Loading node environment variables.
Building remotely on H25 (ubuntu xenial) in workspace 

[Phoenix_Compile_Compat_wHBase] $ /bin/bash /tmp/jenkins779427239058443566.sh
core file size  (blocks, -c) 0
data seg size   (kbytes, -d) unlimited
scheduling priority (-e) 0
file size   (blocks, -f) unlimited
pending signals (-i) 386407
max locked memory   (kbytes, -l) 64
max memory size (kbytes, -m) unlimited
open files  (-n) 6
pipe size(512 bytes, -p) 8
POSIX message queues (bytes, -q) 819200
real-time priority  (-r) 0
stack size  (kbytes, -s) 8192
cpu time   (seconds, -t) unlimited
max user processes  (-u) 10240
virtual memory  (kbytes, -v) unlimited
file locks  (-x) unlimited
core id : 0
core id : 1
core id : 2
core id : 3
core id : 4
core id : 5
physical id : 0
physical id : 1
MemTotal:   98957636 kB
MemFree:15267908 kB
Filesystem  Size  Used Avail Use% Mounted on
udev 48G 0   48G   0% /dev
tmpfs   9.5G  634M  8.9G   7% /run
/dev/sda3   3.6T  210G  3.2T   7% /
tmpfs48G 0   48G   0% /dev/shm
tmpfs   5.0M 0  5.0M   0% /run/lock
tmpfs48G 0   48G   0% /sys/fs/cgroup
/dev/sda2   473M  191M  258M  43% /boot
/dev/loop1   28M   28M 0 100% /snap/snapcraft/1871
tmpfs   9.5G  4.0K  9.5G   1% /run/user/910
/dev/loop4   52M   52M 0 100% /snap/lxd/9600
/dev/loop5   89M   89M 0 100% /snap/core/5897
/dev/loop7   52M   52M 0 100% /snap/lxd/9664
tmpfs   9.5G 0  9.5G   0% /run/user/1000
/dev/loop2   28M   28M 0 100% /snap/snapcraft/2374
/dev/loop8   90M   90M 0 100% /snap/core/6034
/dev/loop9   52M   52M 0 100% /snap/lxd/9795
/dev/loop6   90M   90M 0 100% /snap/core/6130
apache-maven-2.2.1
apache-maven-3.0.4
apache-maven-3.0.5
apache-maven-3.1.1
apache-maven-3.2.1
apache-maven-3.2.5
apache-maven-3.3.3
apache-maven-3.3.9
apache-maven-3.5.0
apache-maven-3.5.2
apache-maven-3.5.4
apache-maven-3.6.0
latest
latest2
latest3


===
Verifying compile level compatibility with HBase 0.98 with Phoenix 
4.x-HBase-0.98
===

Cloning into 'hbase'...
Switched to a new branch '0.98'
Branch 0.98 set up to track remote branch 0.98 from origin.
[ERROR] Plugin org.codehaus.mojo:findbugs-maven-plugin:2.5.2 or one of its 
dependencies could not be resolved: Failed to read artifact descriptor for 
org.codehaus.mojo:findbugs-maven-plugin:jar:2.5.2: Could not transfer artifact 
org.codehaus.mojo:findbugs-maven-plugin:pom:2.5.2 from/to central 
(https://repo.maven.apache.org/maven2): Received fatal alert: protocol_version 
-> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e 
switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please 
read the following articles:
[ERROR] [Help 1] 
http://cwiki.apache.org/confluence/display/MAVEN/PluginResolutionException
Build step 'Execute shell' marked build as failure