[phoenix] branch 4.x updated: PHOENIX-5065 Inconsistent treatment of NULL and empty string

2020-03-17 Thread stoty
This is an automated email from the ASF dual-hosted git repository.

stoty pushed a commit to branch 4.x
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/4.x by this push:
 new a4cbf11  PHOENIX-5065 Inconsistent treatment of NULL and empty string
a4cbf11 is described below

commit a4cbf11cb6655236756400e68870746ff4482b5a
Author: Richard Antal 
AuthorDate: Mon Mar 9 14:53:53 2020 +0100

PHOENIX-5065 Inconsistent treatment of NULL and empty string

Signed-off-by: Istvan Toth 
---
 .../java/org/apache/phoenix/end2end/InListIT.java  | 49 ++
 .../phoenix/expression/InListExpression.java   | 44 ++-
 2 files changed, 83 insertions(+), 10 deletions(-)

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InListIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InListIT.java
index f74f7d7..0aabdcd 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InListIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InListIT.java
@@ -35,6 +35,7 @@ import java.util.List;
 import java.util.Properties;
 
 import org.apache.phoenix.schema.SortOrder;
+import org.apache.phoenix.schema.TypeMismatchException;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PInteger;
 import org.apache.phoenix.schema.types.PLong;
@@ -485,6 +486,54 @@ public class InListIT extends ParallelStatsDisabledIT {
 
 conn.close();
 }
+
+@Test
+public void testInListExpressionWithNull() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+Statement stmt = conn.createStatement();
+ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM 
SYSTEM.CATALOG WHERE " +
+"TENANT_ID IN ('', 'FOO')");
+ResultSet rs2 = stmt.executeQuery("SELECT COUNT(*) FROM 
SYSTEM.CATALOG WHERE " +
+"TENANT_ID = '' OR TENANT_ID = 'FOO'");
+assertTrue(rs.next());
+assertTrue(rs2.next());
+assertEquals(rs.getInt(1), rs2.getInt(1));
+assertEquals(0, rs.getInt(1));
+}
+}
+
+@Test(expected = TypeMismatchException.class)
+public void testInListExpressionWithNotValidElements() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+Statement stmt = conn.createStatement();
+ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM 
SYSTEM.CATALOG WHERE " +
+"TENANT_ID IN (4, 8)");
+assertTrue(rs.next());
+assertEquals(0, rs.getInt(1));
+}
+}
+
+@Test(expected = SQLException.class)
+public void testInListExpressionWithNoElements() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+Statement stmt = conn.createStatement();
+ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM 
SYSTEM.CATALOG WHERE " +
+"TENANT_ID IN ()");
+assertTrue(rs.next());
+assertEquals(0, rs.getInt(1));
+}
+}
+
+@Test
+public void testInListExpressionWithNullAndWrongTypedData() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+Statement stmt = conn.createStatement();
+ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM 
SYSTEM.CATALOG WHERE " +
+"TENANT_ID IN ('', 4)");
+assertTrue(rs.next());
+assertEquals(0, rs.getInt(1));
+}
+}
 
 @Test
 public void testInListExpressionWithDesc() throws Exception {
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
index 4ee08ee..bd86aaa 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
@@ -26,6 +26,7 @@ import java.util.Collections;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -64,22 +65,39 @@ public class InListExpression extends BaseSingleExpression {
 private boolean hashCodeSet = false;
 
 public static Expression create (List<Expression> children, boolean isNegate,
 ImmutableBytesWritable ptr, boolean rowKeyOrderOptimizable) throws SQLException {
+if (children.size() == 1) {
+throw new SQLException("No element in the IN list");
+}
+
 Expression firstChild = children.get(0);
 
 if (firstChild.isStateless() && (!firstChild.evaluate(null, ptr) || 
ptr.getLength() == 0)) {
 return Literal

[phoenix] branch master updated: PHOENIX-5065 Inconsistent treatment of NULL and empty string

2020-03-17 Thread stoty
This is an automated email from the ASF dual-hosted git repository.

stoty pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/master by this push:
 new ba609be  PHOENIX-5065 Inconsistent treatment of NULL and empty string
ba609be is described below

commit ba609bef5110c8a04a1cc3d7c60485fb8c23a1dc
Author: Richard Antal 
AuthorDate: Mon Mar 9 14:53:53 2020 +0100

PHOENIX-5065 Inconsistent treatment of NULL and empty string

Signed-off-by: Istvan Toth 
---
 .../java/org/apache/phoenix/end2end/InListIT.java  | 49 ++
 .../phoenix/expression/InListExpression.java   | 44 ++-
 2 files changed, 83 insertions(+), 10 deletions(-)

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InListIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InListIT.java
index 2c5cf6a..7e0a7a5 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/InListIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/InListIT.java
@@ -35,6 +35,7 @@ import java.util.List;
 import java.util.Properties;
 
 import org.apache.phoenix.schema.SortOrder;
+import org.apache.phoenix.schema.TypeMismatchException;
 import org.apache.phoenix.schema.types.PDataType;
 import org.apache.phoenix.schema.types.PInteger;
 import org.apache.phoenix.schema.types.PLong;
@@ -485,6 +486,54 @@ public class InListIT extends ParallelStatsDisabledIT {
 
 conn.close();
 }
+
+@Test
+public void testInListExpressionWithNull() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+Statement stmt = conn.createStatement();
+ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM 
SYSTEM.CATALOG WHERE " +
+"TENANT_ID IN ('', 'FOO')");
+ResultSet rs2 = stmt.executeQuery("SELECT COUNT(*) FROM 
SYSTEM.CATALOG WHERE " +
+"TENANT_ID = '' OR TENANT_ID = 'FOO'");
+assertTrue(rs.next());
+assertTrue(rs2.next());
+assertEquals(rs.getInt(1), rs2.getInt(1));
+assertEquals(0, rs.getInt(1));
+}
+}
+
+@Test(expected = TypeMismatchException.class)
+public void testInListExpressionWithNotValidElements() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+Statement stmt = conn.createStatement();
+ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM 
SYSTEM.CATALOG WHERE " +
+"TENANT_ID IN (4, 8)");
+assertTrue(rs.next());
+assertEquals(0, rs.getInt(1));
+}
+}
+
+@Test(expected = SQLException.class)
+public void testInListExpressionWithNoElements() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+Statement stmt = conn.createStatement();
+ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM 
SYSTEM.CATALOG WHERE " +
+"TENANT_ID IN ()");
+assertTrue(rs.next());
+assertEquals(0, rs.getInt(1));
+}
+}
+
+@Test
+public void testInListExpressionWithNullAndWrongTypedData() throws Exception {
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+Statement stmt = conn.createStatement();
+ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM 
SYSTEM.CATALOG WHERE " +
+"TENANT_ID IN ('', 4)");
+assertTrue(rs.next());
+assertEquals(0, rs.getInt(1));
+}
+}
 
 @Test
 public void testInListExpressionWithDesc() throws Exception {
diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
index 4235406..fc276c8 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/expression/InListExpression.java
@@ -26,6 +26,7 @@ import java.util.Collections;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -64,22 +65,39 @@ public class InListExpression extends BaseSingleExpression {
 private boolean hashCodeSet = false;
 
 public static Expression create (List<Expression> children, boolean isNegate,
 ImmutableBytesWritable ptr, boolean rowKeyOrderOptimizable) throws SQLException {
+if (children.size() == 1) {
+throw new SQLException("No element in the IN list");
+}
+
 Expression firstChild = children.get(0);
 
 if (firstChild.isStateless() && (!firstChild.evaluate(null, ptr) || 
ptr.getLength() == 0)) {
 return L

[phoenix] branch 4.x updated: PHOENIX-5753 Fix erroneous query result when RVC is clipped with desc column

2020-03-17 Thread chenglei
This is an automated email from the ASF dual-hosted git repository.

chenglei pushed a commit to branch 4.x
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/4.x by this push:
 new 8da1582  PHOENIX-5753 Fix erroneous query result when RVC is clipped 
with desc column
8da1582 is described below

commit 8da1582ea8322955db6692823a68d222d310805d
Author: chenglei 
AuthorDate: Wed Mar 18 12:33:31 2020 +0800

PHOENIX-5753 Fix erroneous query result when RVC is clipped with desc column
---
 .../apache/phoenix/end2end/SkipScanQueryIT.java|  57 +++
 .../end2end/index/GlobalIndexOptimizationIT.java   |   8 +-
 .../org/apache/phoenix/compile/WhereOptimizer.java |  92 +++--
 .../java/org/apache/phoenix/query/KeyRange.java|  12 +
 .../apache/phoenix/compile/WhereCompilerTest.java  |  57 ---
 .../apache/phoenix/compile/WhereOptimizerTest.java | 388 +++--
 6 files changed, 495 insertions(+), 119 deletions(-)

diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
index f66f196..ccba651 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
@@ -18,6 +18,7 @@
 package org.apache.phoenix.end2end;
 
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.apache.phoenix.util.TestUtil.assertResultSet;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -655,4 +656,60 @@ public class SkipScanQueryIT extends 
ParallelStatsDisabledIT {
 assertFalse(rs.next());
 }
 }
+
+@Test
+public void testRVCClipBug5753() throws Exception {
+String tableName = generateUniqueName();
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+conn.setAutoCommit(true);
+Statement stmt = conn.createStatement();
+
+String sql = "CREATE TABLE "+tableName+" (" +
+ " pk1 INTEGER NOT NULL , " +
+ " pk2 INTEGER NOT NULL, " +
+ " pk3 INTEGER NOT NULL, " +
+ " pk4 INTEGER NOT NULL, " +
+ " pk5 INTEGER NOT NULL, " +
+ " pk6 INTEGER NOT NULL, " +
+ " pk7 INTEGER NOT NULL, " +
+ " pk8 INTEGER NOT NULL, " +
+ " v INTEGER, CONSTRAINT PK PRIMARY KEY(pk1,pk2,pk3 
desc,pk4,pk5,pk6 desc,pk7,pk8))";;
+
+stmt.execute(sql);
+
+stmt.execute(
+"UPSERT INTO " + tableName + " 
(pk1,pk2,pk3,pk4,pk5,pk6,pk7,pk8,v) "+
+"VALUES (1,3,4,10,2,6,7,9,1)");
+
+sql = "select pk1,pk2,pk3,pk4 from " + tableName +
+ " where (pk1 >=1 and pk1<=2) and (pk2>=3 and pk2<=4) and 
(pk3,pk4) < (5,7) order by pk1,pk2,pk3";
+
+ResultSet rs = conn.prepareStatement(sql).executeQuery();
+assertResultSet(rs, new Object[][]{{1,3,4,10}});
+
+sql = "select * from " + tableName +
+" where (pk1 >=1 and pk1<=2) and (pk2>=2 and pk2<=3) and 
(pk3,pk4) < (5,7) and "+
+" (pk5,pk6,pk7) < (5,6,7) and pk8 > 8 order by 
pk1,pk2,pk3";
+rs = conn.prepareStatement(sql).executeQuery();
+assertResultSet(rs, new Object[][]{{1,3,4,10}});
+
+stmt.execute(
+"UPSERT INTO " + tableName + " 
(pk1,pk2,pk3,pk4,pk5,pk6,pk7,pk8,v) "+
+"VALUES (1,3,2,10,5,4,3,9,1)");
+rs = conn.prepareStatement(sql).executeQuery();
+assertResultSet(rs, new Object[][]{{1,3,2,10},{1,3,4,10}});
+
+stmt.execute(
+"UPSERT INTO " + tableName + " 
(pk1,pk2,pk3,pk4,pk5,pk6,pk7,pk8,v) "+
+"VALUES (1,3,5,6,4,7,8,9,1)");
+rs = conn.prepareStatement(sql).executeQuery();
+assertResultSet(rs, new 
Object[][]{{1,3,2,10},{1,3,4,10},{1,3,5,6}});
+
+sql = "select * from " + tableName +
+" where (pk1 >=1 and pk1<=2) and (pk2>=2 and pk2<=3) and 
(pk3,pk4) in ((5,6),(2,10)) and "+
+" (pk5,pk6,pk7) in ((4,7,8),(5,4,3)) and pk8 > 8 order by 
pk1,pk2,pk3";
+rs = conn.prepareStatement(sql).executeQuery();
+assertResultSet(rs, new Object[][]{{1,3,2,10},{1,3,5,6}});
+}
+}
 }
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/GlobalIndexOptimizationIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/GlobalIndexOptimizationIT.java
index 9567d36..5c2558e 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/GlobalIndexOptimizationIT.java
+++ 
b/phoenix-core/src/it/java/org/apach

[phoenix] branch master updated: PHOENIX-5753 Fix erroneous query result when RVC is clipped with desc column

2020-03-17 Thread chenglei
This is an automated email from the ASF dual-hosted git repository.

chenglei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/master by this push:
 new fb4f857  PHOENIX-5753 Fix erroneous query result when RVC is clipped 
with desc column
fb4f857 is described below

commit fb4f857696a29777a5ad9d68dd61fd51800f8c6b
Author: chenglei 
AuthorDate: Wed Mar 18 12:26:15 2020 +0800

PHOENIX-5753 Fix erroneous query result when RVC is clipped with desc column
---
 .../apache/phoenix/end2end/SkipScanQueryIT.java|  57 +++
 .../end2end/index/GlobalIndexOptimizationIT.java   |   8 +-
 .../org/apache/phoenix/compile/WhereOptimizer.java |  92 +++--
 .../java/org/apache/phoenix/query/KeyRange.java|  12 +
 .../apache/phoenix/compile/WhereCompilerTest.java  |  57 ---
 .../apache/phoenix/compile/WhereOptimizerTest.java | 388 +++--
 6 files changed, 495 insertions(+), 119 deletions(-)

diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
index 5a6bc23..64b897dd 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/SkipScanQueryIT.java
@@ -18,6 +18,7 @@
 package org.apache.phoenix.end2end;
 
 import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.apache.phoenix.util.TestUtil.assertResultSet;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -656,4 +657,60 @@ public class SkipScanQueryIT extends 
ParallelStatsDisabledIT {
 assertFalse(rs.next());
 }
 }
+
+@Test
+public void testRVCClipBug5753() throws Exception {
+String tableName = generateUniqueName();
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+conn.setAutoCommit(true);
+Statement stmt = conn.createStatement();
+
+String sql = "CREATE TABLE "+tableName+" (" +
+ " pk1 INTEGER NOT NULL , " +
+ " pk2 INTEGER NOT NULL, " +
+ " pk3 INTEGER NOT NULL, " +
+ " pk4 INTEGER NOT NULL, " +
+ " pk5 INTEGER NOT NULL, " +
+ " pk6 INTEGER NOT NULL, " +
+ " pk7 INTEGER NOT NULL, " +
+ " pk8 INTEGER NOT NULL, " +
+ " v INTEGER, CONSTRAINT PK PRIMARY KEY(pk1,pk2,pk3 
desc,pk4,pk5,pk6 desc,pk7,pk8))";;
+
+stmt.execute(sql);
+
+stmt.execute(
+"UPSERT INTO " + tableName + " 
(pk1,pk2,pk3,pk4,pk5,pk6,pk7,pk8,v) "+
+"VALUES (1,3,4,10,2,6,7,9,1)");
+
+sql = "select pk1,pk2,pk3,pk4 from " + tableName +
+ " where (pk1 >=1 and pk1<=2) and (pk2>=3 and pk2<=4) and 
(pk3,pk4) < (5,7) order by pk1,pk2,pk3";
+
+ResultSet rs = conn.prepareStatement(sql).executeQuery();
+assertResultSet(rs, new Object[][]{{1,3,4,10}});
+
+sql = "select * from " + tableName +
+" where (pk1 >=1 and pk1<=2) and (pk2>=2 and pk2<=3) and 
(pk3,pk4) < (5,7) and "+
+" (pk5,pk6,pk7) < (5,6,7) and pk8 > 8 order by 
pk1,pk2,pk3";
+rs = conn.prepareStatement(sql).executeQuery();
+assertResultSet(rs, new Object[][]{{1,3,4,10}});
+
+stmt.execute(
+"UPSERT INTO " + tableName + " 
(pk1,pk2,pk3,pk4,pk5,pk6,pk7,pk8,v) "+
+"VALUES (1,3,2,10,5,4,3,9,1)");
+rs = conn.prepareStatement(sql).executeQuery();
+assertResultSet(rs, new Object[][]{{1,3,2,10},{1,3,4,10}});
+
+stmt.execute(
+"UPSERT INTO " + tableName + " 
(pk1,pk2,pk3,pk4,pk5,pk6,pk7,pk8,v) "+
+"VALUES (1,3,5,6,4,7,8,9,1)");
+rs = conn.prepareStatement(sql).executeQuery();
+assertResultSet(rs, new 
Object[][]{{1,3,2,10},{1,3,4,10},{1,3,5,6}});
+
+sql = "select * from " + tableName +
+" where (pk1 >=1 and pk1<=2) and (pk2>=2 and pk2<=3) and 
(pk3,pk4) in ((5,6),(2,10)) and "+
+" (pk5,pk6,pk7) in ((4,7,8),(5,4,3)) and pk8 > 8 order by 
pk1,pk2,pk3";
+rs = conn.prepareStatement(sql).executeQuery();
+assertResultSet(rs, new Object[][]{{1,3,2,10},{1,3,5,6}});
+}
+}
 }
diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/GlobalIndexOptimizationIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/GlobalIndexOptimizationIT.java
index 9567d36..5c2558e 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/GlobalIndexOptimizationIT.java
+++ 
b/phoenix-core/src/it/java/or

Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-4.x-matrix/12/

2020-03-17 Thread Apache Jenkins Server
[...truncated 30 lines...]
Looking at the log, list of test(s) that timed-out:

Build:
https://builds.apache.org/job/Phoenix-4.x-matrix/12/


Affected test class(es):
Set(['as SYSTEM'])


Build step 'Execute shell' marked build as failure
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any

Jenkins build is back to normal : Phoenix | 4.x | HBase Profile » 1.5 #12

2020-03-17 Thread Apache Jenkins Server
See 




Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-master-matrix/29/

2020-03-17 Thread Apache Jenkins Server
[...truncated 21 lines...]
Looking at the log, list of test(s) that timed-out:

Build:
https://builds.apache.org/job/Phoenix-master-matrix/29/


Affected test class(es):
Set(['as SYSTEM'])


Build step 'Execute shell' marked build as failure
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any

Build failed in Jenkins: Phoenix | Master | HBase Profile » 2.2 #29

2020-03-17 Thread Apache Jenkins Server
See 


Changes:

[gjacoby] PHOENIX-5317 Upserting rows into child views with pk fails when the 
base


--
[...truncated 131.32 KB...]
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 96.144 s 
- in org.apache.phoenix.end2end.ConnectionUtilIT
[INFO] Running org.apache.phoenix.end2end.ContextClassloaderIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.085 s 
- in org.apache.phoenix.end2end.ContextClassloaderIT
[INFO] Running org.apache.phoenix.end2end.CostBasedDecisionIT
[WARNING] Tests run: 6, Failures: 0, Errors: 0, Skipped: 6, Time elapsed: 
160.723 s - in org.apache.phoenix.end2end.BackwardCompatibilityIT
[INFO] Running org.apache.phoenix.end2end.CsvBulkLoadToolIT
[INFO] Running org.apache.phoenix.end2end.DropSchemaIT
[INFO] Running org.apache.phoenix.end2end.FlappingLocalIndexIT
[INFO] Running org.apache.phoenix.end2end.IndexExtendedIT
[INFO] Running org.apache.phoenix.end2end.IndexBuildTimestampIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 42.433 s 
- in org.apache.phoenix.end2end.DropSchemaIT
[INFO] Running org.apache.phoenix.end2end.IndexRebuildTaskIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 53.985 s 
- in org.apache.phoenix.end2end.IndexRebuildTaskIT
[INFO] Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 232.709 
s - in org.apache.phoenix.end2end.CsvBulkLoadToolIT
[INFO] Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 408.677 
s - in org.apache.phoenix.end2end.ConcurrentMutationsExtendedIT
[INFO] Running org.apache.phoenix.end2end.IndexScrutinyToolForTenantIT
[INFO] Running org.apache.phoenix.end2end.IndexScrutinyToolIT
[INFO] Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 328.056 
s - in org.apache.phoenix.end2end.FlappingLocalIndexIT
[INFO] Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 343.952 
s - in org.apache.phoenix.end2end.IndexBuildTimestampIT
[INFO] Running org.apache.phoenix.end2end.IndexToolForPartialBuildIT
[INFO] Tests run: 64, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 418.327 
s - in org.apache.phoenix.end2end.IndexExtendedIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 42.421 s 
- in org.apache.phoenix.end2end.IndexToolForPartialBuildIT
[INFO] Running 
org.apache.phoenix.end2end.IndexToolForPartialBuildWithNamespaceEnabledIT
[INFO] Running org.apache.phoenix.end2end.MigrateSystemTablesToSystemNamespaceIT
[INFO] Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 152.1 s 
- in org.apache.phoenix.end2end.IndexScrutinyToolForTenantIT
[INFO] Running org.apache.phoenix.end2end.IndexToolIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 75.12 s 
- in org.apache.phoenix.end2end.IndexToolForPartialBuildWithNamespaceEnabledIT
[INFO] Running org.apache.phoenix.end2end.LocalIndexSplitMergeIT
[INFO] Running 
org.apache.phoenix.end2end.OrderByWithServerClientSpoolingDisabledIT
[INFO] Running org.apache.phoenix.end2end.OrderByWithServerMemoryLimitIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 206.989 
s - in org.apache.phoenix.end2end.LocalIndexSplitMergeIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.886 s 
- in org.apache.phoenix.end2end.OrderByWithServerMemoryLimitIT
[INFO] Running org.apache.phoenix.end2end.ParameterizedIndexUpgradeToolIT
[INFO] Tests run: 20, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 950.12 
s - in org.apache.phoenix.end2end.CostBasedDecisionIT
[INFO] Running org.apache.phoenix.end2end.OrderByWithSpillingIT
[INFO] Running org.apache.phoenix.end2end.PartialResultServerConfigurationIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 96.26 s 
- in org.apache.phoenix.end2end.PartialResultServerConfigurationIT
[INFO] Running org.apache.phoenix.end2end.PermissionNSDisabledIT
[WARNING] Tests run: 33, Failures: 0, Errors: 0, Skipped: 3, Time elapsed: 
783.634 s - in org.apache.phoenix.end2end.IndexScrutinyToolIT
[INFO] Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 497.518 
s - in org.apache.phoenix.end2end.OrderByWithServerClientSpoolingDisabledIT
[INFO] Running org.apache.phoenix.end2end.PermissionNSEnabledIT
[INFO] Running org.apache.phoenix.end2end.PermissionsCacheIT
[INFO] Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 497.098 
s - in org.apache.phoenix.end2end.OrderByWithSpillingIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 922.889 
s - in org.apache.phoenix.end2end.MigrateSystemTablesToSystemNamespaceIT
[INFO] Running org.apache.phoenix.end2end.PhoenixDriverIT
[INFO] Running org.apache.phoenix.end2end.QueryLoggerIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 143.921 
s - in org.apache.phoenix.end2end.PhoenixDriverIT
[INFO]

Build failed in Jenkins: Phoenix | Master | HBase Profile » 2.0 #29

2020-03-17 Thread Apache Jenkins Server
See 


Changes:

[gjacoby] PHOENIX-5317 Upserting rows into child views with pk fails when the 
base


--
[...truncated 408.91 KB...]
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.apache.hadoop.hbase.util.Methods.call(Methods.java:40)
at org.apache.hadoop.hbase.security.User.runAsLoginUser(User.java:183)
... 11 more

Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException: 
java.io.IOException: org.apache.hadoop.hbase.security.AccessDeniedException: 
Insufficient permissions (user=regularUser1_N78, scope=hbase:acl, 
family=l:regularUser2_N79, 
params=[table=hbase:acl,family=l:regularUser2_N79],action=WRITE)
at org.apache.hadoop.hbase.security.User.runAsLoginUser(User.java:185)
at 
org.apache.hadoop.hbase.security.access.AccessController.revoke(AccessController.java:2117)
at 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos$AccessControlService$1.revoke(AccessControlProtos.java:10031)
at 
org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos$AccessControlService.callMethod(AccessControlProtos.java:10192)
at 
org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:8106)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:2409)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:2391)
at 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:42010)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:413)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:130)
at 
org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:324)
at 
org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:304)
Caused by: org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient 
permissions (user=regularUser1_N78, scope=hbase:acl, 
family=l:regularUser2_N79, 
params=[table=hbase:acl,family=l:regularUser2_N79],action=WRITE)
at 
org.apache.hadoop.hbase.security.access.AccessController.preDelete(AccessController.java:1551)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$26.call(RegionCoprocessorHost.java:980)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$26.call(RegionCoprocessorHost.java:977)
at 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost$ObserverOperationWithoutResult.callObserver(CoprocessorHost.java:540)
at 
org.apache.hadoop.hbase.coprocessor.CoprocessorHost.execOperation(CoprocessorHost.java:614)
at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preDelete(RegionCoprocessorHost.java:977)
at 
org.apache.hadoop.hbase.regionserver.HRegion$MutationBatchOperation.callPreMutateCPHook(HRegion.java:3628)
at 
org.apache.hadoop.hbase.regionserver.HRegion$MutationBatchOperation.access$700(HRegion.java:3391)
at 
org.apache.hadoop.hbase.regionserver.HRegion$MutationBatchOperation$1.visit(HRegion.java:3460)
at 
org.apache.hadoop.hbase.regionserver.HRegion$BatchOperation.visitBatchOperations(HRegion.java:3068)
at 
org.apache.hadoop.hbase.regionserver.HRegion$MutationBatchOperation.checkAndPrepare(HRegion.java:3450)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:3887)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:3821)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:3812)
at 
org.apache.hadoop.hbase.regionserver.HRegion.batchMutate(HRegion.java:3826)
at 
org.apache.hadoop.hbase.regionserver.HRegion.doBatchMutate(HRegion.java:4153)
at 
org.apache.hadoop.hbase.regionserver.HRegion.delete(HRegion.java:2907)
at 
org.apache.hadoop.hbase.regionserver.RSRpcServices.mutate(RSRpcServices.java:2840)
at 
org.apache.hadoop.hbase.client.ClientServiceCallable.doMutate(ClientServiceCallable.java:55)
at org.apache.hadoop.hbase.client.HTable$2.rpcCall(HTable.java:498)
at org.apache.hadoop.hbase.client.HTable$2.rpcCall(HTable.java:493)
at 
org.apache.hadoop.hbase.client.RegionServerCallable.call(RegionServerCallable.java:127)
at 
org.apache.hadoop.hbase.client.RpcRetryingCallerImpl.callWithRetries(RpcRetryingCallerImpl.java:107)
at org.apache.hadoop.hbase.client.HTable.delete(HTable.java:503)
at 
org.apache.hadoop.hbase.security.access.AccessControlLists.removePermissionRecord(AccessControlLists.java:262)
at 
org.apache.hadoop.hbase.security.access.AccessControlLists.removeUserPermission(AccessControlLists.java:246)
at 
org.apache.hadoop.hbase.security.access.AccessContr

Jenkins build is back to normal : Phoenix | 4.x | HBase Profile » 1.4 #11

2020-03-17 Thread Apache Jenkins Server
See 




Apache Phoenix - Timeout crawler - Build https://builds.apache.org/job/Phoenix-4.x-matrix/11/

2020-03-17 Thread Apache Jenkins Server
[...truncated 21 lines...]
Looking at the log, list of test(s) that timed-out:

Build:
https://builds.apache.org/job/Phoenix-4.x-matrix/11/


Affected test class(es):
Set(['as SYSTEM'])


Build step 'Execute shell' marked build as failure
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any

Build failed in Jenkins: Phoenix | 4.x | HBase Profile » 1.3 #11

2020-03-17 Thread Apache Jenkins Server
See 


Changes:

[gjacoby] PHOENIX-5317 Upserting rows into child views with pk fails when the 
base


--
[...truncated 163.70 KB...]
[INFO] Excluding com.github.stephenc.findbugs:findbugs-annotations:jar:1.3.9-1 
from the shaded jar.
[INFO] Excluding com.github.stephenc.jcip:jcip-annotations:jar:1.0-1 from the 
shaded jar.
[INFO] Excluding com.fasterxml.jackson.core:jackson-databind:jar:2.10.3 from 
the shaded jar.
[INFO] Excluding com.fasterxml.jackson.core:jackson-core:jar:2.10.3 from the 
shaded jar.
[INFO] Excluding com.fasterxml.jackson.core:jackson-annotations:jar:2.10.3 from 
the shaded jar.
[INFO] Excluding com.google.protobuf:protobuf-java:jar:2.5.0 from the shaded 
jar.
[INFO] Excluding log4j:log4j:jar:1.2.17 from the shaded jar.
[INFO] Excluding org.slf4j:slf4j-api:jar:1.6.4 from the shaded jar.
[INFO] Excluding org.iq80.snappy:snappy:jar:0.3 from the shaded jar.
[INFO] Excluding org.apache.htrace:htrace-core:jar:3.1.0-incubating from the 
shaded jar.
[INFO] Excluding commons-codec:commons-codec:jar:1.7 from the shaded jar.
[INFO] Excluding commons-collections:commons-collections:jar:3.2.2 from the 
shaded jar.
[INFO] Including org.apache.commons:commons-csv:jar:1.0 in the shaded jar.
[INFO] Excluding com.google.code.findbugs:jsr305:jar:2.0.1 from the shaded jar.
[INFO] Excluding org.apache.hbase:hbase-annotations:jar:1.3.5 from the shaded 
jar.
[INFO] Excluding org.apache.hbase:hbase-common:jar:1.3.5 from the shaded jar.
[INFO] Excluding org.mortbay.jetty:jetty-util:jar:6.1.26 from the shaded jar.
[INFO] Excluding org.apache.hbase:hbase-protocol:jar:1.3.5 from the shaded jar.
[INFO] Excluding org.apache.hbase:hbase-client:jar:1.3.5 from the shaded jar.
[INFO] Excluding io.netty:netty-all:jar:4.0.50.Final from the shaded jar.
[INFO] Excluding org.apache.zookeeper:zookeeper:jar:3.4.6 from the shaded jar.
[INFO] Excluding org.codehaus.jackson:jackson-mapper-asl:jar:1.9.13 from the 
shaded jar.
[INFO] Excluding org.jruby.jcodings:jcodings:jar:1.0.8 from the shaded jar.
[INFO] Excluding org.apache.hbase:hbase-server:jar:1.3.5 from the shaded jar.
[INFO] Excluding org.apache.hbase:hbase-procedure:jar:1.3.5 from the shaded jar.
[INFO] Excluding org.apache.hbase:hbase-prefix-tree:jar:1.3.5 from the shaded 
jar.
[INFO] Excluding commons-httpclient:commons-httpclient:jar:3.1 from the shaded 
jar.
[INFO] Excluding com.sun.jersey:jersey-core:jar:1.9 from the shaded jar.
[INFO] Excluding com.sun.jersey:jersey-server:jar:1.9 from the shaded jar.
[INFO] Excluding asm:asm:jar:3.1 from the shaded jar.
[INFO] Excluding org.mortbay.jetty:jetty:jar:6.1.26 from the shaded jar.
[INFO] Excluding org.mortbay.jetty:jetty-sslengine:jar:6.1.26 from the shaded 
jar.
[INFO] Excluding org.mortbay.jetty:jsp-2.1:jar:6.1.14 from the shaded jar.
[INFO] Excluding org.mortbay.jetty:jsp-api-2.1:jar:6.1.14 from the shaded jar.
[INFO] Excluding org.mortbay.jetty:servlet-api-2.5:jar:6.1.14 from the shaded 
jar.
[INFO] Excluding org.codehaus.jackson:jackson-core-asl:jar:1.9.13 from the 
shaded jar.
[INFO] Excluding org.codehaus.jackson:jackson-jaxrs:jar:1.9.13 from the shaded 
jar.
[INFO] Excluding tomcat:jasper-compiler:jar:5.5.23 from the shaded jar.
[INFO] Excluding tomcat:jasper-runtime:jar:5.5.23 from the shaded jar.
[INFO] Excluding commons-el:commons-el:jar:1.0 from the shaded jar.
[INFO] Excluding org.jamon:jamon-runtime:jar:2.4.1 from the shaded jar.
[INFO] Excluding org.apache.hbase:hbase-hadoop-compat:jar:1.3.5 from the shaded 
jar.
[INFO] Excluding org.apache.hbase:hbase-hadoop2-compat:jar:1.3.5 from the 
shaded jar.
[INFO] Excluding org.apache.hadoop:hadoop-common:jar:2.7.5 from the shaded jar.
[INFO] Excluding xmlenc:xmlenc:jar:0.52 from the shaded jar.
[INFO] Excluding commons-net:commons-net:jar:3.1 from the shaded jar.
[INFO] Excluding javax.servlet:servlet-api:jar:2.5 from the shaded jar.
[INFO] Excluding javax.servlet.jsp:jsp-api:jar:2.1 from the shaded jar.
[INFO] Excluding com.sun.jersey:jersey-json:jar:1.9 from the shaded jar.
[INFO] Excluding org.codehaus.jettison:jettison:jar:1.1 from the shaded jar.
[INFO] Excluding com.sun.xml.bind:jaxb-impl:jar:2.2.3-1 from the shaded jar.
[INFO] Excluding org.codehaus.jackson:jackson-xc:jar:1.8.3 from the shaded jar.
[INFO] Excluding net.java.dev.jets3t:jets3t:jar:0.9.0 from the shaded jar.
[INFO] Excluding com.jamesmurty.utils:java-xmlbuilder:jar:0.4 from the shaded 
jar.
[INFO] Excluding commons-configuration:commons-configuration:jar:1.6 from the 
shaded jar.
[INFO] Excluding commons-digester:commons-digester:jar:1.8 from the shaded jar.
[INFO] Excluding commons-beanutils:commons-beanutils-core:jar:1.8.0 from the 
shaded jar.
[INFO] Excluding org.apache.avro:avro:jar:1.7.4 from the shaded jar.
[INFO] Excluding com.thoughtworks.paranamer:paranamer:jar:2.3 from the shaded 
jar.
[INFO] Excluding com.jcraft:jsch:jar:0.1.54 from the sha

Build failed in Jenkins: Phoenix | 4.x | HBase Profile » 1.5 #11

2020-03-17 Thread Apache Jenkins Server
See 


Changes:

[gjacoby] PHOENIX-5317 Upserting rows into child views with pk fails when the 
base


--
[...truncated 496.10 KB...]
[INFO]  T E S T S
[INFO] ---
[INFO] 
[INFO] Results:
[INFO] 
[INFO] Tests run: 0, Failures: 0, Errors: 0, Skipped: 0
[INFO] 
[INFO] 
[INFO] --- maven-failsafe-plugin:2.22.0:integration-test 
(NeedTheirOwnClusterTests) @ phoenix-core ---
[INFO] 
[INFO] ---
[INFO]  T E S T S
[INFO] ---
[INFO] Running 
org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
[WARNING] Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 0.003 
s - in 
org.apache.hadoop.hbase.regionserver.wal.WALReplayWithIndexWritesAndCompressedWALIT
[INFO] Running org.apache.phoenix.end2end.ConnectionUtilIT
[INFO] Running 
org.apache.hadoop.hbase.regionserver.wal.WALRecoveryRegionPostOpenIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.044 s 
- in org.apache.hadoop.hbase.regionserver.wal.WALRecoveryRegionPostOpenIT
[INFO] Running org.apache.phoenix.end2end.ConcurrentMutationsExtendedIT
[INFO] Running org.apache.phoenix.end2end.CountDistinctCompressionIT
[INFO] Running org.apache.phoenix.end2end.ContextClassloaderIT
[INFO] Running org.apache.phoenix.end2end.CsvBulkLoadToolIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.665 s 
- in org.apache.phoenix.end2end.CountDistinctCompressionIT
[INFO] Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.73 s - 
in org.apache.phoenix.end2end.ContextClassloaderIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 53.824 s 
- in org.apache.phoenix.end2end.ConnectionUtilIT
[INFO] Running org.apache.phoenix.end2end.CostBasedDecisionIT
[INFO] Running org.apache.phoenix.end2end.DropSchemaIT
[INFO] Running org.apache.phoenix.end2end.IndexBuildTimestampIT
[INFO] Running org.apache.phoenix.end2end.FlappingLocalIndexIT
[INFO] Running org.apache.phoenix.end2end.IndexExtendedIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 31.895 s 
- in org.apache.phoenix.end2end.DropSchemaIT
[INFO] Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 154.572 
s - in org.apache.phoenix.end2end.CsvBulkLoadToolIT
[INFO] Running org.apache.phoenix.end2end.IndexRebuildTaskIT
[INFO] Running org.apache.phoenix.end2end.IndexScrutinyToolForTenantIT
[INFO] Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 272.45 
s - in org.apache.phoenix.end2end.ConcurrentMutationsExtendedIT
[INFO] Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 186.573 
s - in org.apache.phoenix.end2end.FlappingLocalIndexIT
[INFO] Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 190.087 
s - in org.apache.phoenix.end2end.IndexBuildTimestampIT
[INFO] Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 93.448 s 
- in org.apache.phoenix.end2end.IndexScrutinyToolForTenantIT
[INFO] Running org.apache.phoenix.end2end.IndexScrutinyToolIT
[INFO] Running 
org.apache.phoenix.end2end.IndexToolForPartialBuildWithNamespaceEnabledIT
[INFO] Running org.apache.phoenix.end2end.IndexToolForPartialBuildIT
[INFO] Tests run: 64, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 268.472 
s - in org.apache.phoenix.end2end.IndexExtendedIT
[INFO] Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 36.489 s 
- in org.apache.phoenix.end2end.IndexToolForPartialBuildIT
[INFO] Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 55.958 s 
- in org.apache.phoenix.end2end.IndexToolForPartialBuildWithNamespaceEnabledIT
[INFO] Running org.apache.phoenix.end2end.IndexToolIT
[ERROR] Tests run: 1, Failures: 1, Errors: 0, Skipped: 0, Time elapsed: 231.657 
s <<< FAILURE! - in org.apache.phoenix.end2end.IndexRebuildTaskIT
[ERROR] testIndexRebuildTask(org.apache.phoenix.end2end.IndexRebuildTaskIT)  
Time elapsed: 231.656 s  <<< FAILURE!
java.lang.AssertionError: Ran out of time waiting for task state to become 
COMPLETED
at 
org.apache.phoenix.end2end.IndexRebuildTaskIT.waitForTaskState(IndexRebuildTaskIT.java:196)
at 
org.apache.phoenix.end2end.IndexRebuildTaskIT.testIndexRebuildTask(IndexRebuildTaskIT.java:156)

[INFO] Running org.apache.phoenix.end2end.MigrateSystemTablesToSystemNamespaceIT
[INFO] Running org.apache.phoenix.end2end.LocalIndexSplitMergeIT
[INFO] Running org.apache.phoenix.end2end.MaxLookbackIT
[INFO] Running 
org.apache.phoenix.end2end.OrderByWithServerClientSpoolingDisabledIT
[INFO] Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 68.442 s 
- in org.apache.phoenix.end2end.MaxLookbackIT
[INFO] Running org.apache.phoenix.end2end.OrderByWithServerMemoryLimitIT
[INFO] Tests run: 1, Failures: 0, Error

Jenkins build is back to normal : Phoenix | Master | HBase Profile » 2.1 #29

2020-03-17 Thread Apache Jenkins Server
See 




[phoenix] branch PHOENIX-5748-4.x-HBase-1.5 updated: PHOENIX-5749: Add unit tests for verifySingleIndexRow() of IndexRebui… (#725)

2020-03-17 Thread skadam
This is an automated email from the ASF dual-hosted git repository.

skadam pushed a commit to branch PHOENIX-5748-4.x-HBase-1.5
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/PHOENIX-5748-4.x-HBase-1.5 by 
this push:
 new d6f939b  PHOENIX-5749: Add unit tests for verifySingleIndexRow() of 
IndexRebui… (#725)
d6f939b is described below

commit d6f939bc903480675825bea6c3b1b2be04bca599
Author: Swaroopa Kadam 
AuthorDate: Tue Mar 17 12:24:38 2020 -0700

PHOENIX-5749: Add unit tests for verifySingleIndexRow() of IndexRebui… 
(#725)

PHOENIX-5749: Add unit tests for verifySingleIndexRow() of 
IndexRebuildRegionScanner
---
 .../coprocessor/IndexRebuildRegionScanner.java | 304 ++
 .../coprocessor/IndexToolVerificationResult.java   | 304 ++
 .../index/PhoenixIndexImportDirectReducer.java |   5 +-
 .../phoenix/index/VerifySingleIndexRowTest.java| 637 +
 4 files changed, 998 insertions(+), 252 deletions(-)

diff --git 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/IndexRebuildRegionScanner.java
 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/IndexRebuildRegionScanner.java
index 6cb1145..ad549e5 100644
--- 
a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/IndexRebuildRegionScanner.java
+++ 
b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/IndexRebuildRegionScanner.java
@@ -42,15 +42,16 @@ import static 
org.apache.phoenix.query.QueryServices.MUTATE_BATCH_SIZE_BYTES_ATT
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableSet;
+import java.util.TreeMap;
 import java.util.concurrent.ExecutionException;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
@@ -109,236 +110,14 @@ import com.google.common.collect.Maps;
 
 public class IndexRebuildRegionScanner extends BaseRegionScanner {
 
-public static class VerificationResult {
-public static class PhaseResult {
-private long validIndexRowCount = 0;
-private long expiredIndexRowCount = 0;
-private long missingIndexRowCount = 0;
-private long invalidIndexRowCount = 0;
-
-public void add(PhaseResult phaseResult) {
-validIndexRowCount += phaseResult.validIndexRowCount;
-expiredIndexRowCount += phaseResult.expiredIndexRowCount;
-missingIndexRowCount += phaseResult.missingIndexRowCount;
-invalidIndexRowCount += phaseResult.invalidIndexRowCount;
-}
-
-public long getTotalCount() {
-return validIndexRowCount + expiredIndexRowCount + 
missingIndexRowCount + invalidIndexRowCount;
-}
-
-@Override
-public String toString() {
-return "PhaseResult{" +
-"validIndexRowCount=" + validIndexRowCount +
-", expiredIndexRowCount=" + expiredIndexRowCount +
-", missingIndexRowCount=" + missingIndexRowCount +
-", invalidIndexRowCount=" + invalidIndexRowCount +
-'}';
-}
-}
-
-private long scannedDataRowCount = 0;
-private long rebuiltIndexRowCount = 0;
-private PhaseResult before = new PhaseResult();
-private PhaseResult after = new PhaseResult();
-
-@Override
-public String toString() {
-return "VerificationResult{" +
-"scannedDataRowCount=" + scannedDataRowCount +
-", rebuiltIndexRowCount=" + rebuiltIndexRowCount +
-", before=" + before +
-", after=" + after +
-'}';
-}
-
-public long getScannedDataRowCount() {
-return scannedDataRowCount;
-}
-
-public long getRebuiltIndexRowCount() {
-return rebuiltIndexRowCount;
-}
-
-public long getBeforeRebuildValidIndexRowCount() {
-return before.validIndexRowCount;
-}
-
-public long getBeforeRebuildExpiredIndexRowCount() {
-return before.expiredIndexRowCount;
-}
-
-public long getBeforeRebuildInvalidIndexRowCount() {
-return before.invalidIndexRowCount;
-}
-
-public long getBeforeRebuildMissingIndexRowCount() {
-return before.missingIndexRowCount;
-}
-
-public long getAfterRebuildValidIndexRowCount() {
-return after.validIndexRowCount;
-}
-
-public long getAfterRebuildExpiredIndexRowCount() {
-return after.expiredIndexR

[phoenix] branch 4.x updated: PHOENIX-5607 Client-server backward compatibility tests

2020-03-17 Thread chinmayskulkarni
This is an automated email from the ASF dual-hosted git repository.

chinmayskulkarni pushed a commit to branch 4.x
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/4.x by this push:
 new b83593b  PHOENIX-5607 Client-server backward compatibility tests
b83593b is described below

commit b83593b953931e8bf7c94d8d9be26e7b3a4d1203
Author: Sandeep Guggilam 
AuthorDate: Wed Mar 11 20:32:48 2020 -0700

PHOENIX-5607 Client-server backward compatibility tests

Signed-off-by: Chinmay Kulkarni 
---
 .../phoenix/end2end/BackwardCompatibilityIT.java   | 428 +
 .../it/resources/compatible_client_versions.json   |   7 +
 .../resources/gold_files/gold_query_add_data.txt   |  43 +++
 .../resources/gold_files/gold_query_add_delete.txt |  22 ++
 .../resources/gold_files/gold_query_create_add.txt |  32 ++
 .../src/it/resources/sql_files/add_data.sql|  27 ++
 .../src/it/resources/sql_files/add_delete.sql  |  26 ++
 .../src/it/resources/sql_files/create_add.sql  |  33 ++
 phoenix-core/src/it/resources/sql_files/query.sql  |  24 ++
 .../it/resources/sql_files/query_add_delete.sql|  26 ++
 .../src/it/resources/sql_files/query_more.sql  |  25 ++
 phoenix-core/src/it/scripts/execute_query.sh   |  40 ++
 .../phoenix/coprocessor/MetaDataProtocol.java  |   5 +-
 13 files changed, 735 insertions(+), 3 deletions(-)

diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BackwardCompatibilityIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BackwardCompatibilityIT.java
new file mode 100644
index 000..ee105e2
--- /dev/null
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BackwardCompatibilityIT.java
@@ -0,0 +1,428 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.apache.phoenix.query.BaseTest.setUpConfigForMiniCluster;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeFalse;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.InputStreamReader;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.util.Collection;
+import java.util.List;
+import java.util.Properties;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.curator.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.util.VersionInfo;
+import org.apache.phoenix.coprocessor.MetaDataProtocol;
+import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
+import org.apache.phoenix.jdbc.PhoenixDriver;
+import org.apache.phoenix.query.QueryConstants;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.query.QueryServicesOptions;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParser;
+import com.google.gson.stream.JsonReader;
+
+/**
+ * This class is meant for testing all compatible client versions 
+ * against the current server version. It runs SQL queries with given 
+ * client versions and compares the output against gold files
+ */
+
+@RunWith(Parameterized.class)
+@Category(NeedsOwnMiniClusterTest.class)
+public class BackwardC

[phoenix] branch master updated: PHOENIX-5317 Upserting rows into child views with pk fails when the base view has an index on it

2020-03-17 Thread gjacoby
This is an automated email from the ASF dual-hosted git repository.

gjacoby pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/master by this push:
 new 2d88e78  PHOENIX-5317 Upserting rows into child views with pk fails 
when the base view has an index on it
2d88e78 is described below

commit 2d88e786eb80fa5c1d9e74c55c6d0c464738c4f2
Author: Sandeep Guggilam 
AuthorDate: Thu Mar 12 11:26:55 2020 -0700

PHOENIX-5317 Upserting rows into child views with pk fails when the base 
view has an index on it

Signed-off-by: Geoffrey Jacoby 
---
 .../phoenix/end2end/MetaDataEndpointImplIT.java|  83 -
 .../coprocessor/generated/ServerCachingProtos.java | 129 ++---
 .../org/apache/phoenix/index/IndexMaintainer.java  |  46 ++--
 .../src/main/ServerCachingService.proto|   1 +
 4 files changed, 227 insertions(+), 32 deletions(-)

diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MetaDataEndpointImplIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MetaDataEndpointImplIT.java
index 75af5f8..6724da9 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MetaDataEndpointImplIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MetaDataEndpointImplIT.java
@@ -10,18 +10,18 @@ import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.phoenix.end2end.ParallelStatsDisabledIT;
-import org.apache.phoenix.util.TableViewFinderResult;
-import org.apache.phoenix.util.ViewUtil;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.TableViewFinderResult;
+import org.apache.phoenix.util.ViewUtil;
 import org.junit.Test;
 
 import com.google.common.base.Joiner;
@@ -96,6 +96,83 @@ public class MetaDataEndpointImplIT extends 
ParallelStatsDisabledIT {
 assertColumnNamesEqual(PhoenixRuntime.getTable(conn, 
childMostView.getName().getString()), "PK2", "V1", "V2", "CARRIER", 
"DROPPED_CALLS");
 
 }
+
+@Test
+public void testUpsertIntoChildViewWithPKAndIndex() throws Exception {
+String baseTable = generateUniqueName();
+String view = generateUniqueName();
+String childView = generateUniqueName();
+
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String baseTableDDL = "CREATE TABLE IF NOT EXISTS " + baseTable + 
+" (TENANT_ID VARCHAR NOT NULL, KEY_PREFIX CHAR(3) NOT 
NULL, "
++ "V1 VARCHAR CONSTRAINT PK PRIMARY KEY(TENANT_ID, 
KEY_PREFIX)) "
++ "VERSIONS=1, IMMUTABLE_ROWS=TRUE";
+conn.createStatement().execute(baseTableDDL);
+String view1DDL = "CREATE VIEW IF NOT EXISTS " + view + 
+"(V2 VARCHAR NOT NULL,V3 BIGINT NOT NULL, "
++ "V4 VARCHAR CONSTRAINT PKVIEW PRIMARY KEY(V2, V3)) AS 
SELECT * FROM " 
++ baseTable + " WHERE KEY_PREFIX = '0CY'";
+conn.createStatement().execute(view1DDL);
+
+// Create an Index on the base view
+String view1Index = generateUniqueName() + "_IDX";
+conn.createStatement().execute("CREATE INDEX " + view1Index + 
+" ON " + view + " (V2, V3) include (V1, V4)");
+
+// Create a child view with primary key constraint
+String childViewDDL = "CREATE VIEW IF NOT EXISTS " + childView 
++ " (V5 VARCHAR NOT NULL, V6 VARCHAR NOT NULL CONSTRAINT 
PK PRIMARY KEY "
++ "(V5, V6)) AS SELECT * FROM " + view;
+conn.createStatement().execute(childViewDDL);
+
+String upsert = "UPSERT INTO " + childView + " (TENANT_ID, V2, V3, 
V5, V6) "
++ "VALUES ('00D0050',  'z', 10, 'z', 
'z')";
+conn.createStatement().executeUpdate(upsert);
+conn.commit();
+}
+}
+
+@Test
+public void testUpsertIntoTenantChildViewWithPKAndIndex() throws Exception 
{
+String baseTable = generateUniqueName();
+String view = generateUniqueName();
+String childView = generateUniqueName();
+String tenantId = "TENANT";
+
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String baseTableDDL = "CREATE TABLE IF NOT EXISTS " + baseTable + 
+" (TENANT_ID VARCHAR NOT NULL, KEY_PREFIX CHAR(3) NOT 
NULL, "
++ "V1 VARCHAR CONSTRAINT PK

[phoenix] branch 4.x updated: PHOENIX-5317 Upserting rows into child views with pk fails when the base view has an index on it

2020-03-17 Thread gjacoby
This is an automated email from the ASF dual-hosted git repository.

gjacoby pushed a commit to branch 4.x
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/4.x by this push:
 new 3713421  PHOENIX-5317 Upserting rows into child views with pk fails 
when the base view has an index on it
3713421 is described below

commit 37134215a47b1b4ed8fd426c112b4f4e4e41e8cd
Author: Sandeep Guggilam 
AuthorDate: Sun Mar 8 14:10:53 2020 -0700

PHOENIX-5317 Upserting rows into child views with pk fails when the base 
view has an index on it

Signed-off-by: Geoffrey Jacoby 
---
 .../phoenix/end2end/MetaDataEndpointImplIT.java|  82 -
 .../coprocessor/generated/ServerCachingProtos.java | 129 ++---
 .../org/apache/phoenix/index/IndexMaintainer.java  |  49 ++--
 .../src/main/ServerCachingService.proto|   1 +
 4 files changed, 230 insertions(+), 31 deletions(-)

diff --git 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MetaDataEndpointImplIT.java
 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MetaDataEndpointImplIT.java
index 21ab6f8..dca4b6b 100644
--- 
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/MetaDataEndpointImplIT.java
+++ 
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/MetaDataEndpointImplIT.java
@@ -11,18 +11,19 @@ import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
 
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HTable;
-import org.apache.phoenix.util.TableViewFinderResult;
-import org.apache.phoenix.util.ViewUtil;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
 import org.apache.phoenix.schema.PColumn;
 import org.apache.phoenix.schema.PTable;
 import org.apache.phoenix.schema.TableNotFoundException;
 import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.TableViewFinderResult;
+import org.apache.phoenix.util.ViewUtil;
 import org.junit.Test;
 
 import com.google.common.base.Joiner;
@@ -113,6 +114,83 @@ public class MetaDataEndpointImplIT extends 
ParallelStatsDisabledIT {
 // now lets check and make sure the columns are correct
 assertColumnNamesEqual(PhoenixRuntime.getTable(conn, 
leftChild.toUpperCase()), "PK2", "V1", "V2", "CARRIER");
 }
+
+@Test
+public void testUpsertIntoChildViewWithPKAndIndex() throws Exception {
+String baseTable = generateUniqueName();
+String view = generateUniqueName();
+String childView = generateUniqueName();
+
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String baseTableDDL = "CREATE TABLE IF NOT EXISTS " + baseTable + 
+" (TENANT_ID VARCHAR NOT NULL, KEY_PREFIX CHAR(3) NOT 
NULL, "
++ "V1 VARCHAR CONSTRAINT PK PRIMARY KEY(TENANT_ID, 
KEY_PREFIX)) "
++ "VERSIONS=1, IMMUTABLE_ROWS=TRUE";
+conn.createStatement().execute(baseTableDDL);
+String view1DDL = "CREATE VIEW IF NOT EXISTS " + view + 
+"(V2 VARCHAR NOT NULL,V3 BIGINT NOT NULL, "
++ "V4 VARCHAR CONSTRAINT PKVIEW PRIMARY KEY(V2, V3)) AS 
SELECT * FROM " 
++ baseTable + " WHERE KEY_PREFIX = '0CY'";
+conn.createStatement().execute(view1DDL);
+
+// Create an Index on the base view
+String view1Index = generateUniqueName() + "_IDX";
+conn.createStatement().execute("CREATE INDEX " + view1Index + 
+" ON " + view + " (V2, V3) include (V1, V4)");
+
+// Create a child view with primary key constraint
+String childViewDDL = "CREATE VIEW IF NOT EXISTS " + childView 
++ " (V5 VARCHAR NOT NULL, V6 VARCHAR NOT NULL CONSTRAINT 
PK PRIMARY KEY "
++ "(V5, V6)) AS SELECT * FROM " + view;
+conn.createStatement().execute(childViewDDL);
+
+String upsert = "UPSERT INTO " + childView + " (TENANT_ID, V2, V3, 
V5, V6) "
++ "VALUES ('00D0050',  'z', 10, 'z', 
'z')";
+conn.createStatement().executeUpdate(upsert);
+conn.commit();
+}
+}
+
+@Test
+public void testUpsertIntoTenantChildViewWithPKAndIndex() throws Exception 
{
+String baseTable = generateUniqueName();
+String view = generateUniqueName();
+String childView = generateUniqueName();
+String tenantId = "TENANT";
+
+try (Connection conn = DriverManager.getConnection(getUrl())) {
+String baseTableDDL = "CREATE TABLE IF NOT EXISTS " + baseTable + 
+" (TENANT_ID VARCHAR NOT NULL, KEY_PREFIX CHAR(3) NOT 
NULL, "
++ "V1 VARCHAR CONSTRAINT PK PRIM