Apache-Phoenix | master | HBase 2.5 | Build #651 ABORTED

2024-02-24 Thread Apache Jenkins Server

master branch  HBase 2.5  build #651 status ABORTED
Build #651 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/master/651/


Apache-Phoenix | master | HBase 2.4 | Build #651 FAILURE

2024-02-24 Thread Apache Jenkins Server

master branch  HBase 2.4  build #651 status FAILURE
Build #651 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/master/651/


Apache-Phoenix | 5.2 | HBase 2.5 | Build #4 ABORTED

2024-02-24 Thread Apache Jenkins Server

5.2 branch  HBase 2.5  build #4 status ABORTED
Build #4 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.2/4/


Apache-Phoenix | 5.2 | HBase 2.4 | Build #4 ABORTED

2024-02-24 Thread Apache Jenkins Server

5.2 branch  HBase 2.4  build #4 status ABORTED
Build #4 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.2/4/


(phoenix) branch 5.2 updated: PHOENIX-7229 BloomFilter metrics usage need revert (ADDENDUM)

2024-02-24 Thread vjasani
This is an automated email from the ASF dual-hosted git repository.

vjasani pushed a commit to branch 5.2
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/5.2 by this push:
 new 63bdc62823 PHOENIX-7229 BloomFilter metrics usage need revert (ADDENDUM)
63bdc62823 is described below

commit 63bdc62823ec9dfbab3dfc1bf7301c4a60380e59
Author: Viraj Jasani 
AuthorDate: Sat Feb 24 23:39:11 2024 -0800

PHOENIX-7229 BloomFilter metrics usage need revert (ADDENDUM)
---
 .../org/apache/phoenix/end2end/BloomFilterIT.java  | 244 -
 1 file changed, 244 deletions(-)

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java
deleted file mode 100644
index 555650d2ee..00
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java
+++ /dev/null
@@ -1,244 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.phoenix.end2end;
-
-import static org.apache.phoenix.util.TestUtil.flush;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.Map;
-
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.regionserver.MetricsRegionServer;
-import org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper;
-import org.apache.phoenix.thirdparty.com.google.common.base.MoreObjects;
-import org.apache.phoenix.thirdparty.com.google.common.collect.Maps;
-import org.apache.phoenix.util.ReadOnlyProps;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-@Category(NeedsOwnMiniClusterTest.class)
-public class BloomFilterIT extends ParallelStatsDisabledIT {
-
-private static final Logger LOGGER = LoggerFactory.getLogger(BloomFilterIT.class);
-private static class BloomFilterMetrics {
-// total lookup requests
-private long requestsCount;
-// requests where key does not exist
-private long negativeResultsCount;
-// potential lookup requests rejected because no bloom filter present in storefile
-private long eligibleRequestsCount;
-
-private BloomFilterMetrics() {
-this.requestsCount = 0;
-this.negativeResultsCount = 0;
-this.eligibleRequestsCount = 0;
-}
-
-private BloomFilterMetrics(long requestsCount, long negativeResultsCount, long eligibleRequestsCount) {
-this.requestsCount = requestsCount;
-this.negativeResultsCount = negativeResultsCount;
-this.eligibleRequestsCount = eligibleRequestsCount;
-}
-
-@Override
-public boolean equals(Object obj) {
-if (this == obj) {
-return true;
-}
-if (obj == null || getClass() != obj.getClass()) {
-return false;
-}
-BloomFilterMetrics rhs = (BloomFilterMetrics)obj;
-return (this.requestsCount == rhs.requestsCount &&
-this.negativeResultsCount == rhs.negativeResultsCount &&
-this.eligibleRequestsCount == rhs.eligibleRequestsCount);
-}
-
-@Override
-public String toString() {
-return MoreObjects.toStringHelper(this)
-.add("requestsCount", requestsCount)
-.add("negativeResultsCount", negativeResultsCount)
-.add("eligibleRequestsCount", eligibleRequestsCount)
-.toString();
-}
-}
-private BloomFilterMetrics beforeMetrics;
-
-private BloomFilterMetrics getBloomFilterMetrics() {
-  

(phoenix) branch master updated: PHOENIX-7229 BloomFilter metrics usage need revert (ADDENDUM)

2024-02-24 Thread vjasani
This is an automated email from the ASF dual-hosted git repository.

vjasani pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/master by this push:
 new d559ff3d4d PHOENIX-7229 BloomFilter metrics usage need revert (ADDENDUM)
d559ff3d4d is described below

commit d559ff3d4d35041ff69621e6a9fb4e234d51f67f
Author: Viraj Jasani 
AuthorDate: Sat Feb 24 23:39:11 2024 -0800

PHOENIX-7229 BloomFilter metrics usage need revert (ADDENDUM)
---
 .../org/apache/phoenix/end2end/BloomFilterIT.java  | 244 -
 1 file changed, 244 deletions(-)

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java
deleted file mode 100644
index 555650d2ee..00
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java
+++ /dev/null
@@ -1,244 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.phoenix.end2end;
-
-import static org.apache.phoenix.util.TestUtil.flush;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.Map;
-
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.regionserver.HRegionServer;
-import org.apache.hadoop.hbase.regionserver.MetricsRegionServer;
-import org.apache.hadoop.hbase.regionserver.MetricsRegionServerWrapper;
-import org.apache.phoenix.thirdparty.com.google.common.base.MoreObjects;
-import org.apache.phoenix.thirdparty.com.google.common.collect.Maps;
-import org.apache.phoenix.util.ReadOnlyProps;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-@Category(NeedsOwnMiniClusterTest.class)
-public class BloomFilterIT extends ParallelStatsDisabledIT {
-
-private static final Logger LOGGER = LoggerFactory.getLogger(BloomFilterIT.class);
-private static class BloomFilterMetrics {
-// total lookup requests
-private long requestsCount;
-// requests where key does not exist
-private long negativeResultsCount;
-// potential lookup requests rejected because no bloom filter present in storefile
-private long eligibleRequestsCount;
-
-private BloomFilterMetrics() {
-this.requestsCount = 0;
-this.negativeResultsCount = 0;
-this.eligibleRequestsCount = 0;
-}
-
-private BloomFilterMetrics(long requestsCount, long negativeResultsCount, long eligibleRequestsCount) {
-this.requestsCount = requestsCount;
-this.negativeResultsCount = negativeResultsCount;
-this.eligibleRequestsCount = eligibleRequestsCount;
-}
-
-@Override
-public boolean equals(Object obj) {
-if (this == obj) {
-return true;
-}
-if (obj == null || getClass() != obj.getClass()) {
-return false;
-}
-BloomFilterMetrics rhs = (BloomFilterMetrics)obj;
-return (this.requestsCount == rhs.requestsCount &&
-this.negativeResultsCount == rhs.negativeResultsCount &&
-this.eligibleRequestsCount == rhs.eligibleRequestsCount);
-}
-
-@Override
-public String toString() {
-return MoreObjects.toStringHelper(this)
-.add("requestsCount", requestsCount)
-.add("negativeResultsCount", negativeResultsCount)
-.add("eligibleRequestsCount", eligibleRequestsCount)
-.toString();
-}
-}
-private BloomFilterMetrics beforeMetrics;
-
-private BloomFilterMetrics 

Apache-Phoenix | master | HBase 2.4 | Build #650 FAILURE

2024-02-24 Thread Apache Jenkins Server

master branch  HBase 2.4  build #650 status FAILURE
Build #650 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/master/650/


(phoenix) branch 5.2 updated: PHOENIX-7229 Leverage bloom filters for single key point lookups (#1832)

2024-02-24 Thread vjasani
This is an automated email from the ASF dual-hosted git repository.

vjasani pushed a commit to branch 5.2
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/5.2 by this push:
 new 931a3e7743 PHOENIX-7229 Leverage bloom filters for single key point lookups (#1832)
931a3e7743 is described below

commit 931a3e7743c47696cb14fb94de23070f841e0bcf
Author: tkhurana 
AuthorDate: Sat Feb 24 21:20:15 2024 -0800

PHOENIX-7229 Leverage bloom filters for single key point lookups (#1832)

Co-authored-by: Viraj Jasani 
---
 .../phoenix/iterate/BaseResultIterators.java   |  17 +-
 phoenix-core/pom.xml   |   5 +
 .../org/apache/phoenix/end2end/BloomFilterIT.java  | 244 +
 .../apache/phoenix/compile/WhereOptimizerTest.java |  50 +
 .../java/org/apache/phoenix/util/TestUtil.java |   5 +
 5 files changed, 320 insertions(+), 1 deletion(-)

diff --git a/phoenix-core-client/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java b/phoenix-core-client/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index 5fda525316..359cf2255b 100644
--- a/phoenix-core-client/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ b/phoenix-core-client/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -37,6 +37,7 @@ import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.EOFException;
+import java.io.IOException;
 import java.sql.SQLException;
 import java.util.*;
 import java.util.Arrays;
@@ -72,6 +73,7 @@ import org.apache.phoenix.compile.StatementContext;
 import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
 import org.apache.phoenix.coprocessorclient.HashJoinCacheNotFoundException;
 import org.apache.phoenix.coprocessorclient.UngroupedAggregateRegionObserverHelper;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionInfo;
 import org.apache.phoenix.execute.MutationState;
 import org.apache.phoenix.execute.ScanPlan;
@@ -936,7 +938,20 @@ public abstract class BaseResultIterators extends ExplainTable implements Result
 if (!isLocalIndex && scanRanges.isPointLookup() && !scanRanges.useSkipScanFilter()) {
 List<List<Scan>> parallelScans = Lists.newArrayListWithExpectedSize(1);
 List<Scan> scans = Lists.newArrayListWithExpectedSize(1);
-scans.add(context.getScan());
+Scan scanFromContext = context.getScan();
+if (scanRanges.getPointLookupCount() == 1) {
+// leverage bloom filter for single key point lookup by turning scan to
+// Get Scan#isGetScan()
+try {
+scanFromContext = new Scan(context.getScan());
+} catch (IOException e) {
+LOGGER.error("Failure to construct point lookup scan", e);
+throw new PhoenixIOException(e);
+}
+scanFromContext.withStopRow(scanFromContext.getStartRow(),
+scanFromContext.includeStartRow());
+}
+scans.add(scanFromContext);
 parallelScans.add(scans);
 generateEstimates(scanRanges, table, GuidePostsInfo.NO_GUIDEPOST,
 GuidePostsInfo.NO_GUIDEPOST.isEmptyGuidePost(), parallelScans, estimates,
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 6b0738e365..36f994d6ae 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -227,6 +227,11 @@
 <artifactId>hbase-hadoop2-compat</artifactId>
 <scope>test</scope>
 </dependency>
+<dependency>
+<groupId>org.apache.hbase</groupId>
+<artifactId>hbase-hadoop-compat</artifactId>
+<scope>test</scope>
+</dependency>
 <dependency>
 <groupId>org.apache.hbase</groupId>
 <artifactId>hbase-mapreduce</artifactId>
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java
new file mode 100644
index 00..555650d2ee
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java
@@ -0,0 +1,244 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
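For context on the BaseResultIterators hunk above: HBase only consults store-file bloom filters when a request is shaped like a Get, i.e. when Scan#isGetScan() is true (stop row equal to the start row and inclusive). Below is a minimal, self-contained sketch of that conversion against the stock HBase 2.x client API; the class and method names here are illustrative placeholders, not Phoenix code.

import java.io.IOException;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class PointLookupScanSketch {

    // Copies the caller's Scan and pins its stop row to the start row so that
    // Scan#isGetScan() becomes true and region servers can check bloom filters.
    static Scan toGetShapedScan(Scan original) throws IOException {
        Scan copy = new Scan(original); // the Scan copy constructor declares IOException
        copy.withStopRow(copy.getStartRow(), copy.includeStartRow());
        return copy;
    }

    public static void main(String[] args) throws IOException {
        Scan scan = new Scan().withStartRow(Bytes.toBytes("row-1"), true);
        System.out.println("before: isGetScan=" + scan.isGetScan());
        System.out.println("after:  isGetScan=" + toGetShapedScan(scan).isGetScan());
    }
}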

(phoenix) branch master updated: PHOENIX-7229 Leverage bloom filters for single key point lookups (#1832)

2024-02-24 Thread vjasani
This is an automated email from the ASF dual-hosted git repository.

vjasani pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/master by this push:
 new 00bb0d0517 PHOENIX-7229 Leverage bloom filters for single key point lookups (#1832)
00bb0d0517 is described below

commit 00bb0d0517cb9d56dcf6c61b37421728667d2753
Author: tkhurana 
AuthorDate: Sat Feb 24 21:20:15 2024 -0800

PHOENIX-7229 Leverage bloom filters for single key point lookups (#1832)

Co-authored-by: Viraj Jasani 
---
 .../phoenix/iterate/BaseResultIterators.java   |  17 +-
 phoenix-core/pom.xml   |   5 +
 .../org/apache/phoenix/end2end/BloomFilterIT.java  | 244 +
 .../apache/phoenix/compile/WhereOptimizerTest.java |  50 +
 .../java/org/apache/phoenix/util/TestUtil.java |   5 +
 5 files changed, 320 insertions(+), 1 deletion(-)

diff --git a/phoenix-core-client/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java b/phoenix-core-client/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
index 5fda525316..359cf2255b 100644
--- a/phoenix-core-client/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
+++ b/phoenix-core-client/src/main/java/org/apache/phoenix/iterate/BaseResultIterators.java
@@ -37,6 +37,7 @@ import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.EOFException;
+import java.io.IOException;
 import java.sql.SQLException;
 import java.util.*;
 import java.util.Arrays;
@@ -72,6 +73,7 @@ import org.apache.phoenix.compile.StatementContext;
 import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
 import org.apache.phoenix.coprocessorclient.HashJoinCacheNotFoundException;
 import org.apache.phoenix.coprocessorclient.UngroupedAggregateRegionObserverHelper;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionInfo;
 import org.apache.phoenix.execute.MutationState;
 import org.apache.phoenix.execute.ScanPlan;
@@ -936,7 +938,20 @@ public abstract class BaseResultIterators extends ExplainTable implements Result
 if (!isLocalIndex && scanRanges.isPointLookup() && !scanRanges.useSkipScanFilter()) {
 List<List<Scan>> parallelScans = Lists.newArrayListWithExpectedSize(1);
 List<Scan> scans = Lists.newArrayListWithExpectedSize(1);
-scans.add(context.getScan());
+Scan scanFromContext = context.getScan();
+if (scanRanges.getPointLookupCount() == 1) {
+// leverage bloom filter for single key point lookup by turning scan to
+// Get Scan#isGetScan()
+try {
+scanFromContext = new Scan(context.getScan());
+} catch (IOException e) {
+LOGGER.error("Failure to construct point lookup scan", e);
+throw new PhoenixIOException(e);
+}
+scanFromContext.withStopRow(scanFromContext.getStartRow(),
+scanFromContext.includeStartRow());
+}
+scans.add(scanFromContext);
 parallelScans.add(scans);
 generateEstimates(scanRanges, table, GuidePostsInfo.NO_GUIDEPOST,
 GuidePostsInfo.NO_GUIDEPOST.isEmptyGuidePost(), parallelScans, estimates,
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index 6b0738e365..36f994d6ae 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -227,6 +227,11 @@
 <artifactId>hbase-hadoop2-compat</artifactId>
 <scope>test</scope>
 </dependency>
+<dependency>
+<groupId>org.apache.hbase</groupId>
+<artifactId>hbase-hadoop-compat</artifactId>
+<scope>test</scope>
+</dependency>
 <dependency>
 <groupId>org.apache.hbase</groupId>
 <artifactId>hbase-mapreduce</artifactId>
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java
new file mode 100644
index 00..555650d2ee
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/BloomFilterIT.java
@@ -0,0 +1,244 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the 

Apache-Phoenix | master | HBase 2.5 | Build #648 SUCCESS

2024-02-24 Thread Apache Jenkins Server

master branch  HBase 2.5  build #648 status SUCCESS
Build #648 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/master/648/


Apache-Phoenix | master | HBase 2.5 | Build #649 FAILURE

2024-02-24 Thread Apache Jenkins Server

master branch  HBase 2.5  build #649 status FAILURE
Build #649 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/master/649/


Apache-Phoenix | 5.2 | HBase 2.4 | Build #3 FAILURE

2024-02-24 Thread Apache Jenkins Server

5.2 branch  HBase 2.4  build #3 status FAILURE
Build #3 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.2/3/


Apache-Phoenix | master | HBase 2.4 | Build #649 SUCCESS

2024-02-24 Thread Apache Jenkins Server

master branch  HBase 2.4  build #649 status SUCCESS
Build #649 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/master/649/


Apache-Phoenix | 5.2 | HBase 2.5 | Build #3 FAILURE

2024-02-24 Thread Apache Jenkins Server

5.2 branch  HBase 2.5  build #3 status FAILURE
Build #3 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.2/3/


Apache-Phoenix | master | HBase 2.4 | Build #648 FAILURE

2024-02-24 Thread Apache Jenkins Server

master branch  HBase 2.4  build #648 status FAILURE
Build #648 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/master/648/


Apache-Phoenix | 5.1 | HBase 2.4 | Build #322 SUCCESS

2024-02-24 Thread Apache Jenkins Server

5.1 branch  HBase 2.4  build #322 status SUCCESS
Build #322 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.1/322/


Apache-Phoenix | 5.1 | HBase 2.5 | Build #322 SUCCESS

2024-02-24 Thread Apache Jenkins Server

5.1 branch  HBase 2.5  build #322 status SUCCESS
Build #322 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.1/322/


Apache-Phoenix | 5.1 | HBase 2.3 | Build #322 SUCCESS

2024-02-24 Thread Apache Jenkins Server

5.1 branch  HBase 2.3  build #322 status SUCCESS
Build #322 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.1/322/


Apache-Phoenix | 5.1 | HBase 2.2 | Build #322 FAILURE

2024-02-24 Thread Apache Jenkins Server

5.1 branch  HBase 2.2  build #322 status FAILURE
Build #322 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.1/322/


Apache-Phoenix | 5.1 | HBase 2.1 | Build #322 FAILURE

2024-02-24 Thread Apache Jenkins Server

5.1 branch  HBase 2.1  build #322 status FAILURE
Build #322 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.1/322/


(phoenix) branch 5.1 updated: PHOENIX-7234 Bump org.apache.commons:commons-compress from 1.21 to 1.26.0 (#1834)

2024-02-24 Thread vjasani
This is an automated email from the ASF dual-hosted git repository.

vjasani pushed a commit to branch 5.1
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/5.1 by this push:
 new 26e72ee701 PHOENIX-7234 Bump org.apache.commons:commons-compress from 1.21 to 1.26.0 (#1834)
26e72ee701 is described below

commit 26e72ee701b6112a26530cd1655777b46ecb08fe
Author: Istvan Toth 
AuthorDate: Sat Feb 24 21:14:40 2024 +0100

PHOENIX-7234 Bump org.apache.commons:commons-compress from 1.21 to 1.26.0 (#1834)
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index edf1ebb582..dec3ade754 100644
--- a/pom.xml
+++ b/pom.xml
@@ -120,7 +120,7 @@
 2.11.0
 3.8
 1.0
-1.21
+1.26.0
 1.9.0
 1.0-1
 2.0.1



(phoenix) branch 5.2 updated: PHOENIX-7234 Bump org.apache.commons:commons-compress from 1.21 to 1.26.0 (#1834)

2024-02-24 Thread vjasani
This is an automated email from the ASF dual-hosted git repository.

vjasani pushed a commit to branch 5.2
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/5.2 by this push:
 new 37c996c1b8 PHOENIX-7234 Bump org.apache.commons:commons-compress from 1.21 to 1.26.0 (#1834)
37c996c1b8 is described below

commit 37c996c1b8841e5ec2b7dbc15495f51451e45494
Author: Istvan Toth 
AuthorDate: Sat Feb 24 21:14:40 2024 +0100

PHOENIX-7234 Bump org.apache.commons:commons-compress from 1.21 to 1.26.0 (#1834)
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index acedb120f4..0ca0441cdf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -113,7 +113,7 @@
 2.11.0
 3.8
 1.0
-1.21
+1.26.0
 1.9.0
 1.0-1
 2.0.1



(phoenix) branch master updated: PHOENIX-7234 Bump org.apache.commons:commons-compress from 1.21 to 1.26.0 (#1834)

2024-02-24 Thread vjasani
This is an automated email from the ASF dual-hosted git repository.

vjasani pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/master by this push:
 new 6a2333174e PHOENIX-7234 Bump org.apache.commons:commons-compress from 1.21 to 1.26.0 (#1834)
6a2333174e is described below

commit 6a2333174e55704e3d46f75a59e4c3c350c21a72
Author: Istvan Toth 
AuthorDate: Sat Feb 24 21:14:40 2024 +0100

PHOENIX-7234 Bump org.apache.commons:commons-compress from 1.21 to 1.26.0 (#1834)
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index acedb120f4..0ca0441cdf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -113,7 +113,7 @@
 2.11.0
 3.8
 1.0
-1.21
+1.26.0
 1.9.0
 1.0-1
 2.0.1



(phoenix) branch 5.2 updated: PHOENIX-7223 Make Sure Tools Always Close HBase Connections on Exit

2024-02-24 Thread vjasani
This is an automated email from the ASF dual-hosted git repository.

vjasani pushed a commit to branch 5.2
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/5.2 by this push:
 new c5e3c999ad PHOENIX-7223 Make Sure Tools Always Close HBase Connections on Exit
c5e3c999ad is described below

commit c5e3c999ad1bed9b5bdafd3f02533e5b0e6aa307
Author: Istvan Toth 
AuthorDate: Tue Feb 20 13:01:19 2024 +0100

PHOENIX-7223 Make Sure Tools Always Close HBase Connections on Exit

* don't throw exceptions from Tools, log the error and return non-zero exit code
* Close all Phoenix Connections in Tools
* Close cached CQSI objects on PhoenixDriver.close()
---
 .../jdbc/ClusterRoleRecordGeneratorTool.java   |  19 ++-
 .../org/apache/phoenix/jdbc/PhoenixDriver.java |  13 +-
 .../apache/phoenix/jdbc/PhoenixHAAdminTool.java|  63 
 .../org/apache/phoenix/schema/tool/SchemaTool.java |  29 ++--
 .../phoenix/mapreduce/AbstractBulkLoadTool.java| 159 -
 .../apache/phoenix/mapreduce/OrphanViewTool.java   |   2 +
 .../phoenix/mapreduce/index/IndexUpgradeTool.java  |  11 +-
 .../phoenix/schema/stats/UpdateStatisticsTool.java |  19 ++-
 .../util/MergeViewIndexIdSequencesTool.java|  17 +--
 .../apache/phoenix/end2end/CsvBulkLoadToolIT.java  |  41 +++---
 .../phoenix/end2end/RegexBulkLoadToolIT.java   |  20 +--
 11 files changed, 221 insertions(+), 172 deletions(-)

diff --git a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/ClusterRoleRecordGeneratorTool.java b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/ClusterRoleRecordGeneratorTool.java
index 49ec3db61a..93899f87a2 100644
--- a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/ClusterRoleRecordGeneratorTool.java
+++ b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/ClusterRoleRecordGeneratorTool.java
@@ -66,13 +66,18 @@ public class ClusterRoleRecordGeneratorTool extends Configured implements Tool {
 
 @Override
 public int run(String[] args) throws Exception {
-String fileName = getConf().get(PHOENIX_HA_GENERATOR_FILE_ATTR);
-File file = StringUtils.isEmpty(fileName)
-? File.createTempFile("phoenix.ha.cluster.role.records", ".json")
-: new File(fileName);
-JacksonUtil.getObjectWriterPretty().writeValue(file, listAllRecordsByZk());
-System.out.println("Created JSON file '" + file + "'");
-return 0;
+try {
+String fileName = getConf().get(PHOENIX_HA_GENERATOR_FILE_ATTR);
+File file = StringUtils.isEmpty(fileName)
+? File.createTempFile("phoenix.ha.cluster.role.records", ".json")
+: new File(fileName);
+JacksonUtil.getObjectWriterPretty().writeValue(file, listAllRecordsByZk());
+System.out.println("Created JSON file '" + file + "'");
+return 0;
+} catch (Exception e) {
+e.printStackTrace();
+return -1;
+}
 }
 
 List listAllRecordsByZk() throws IOException {
diff --git a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
index ca412d5238..8bdc6ea182 100644
--- a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
+++ b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
@@ -144,6 +144,7 @@ public final class PhoenixDriver extends PhoenixEmbeddedDriver {
 }
 
 // One entry per cluster here
+// TODO that's not true, we can have multiple connections with different configs / principals
 private final Cache connectionQueryServicesCache =
 initializeConnectionCache();
 
@@ -341,8 +342,18 @@ public final class PhoenixDriver extends PhoenixEmbeddedDriver {
 services = null;
 }
 }
+
+if (connectionQueryServicesCache != null) {
+try {
+for (ConnectionQueryServices cqsi : connectionQueryServicesCache.asMap().values()) {
+cqsi.close();
+}
+} catch (Exception e) {
+LOGGER.warn("Failed to close ConnectionQueryServices instance", e);
+}
+}
 }
-
+
 private enum LockMode {
 READ, WRITE
 };
diff --git a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixHAAdminTool.java b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixHAAdminTool.java
index c6bdadc335..e7a9cd7a22 100644
--- a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixHAAdminTool.java
+++ b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixHAAdminTool.java
@@ -105,38 +105,43 @@ public class PhoenixHAAdminTool extends Configured implements Tool {
 return RET_ARGUMENT_ERROR;
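The recurring pattern in this commit - catch inside Tool#run(), log, and return a non-zero code instead of letting the exception escape, so connection cleanup always runs - is sketched below in a minimal, standalone form. It assumes only Hadoop's Tool/ToolRunner API; the class name and messages are placeholders, not Phoenix code.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExitCodeToolSketch extends Configured implements Tool {

    private static final Logger LOGGER = LoggerFactory.getLogger(ExitCodeToolSketch.class);

    @Override
    public int run(String[] args) {
        try {
            // ... the tool's real work would go here ...
            return 0;
        } catch (Exception e) {
            // Log and translate the failure into an exit code rather than throwing,
            // so callers (and any finally/close logic) still run to completion.
            LOGGER.error("Tool failed", e);
            return -1;
        }
    }

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new Configuration(), new ExitCodeToolSketch(), args));
    }
}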
 

Apache-Phoenix | master | HBase 2.5 | Build #647 FAILURE

2024-02-24 Thread Apache Jenkins Server

master branch  HBase 2.5  build #647 status FAILURE
Build #647 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/master/647/


Apache-Phoenix | master | HBase 2.4 | Build #647 FAILURE

2024-02-24 Thread Apache Jenkins Server

master branch  HBase 2.4  build #647 status FAILURE
Build #647 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/master/647/


Apache-Phoenix | 5.1 | HBase 2.4 | Build #321 SUCCESS

2024-02-24 Thread Apache Jenkins Server

5.1 branch  HBase 2.4  build #321 status SUCCESS
Build #321 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.1/321/


Apache-Phoenix | 5.1 | HBase 2.5 | Build #321 SUCCESS

2024-02-24 Thread Apache Jenkins Server

5.1 branch  HBase 2.5  build #321 status SUCCESS
Build #321 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.1/321/


Apache-Phoenix | 5.1 | HBase 2.3 | Build #321 SUCCESS

2024-02-24 Thread Apache Jenkins Server

5.1 branch  HBase 2.3  build #321 status SUCCESS
Build #321 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.1/321/


Apache-Phoenix | 5.1 | HBase 2.2 | Build #321 FAILURE

2024-02-24 Thread Apache Jenkins Server

5.1 branch  HBase 2.2  build #321 status FAILURE
Build #321 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.1/321/


(phoenix) branch dependabot/maven/org.apache.commons-commons-compress-1.26.0 deleted (was c634d78f5e)

2024-02-24 Thread github-bot
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a change to branch dependabot/maven/org.apache.commons-commons-compress-1.26.0
in repository https://gitbox.apache.org/repos/asf/phoenix.git


 was c634d78f5e Bump org.apache.commons:commons-compress from 1.21 to 1.26.0

The revisions that were on this branch are still contained in
other references; therefore, this change does not discard any commits
from the repository.



Apache-Phoenix | 5.1 | HBase 2.1 | Build #321 FAILURE

2024-02-24 Thread Apache Jenkins Server

5.1 branch  HBase 2.1  build #321 status FAILURE
Build #321 https://ci-hadoop.apache.org/job/Phoenix/job/Phoenix-mulitbranch/job/5.1/321/


(phoenix) branch 5.1 updated: PHOENIX-7223 Make Sure Tools Always Close HBase Connections on Exit

2024-02-24 Thread stoty
This is an automated email from the ASF dual-hosted git repository.

stoty pushed a commit to branch 5.1
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/5.1 by this push:
 new 2699e530e4 PHOENIX-7223 Make Sure Tools Always Close HBase Connections on Exit
2699e530e4 is described below

commit 2699e530e41a5c0b1f08b607d2c3aca9064aae81
Author: Istvan Toth 
AuthorDate: Tue Feb 20 13:01:19 2024 +0100

PHOENIX-7223 Make Sure Tools Always Close HBase Connections on Exit

* don't throw exceptions from Tools, log the error and return non-zero exit code
* Close all Phoenix Connections in Tools
* Close cached CQSI objects on PhoenixDriver.close()
---
 .../apache/phoenix/end2end/CsvBulkLoadToolIT.java  |  40 +++---
 .../phoenix/end2end/RegexBulkLoadToolIT.java   |  20 +--
 .../org/apache/phoenix/jdbc/PhoenixDriver.java |  13 +-
 .../phoenix/mapreduce/AbstractBulkLoadTool.java| 157 -
 .../apache/phoenix/mapreduce/OrphanViewTool.java   |   2 +
 .../phoenix/mapreduce/index/IndexUpgradeTool.java  |  11 +-
 .../phoenix/schema/stats/UpdateStatisticsTool.java |  19 ++-
 .../org/apache/phoenix/schema/tool/SchemaTool.java |  29 ++--
 .../util/MergeViewIndexIdSequencesTool.java|  17 +--
 9 files changed, 174 insertions(+), 134 deletions(-)

diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
index c6e8246c0a..60d7071f89 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/CsvBulkLoadToolIT.java
@@ -35,7 +35,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.FileAlreadyExistsException;
 import org.apache.phoenix.end2end.index.IndexTestUtil;
 import org.apache.phoenix.jdbc.PhoenixConnection;
 import org.apache.phoenix.mapreduce.CsvBulkLoadTool;
@@ -146,12 +145,10 @@ public class CsvBulkLoadToolIT extends BaseOwnClusterIT {
 "--table", "table1",
 "--schema", "s",
 "--zookeeper", zkQuorum});
-fail("Bulk loading error should have happened earlier");
-} catch (Exception e){
-assertTrue(e.getMessage().contains("Bulk Loading error: Bulk loading is disabled for " +
-"non empty tables with global indexes, because it will corrupt " +
-"the global index table in most cases.\n" +
-"Use the --corruptindexes option to override this check."));
+assertTrue("Bulk loading error should have happened earlier", exitCode != 0);
+} catch (Exception e) {
+fail("Tools should return non-zero exit codes on failure"
++ " instead of throwing an exception");
 }
 
 ResultSet rs = stmt.executeQuery("SELECT id, name, t FROM s.table1 ORDER BY id");
@@ -393,7 +390,7 @@ public class CsvBulkLoadToolIT extends BaseOwnClusterIT {
 + " (FIRST_NAME ASC)"
 + " INCLUDE (LAST_NAME)";
 stmt.execute(ddl);
-
+
 FileSystem fs = FileSystem.get(getUtility().getConfiguration());
 FSDataOutputStream outputStream = fs.create(new Path("/tmp/input3.csv"));
 PrintWriter printWriter = new PrintWriter(outputStream);
@@ -518,17 +515,17 @@ public class CsvBulkLoadToolIT extends BaseOwnClusterIT {
 CsvBulkLoadTool csvBulkLoadTool = new CsvBulkLoadTool();
 csvBulkLoadTool.setConf(getUtility().getConfiguration());
 try {
-csvBulkLoadTool.run(new String[] {
+int exitCode = csvBulkLoadTool.run(new String[] {
 "--input", "/tmp/input4.csv",
 "--table", tableName,
 "--zookeeper", zkQuorum });
-fail(String.format("Table %s not created, hence should fail",tableName));
+assertTrue(String.format("Table %s not created, hence should fail", tableName),
+exitCode != 0);
 } catch (Exception ex) {
-assertTrue(ex instanceof IllegalArgumentException); 
-assertTrue(ex.getMessage().contains(String.format("Table %s not found", tableName)));
-}
+fail("Tools should return non-zero exit codes on failure"
++ " instead of throwing an exception");}
 }
-
+
 @Test
 public void testAlreadyExistsOutputPath() {
 String tableName = "TABLE9";
@@ -537,7 +534,7 @@ public class CsvBulkLoadToolIT extends BaseOwnClusterIT {
 Statement stmt = conn.createStatement();
 stmt.execute("CREATE TABLE " + tableName + "(ID INTEGER NOT NULL PRIMARY KEY, "
 + "FIRST_NAME 

(phoenix) branch master updated: PHOENIX-7223 Make Sure Tools Always Close HBase Connections on Exit

2024-02-24 Thread stoty
This is an automated email from the ASF dual-hosted git repository.

stoty pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/master by this push:
 new 49ad9db27e PHOENIX-7223 Make Sure Tools Always Close HBase Connections on Exit
49ad9db27e is described below

commit 49ad9db27e6a196edfa469274a949954e3ea23a5
Author: Istvan Toth 
AuthorDate: Tue Feb 20 13:01:19 2024 +0100

PHOENIX-7223 Make Sure Tools Always Close HBase Connections on Exit

* don't throw exceptions from Tools, log the error and return non-zero exit code
* Close all Phoenix Connections in Tools
* Close cached CQSI objects on PhoenixDriver.close()
---
 .../jdbc/ClusterRoleRecordGeneratorTool.java   |  19 ++-
 .../org/apache/phoenix/jdbc/PhoenixDriver.java |  13 +-
 .../apache/phoenix/jdbc/PhoenixHAAdminTool.java|  63 
 .../org/apache/phoenix/schema/tool/SchemaTool.java |  29 ++--
 .../phoenix/mapreduce/AbstractBulkLoadTool.java| 159 -
 .../apache/phoenix/mapreduce/OrphanViewTool.java   |   2 +
 .../phoenix/mapreduce/index/IndexUpgradeTool.java  |  11 +-
 .../phoenix/schema/stats/UpdateStatisticsTool.java |  19 ++-
 .../util/MergeViewIndexIdSequencesTool.java|  17 +--
 .../apache/phoenix/end2end/CsvBulkLoadToolIT.java  |  41 +++---
 .../phoenix/end2end/RegexBulkLoadToolIT.java   |  20 +--
 11 files changed, 221 insertions(+), 172 deletions(-)

diff --git a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/ClusterRoleRecordGeneratorTool.java b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/ClusterRoleRecordGeneratorTool.java
index 49ec3db61a..93899f87a2 100644
--- a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/ClusterRoleRecordGeneratorTool.java
+++ b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/ClusterRoleRecordGeneratorTool.java
@@ -66,13 +66,18 @@ public class ClusterRoleRecordGeneratorTool extends Configured implements Tool {
 
 @Override
 public int run(String[] args) throws Exception {
-String fileName = getConf().get(PHOENIX_HA_GENERATOR_FILE_ATTR);
-File file = StringUtils.isEmpty(fileName)
-? File.createTempFile("phoenix.ha.cluster.role.records", ".json")
-: new File(fileName);
-JacksonUtil.getObjectWriterPretty().writeValue(file, listAllRecordsByZk());
-System.out.println("Created JSON file '" + file + "'");
-return 0;
+try {
+String fileName = getConf().get(PHOENIX_HA_GENERATOR_FILE_ATTR);
+File file = StringUtils.isEmpty(fileName)
+? File.createTempFile("phoenix.ha.cluster.role.records", ".json")
+: new File(fileName);
+JacksonUtil.getObjectWriterPretty().writeValue(file, listAllRecordsByZk());
+System.out.println("Created JSON file '" + file + "'");
+return 0;
+} catch (Exception e) {
+e.printStackTrace();
+return -1;
+}
 }
 
 List listAllRecordsByZk() throws IOException {
diff --git a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
index ca412d5238..8bdc6ea182 100644
--- a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
+++ b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
@@ -144,6 +144,7 @@ public final class PhoenixDriver extends PhoenixEmbeddedDriver {
 }
 
 // One entry per cluster here
+// TODO that's not true, we can have multiple connections with different configs / principals
 private final Cache connectionQueryServicesCache =
 initializeConnectionCache();
 
@@ -341,8 +342,18 @@ public final class PhoenixDriver extends PhoenixEmbeddedDriver {
 services = null;
 }
 }
+
+if (connectionQueryServicesCache != null) {
+try {
+for (ConnectionQueryServices cqsi : connectionQueryServicesCache.asMap().values()) {
+cqsi.close();
+}
+} catch (Exception e) {
+LOGGER.warn("Failed to close ConnectionQueryServices instance", e);
+}
+}
 }
-
+
 private enum LockMode {
 READ, WRITE
 };
diff --git a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixHAAdminTool.java b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixHAAdminTool.java
index c6bdadc335..e7a9cd7a22 100644
--- a/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixHAAdminTool.java
+++ b/phoenix-core-client/src/main/java/org/apache/phoenix/jdbc/PhoenixHAAdminTool.java
@@ -105,38 +105,43 @@ public class PhoenixHAAdminTool extends Configured implements Tool {
 return RET_ARGUMENT_ERROR;