This is an automated email from the ASF dual-hosted git repository.
stoty pushed a commit to branch 5.2
in repository https://gitbox.apache.org/repos/asf/phoenix.git
The following commit(s) were added to refs/heads/5.2 by this push:
new c8eaac7176 PHOENIX-7481 HBase 3 compatibility changes: Cleanup
deprecated APIs, HTable and HTableDescriptor (#2124)
c8eaac7176 is described below
commit c8eaac717603a95fe2e58c4d0b27ad38ec6102d6
Author: Istvan Toth <[email protected]>
AuthorDate: Wed Jun 11 08:25:47 2025 +0200
PHOENIX-7481 HBase 3 compatibility changes: Cleanup deprecated APIs, HTable
and HTableDescriptor (#2124)
Co-authored-by: Villő Szűcs <[email protected]>
---
.../org/apache/phoenix/execute/DelegateHTable.java | 107 -------------------
.../phoenix/transaction/OmidTransactionTable.java | 31 +++---
.../coprocessor/MetaDataRegionObserver.java | 4 +-
.../phoenix/end2end/AlterTableWithViewsIT.java | 5 +-
.../phoenix/end2end/ViewTTLNotEnabledIT.java | 4 +-
.../apache/phoenix/end2end/index/BaseIndexIT.java | 3 +-
.../end2end/index/BaseIndexWithRegionMovesIT.java | 3 +-
.../end2end/index/ImmutableIndexExtendedIT.java | 4 +-
.../phoenix/compat/hbase/CompatDelegateHTable.java | 111 ++++++++++++++++++++
.../compat/hbase/CompatOmidTransactionTable.java | 13 ---
.../phoenix/compat/hbase/CompatDelegateHTable.java | 110 ++++++++++++++++++++
.../compat/hbase/CompatOmidTransactionTable.java | 13 ---
.../phoenix/compat/hbase/CompatDelegateHTable.java | 111 ++++++++++++++++++++
.../compat/hbase/CompatOmidTransactionTable.java | 13 ---
.../phoenix/compat/hbase/CompatDelegateHTable.java | 113 +++++++++++++++++++++
.../compat/hbase/CompatOmidTransactionTable.java | 13 ---
16 files changed, 469 insertions(+), 189 deletions(-)
diff --git
a/phoenix-core-client/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
b/phoenix-core-client/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
index d099bd1680..0150364dd1 100644
---
a/phoenix-core-client/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
+++
b/phoenix-core-client/src/main/java/org/apache/phoenix/execute/DelegateHTable.java
@@ -23,8 +23,6 @@ import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CompareOperator;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.CheckAndMutate;
@@ -38,13 +36,11 @@ import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Row;
-import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;
import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.phoenix.compat.hbase.CompatDelegateHTable;
@@ -69,21 +65,11 @@ public class DelegateHTable extends CompatDelegateHTable
implements Table {
return delegate.getConfiguration();
}
- @Override
- public HTableDescriptor getTableDescriptor() throws IOException {
- return delegate.getTableDescriptor();
- }
-
@Override
public boolean exists(Get get) throws IOException {
return delegate.exists(get);
}
- @Override
- public boolean[] existsAll(List<Get> gets) throws IOException {
- return delegate.existsAll(gets);
- }
-
@Override
public void batch(List<? extends Row> actions, Object[] results) throws
IOException,
InterruptedException {
@@ -131,18 +117,6 @@ public class DelegateHTable extends CompatDelegateHTable
implements Table {
delegate.put(puts);
}
- @Override
- public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Put put)
- throws IOException {
- return delegate.checkAndPut(row, family, qualifier, value, put);
- }
-
- @Override
- public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp,
- byte[] value, Put put) throws IOException {
- return delegate.checkAndPut(row, family, qualifier, compareOp, value,
put);
- }
-
@Override
public void delete(Delete delete) throws IOException {
delegate.delete(delete);
@@ -153,18 +127,6 @@ public class DelegateHTable extends CompatDelegateHTable
implements Table {
delegate.delete(deletes);
}
- @Override
- public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
byte[] value,
- Delete delete) throws IOException {
- return delegate.checkAndDelete(row, family, qualifier, value, delete);
- }
-
- @Override
- public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp,
- byte[] value, Delete delete) throws IOException {
- return delegate.checkAndDelete(row, family, qualifier, compareOp,
value, delete);
- }
-
@Override
public Result append(Append append) throws IOException {
return delegate.append(append);
@@ -230,80 +192,11 @@ public class DelegateHTable extends CompatDelegateHTable
implements Table {
return delegate.checkAndMutate(checkAndMutate);
}
- @Override
- public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp,
- byte[] value, RowMutations mutation) throws IOException {
- return delegate.checkAndMutate(row, family, qualifier, compareOp,
value, mutation);
- }
-
- @Override
- public void setOperationTimeout(int operationTimeout) {
- delegate.setOperationTimeout(operationTimeout);
- }
-
- @Override
- public int getOperationTimeout() {
- return delegate.getOperationTimeout();
- }
-
- @Override
- public int getRpcTimeout() {
- return delegate.getRpcTimeout();
- }
-
- @Override
- public void setRpcTimeout(int rpcTimeout) {
- delegate.setRpcTimeout(rpcTimeout);
- }
-
@Override
public TableDescriptor getDescriptor() throws IOException {
return delegate.getDescriptor();
}
- @Override
- public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op,
- byte[] value, Put put) throws IOException {
- return delegate.checkAndPut(row, family, qualifier, op, value, put);
- }
-
- @Override
- public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op,
- byte[] value, Delete delete) throws IOException {
- return delegate.checkAndDelete(row, family, qualifier, op, value,
delete);
- }
-
- @Override
- public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
- return delegate.checkAndMutate(row, family);
- }
-
- @Override
- public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op,
- byte[] value, RowMutations mutation) throws IOException {
- return delegate.checkAndMutate(row, family, qualifier, op, value,
mutation);
- }
-
- @Override
- public int getReadRpcTimeout() {
- return delegate.getReadRpcTimeout();
- }
-
- @Override
- public void setReadRpcTimeout(int readRpcTimeout) {
- delegate.setReadRpcTimeout(readRpcTimeout);
- }
-
- @Override
- public int getWriteRpcTimeout() {
- return delegate.getWriteRpcTimeout();
- }
-
- @Override
- public void setWriteRpcTimeout(int writeRpcTimeout) {
- delegate.setWriteRpcTimeout(writeRpcTimeout);
- }
-
@Override
public boolean[] exists(List<Get> gets) throws IOException {
return delegate.exists(gets);
diff --git
a/phoenix-core-client/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
b/phoenix-core-client/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
index 483a52b725..e1f534927b 100644
---
a/phoenix-core-client/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
+++
b/phoenix-core-client/src/main/java/org/apache/phoenix/transaction/OmidTransactionTable.java
@@ -26,7 +26,6 @@ import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompareOperator;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
@@ -65,7 +64,6 @@ public class OmidTransactionTable extends
CompatOmidTransactionTable implements
private final boolean addShadowCells;
public OmidTransactionTable() throws SQLException {
- super(null);
this.tTable = null;
this.tx = null;
this.addShadowCells = false;
@@ -80,7 +78,6 @@ public class OmidTransactionTable extends
CompatOmidTransactionTable implements
}
public OmidTransactionTable(PhoenixTransactionContext ctx, Table hTable,
boolean isConflictFree, boolean addShadowCells) throws SQLException {
- super(hTable);
assert(ctx instanceof OmidTransactionContext);
OmidTransactionContext omidTransactionContext =
(OmidTransactionContext) ctx;
@@ -165,7 +162,7 @@ public class OmidTransactionTable extends
CompatOmidTransactionTable implements
return TableName.valueOf(name);
}
- @Override
+ //No @Override for HBase 3 compatibility
public boolean[] existsAll(List<Get> gets) throws IOException {
throw new UnsupportedOperationException();
}
@@ -186,13 +183,13 @@ public class OmidTransactionTable extends
CompatOmidTransactionTable implements
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Put put) throws IOException {
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Delete delete) throws IOException {
throw new UnsupportedOperationException();
@@ -256,42 +253,42 @@ public class OmidTransactionTable extends
CompatOmidTransactionTable implements
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public int getOperationTimeout() {
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public int getRpcTimeout() {
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public void setOperationTimeout(int arg0) {
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public void setRpcTimeout(int arg0) {
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public int getWriteRpcTimeout() {
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public void setWriteRpcTimeout(int writeRpcTimeout) {
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public int getReadRpcTimeout() {
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public void setReadRpcTimeout(int readRpcTimeout) {
throw new UnsupportedOperationException();
}
@@ -306,19 +303,19 @@ public class OmidTransactionTable extends
CompatOmidTransactionTable implements
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op, byte[] value, Put put)
throws IOException {
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op, byte[] value,
Delete delete) throws IOException {
throw new UnsupportedOperationException();
}
- @Override
+ //No @Override for HBase 3 compatibility
public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op, byte[] value,
RowMutations mutation) throws IOException {
throw new UnsupportedOperationException();
diff --git
a/phoenix-core-server/src/main/java/org/apache/phoenix/coprocessor/MetaDataRegionObserver.java
b/phoenix-core-server/src/main/java/org/apache/phoenix/coprocessor/MetaDataRegionObserver.java
index 0562dfea81..c504490cad 100644
---
a/phoenix-core-server/src/main/java/org/apache/phoenix/coprocessor/MetaDataRegionObserver.java
+++
b/phoenix-core-server/src/main/java/org/apache/phoenix/coprocessor/MetaDataRegionObserver.java
@@ -40,6 +40,7 @@ import javax.annotation.concurrent.GuardedBy;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
@@ -53,7 +54,6 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
-import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
@@ -307,7 +307,7 @@ public class MetaDataRegionObserver implements
RegionObserver,RegionCoprocessor
Scan scan = new Scan();
SingleColumnValueFilter filter = new
SingleColumnValueFilter(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP_BYTES,
- CompareFilter.CompareOp.NOT_EQUAL,
PLong.INSTANCE.toBytes(0L));
+ CompareOperator.NOT_EQUAL, PLong.INSTANCE.toBytes(0L));
filter.setFilterIfMissing(true);
scan.setFilter(filter);
scan.addColumn(PhoenixDatabaseMetaData.TABLE_FAMILY_BYTES,
diff --git
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
index 12106b2753..8183805478 100644
---
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
+++
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/AlterTableWithViewsIT.java
@@ -39,7 +39,6 @@ import java.util.List;
import java.util.Properties;
import org.apache.commons.lang3.ArrayUtils;
-import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
@@ -1012,7 +1011,7 @@ public class AlterTableWithViewsIT extends
SplitSystemCatalogIT {
PName tenantId = isMultiTenant ? PNameFactory.newName(TENANT1) :
null;
PhoenixConnection phoenixConn =
conn.unwrap(PhoenixConnection.class);
- Table htable =
phoenixConn.getQueryServices().getTable(Bytes.toBytes(baseTableName));
+ Table table =
phoenixConn.getQueryServices().getTable(Bytes.toBytes(baseTableName));
assertFalse(phoenixConn.getTable(new PTableKey(null,
baseTableName)).isTransactional());
assertFalse(viewConn.unwrap(PhoenixConnection.class).getTable(new
PTableKey(tenantId, viewOfTable)).isTransactional());
}
@@ -1195,7 +1194,7 @@ public class AlterTableWithViewsIT extends
SplitSystemCatalogIT {
// scan the physical table and verify there is a single row for
the second local index
Scan scan = new Scan();
- HTable table = (HTable)
conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(viewIndexPhysicalTable);
+ Table table =
conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(viewIndexPhysicalTable);
ResultScanner results = table.getScanner(scan);
Result result = results.next();
assertNotNull(result);
diff --git
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewTTLNotEnabledIT.java
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewTTLNotEnabledIT.java
index dfde99b546..73bccb2749 100644
---
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewTTLNotEnabledIT.java
+++
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewTTLNotEnabledIT.java
@@ -18,9 +18,9 @@
package org.apache.phoenix.end2end;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.coprocessor.PhoenixTTLRegionObserver;
import org.apache.phoenix.jdbc.PhoenixResultSet;
@@ -72,7 +72,7 @@ public class ViewTTLNotEnabledIT extends
ParallelStatsDisabledIT {
// Test the coproc is not registered
org.apache.hadoop.hbase.client.Connection hconn =
getUtility().getConnection();
Admin admin = hconn.getAdmin();
- HTableDescriptor tableDescriptor = admin.getTableDescriptor(
+ TableDescriptor tableDescriptor = admin.getDescriptor(
TableName.valueOf(schemaBuilder.getEntityTableName()));
Assert.assertFalse("Coprocessor " +
PhoenixTTLRegionObserver.class.getName()
+ " should not have been added: ",
diff --git
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/BaseIndexIT.java
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/BaseIndexIT.java
index bbfe45580c..df9f16c65e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/BaseIndexIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/BaseIndexIT.java
@@ -48,7 +48,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
@@ -920,7 +919,7 @@ public abstract class BaseIndexIT extends
ParallelStatsDisabledIT {
conn.commit();
// the index table is one row
- HTable table = (HTable)
conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(fullTableName.getBytes());
+ Table table =
conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(fullTableName.getBytes());
ResultScanner resultScanner = table.getScanner(new Scan());
for (Result result : resultScanner) {
System.out.println(result);
diff --git
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/BaseIndexWithRegionMovesIT.java
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/BaseIndexWithRegionMovesIT.java
index 6f4c270591..eaa2b93277 100644
---
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/BaseIndexWithRegionMovesIT.java
+++
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/BaseIndexWithRegionMovesIT.java
@@ -22,7 +22,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
@@ -1028,7 +1027,7 @@ public abstract class BaseIndexWithRegionMovesIT extends
ParallelStatsDisabledWi
conn.commit();
// the index table is one row
- HTable table = (HTable)
conn.unwrap(PhoenixConnection.class).getQueryServices()
+ Table table =
conn.unwrap(PhoenixConnection.class).getQueryServices()
.getTable(fullTableName.getBytes());
ResultScanner resultScanner = table.getScanner(new Scan());
for (Result result : resultScanner) {
diff --git
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ImmutableIndexExtendedIT.java
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ImmutableIndexExtendedIT.java
index 55675b1a06..3547987bd6 100644
---
a/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ImmutableIndexExtendedIT.java
+++
b/phoenix-core/src/it/java/org/apache/phoenix/end2end/index/ImmutableIndexExtendedIT.java
@@ -20,11 +20,11 @@ package org.apache.phoenix.end2end.index;
import org.apache.phoenix.thirdparty.com.google.common.collect.Lists;
import org.apache.phoenix.thirdparty.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
@@ -225,7 +225,7 @@ public class ImmutableIndexExtendedIT extends
ParallelStatsDisabledIT {
byte[] emptyCQ =
EncodedColumnsUtil.getEmptyKeyValueInfo(table).getFirst();
ConnectionQueryServices queryServices =
conn.unwrap(PhoenixConnection.class).getQueryServices();
- HTable htable = (HTable)
queryServices.getTable(table.getPhysicalName().getBytes());
+ Table htable =
queryServices.getTable(table.getPhysicalName().getBytes());
Scan scan = new Scan();
scan.addColumn(emptyCF, emptyCQ);
ResultScanner resultScanner = htable.getScanner(scan);
diff --git
a/phoenix-hbase-compat-2.4.1/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
b/phoenix-hbase-compat-2.4.1/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
index 47cf21d4cb..35b441a4f3 100644
---
a/phoenix-hbase-compat-2.4.1/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
+++
b/phoenix-hbase-compat-2.4.1/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
@@ -17,10 +17,17 @@
package org.apache.phoenix.compat.hbase;
import java.io.IOException;
+import java.util.List;
+import org.apache.hadoop.hbase.CompareOperator;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
public abstract class CompatDelegateHTable implements Table {
@@ -34,4 +41,108 @@ public abstract class CompatDelegateHTable implements Table
{
public Result mutateRow(RowMutations rm) throws IOException {
return delegate.mutateRow(rm);
}
+
+ @Override
+ public HTableDescriptor getTableDescriptor() throws IOException {
+ return delegate.getTableDescriptor();
+ }
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp,
+ byte[] value, Put put) throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, compareOp, value,
put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp,
+ byte[] value, Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, compareOp,
value, delete);
+ }
+
+ @Override
+ public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp,
+ byte[] value, RowMutations mutation) throws IOException {
+ return delegate.checkAndMutate(row, family, qualifier, compareOp,
value, mutation);
+ }
+
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Put put)
+ throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, value, put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
byte[] value,
+ Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, value, delete);
+ }
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op,
+ byte[] value, Put put) throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, op, value, put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op,
+ byte[] value, Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, op, value,
delete);
+ }
+
+ @Override
+ public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op,
+ byte[] value, RowMutations mutation) throws IOException {
+ return delegate.checkAndMutate(row, family, qualifier, op, value,
mutation);
+ }
+
+ @Override
+ public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
+ return delegate.checkAndMutate(row, family);
+ }
+
+ @Override
+ public void setOperationTimeout(int operationTimeout) {
+ delegate.setOperationTimeout(operationTimeout);
+ }
+
+ @Override
+ public int getOperationTimeout() {
+ return delegate.getOperationTimeout();
+ }
+
+ @Override
+ public int getRpcTimeout() {
+ return delegate.getRpcTimeout();
+ }
+
+ @Override
+ public void setRpcTimeout(int rpcTimeout) {
+ delegate.setRpcTimeout(rpcTimeout);
+ }
+
+ @Override
+ public int getReadRpcTimeout() {
+ return delegate.getReadRpcTimeout();
+ }
+
+ @Override
+ public void setReadRpcTimeout(int readRpcTimeout) {
+ delegate.setReadRpcTimeout(readRpcTimeout);
+ }
+
+ @Override
+ public int getWriteRpcTimeout() {
+ return delegate.getWriteRpcTimeout();
+ }
+
+ @Override
+ public void setWriteRpcTimeout(int writeRpcTimeout) {
+ delegate.setWriteRpcTimeout(writeRpcTimeout);
+ }
+
+ @Override
+ public boolean[] existsAll(List<Get> gets) throws IOException {
+ return delegate.existsAll(gets);
+ }
}
diff --git
a/phoenix-hbase-compat-2.4.1/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
b/phoenix-hbase-compat-2.4.1/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
index c4c98c6892..1008d9a909 100644
---
a/phoenix-hbase-compat-2.4.1/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
+++
b/phoenix-hbase-compat-2.4.1/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
@@ -18,28 +18,15 @@ package org.apache.phoenix.compat.hbase;
import java.io.IOException;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.client.Table.CheckAndMutateBuilder;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
public abstract class CompatOmidTransactionTable implements Table {
- protected Table hTable;
-
- public CompatOmidTransactionTable(Table hTable) {
- this.hTable = hTable;
- }
-
- @Override
- public HTableDescriptor getTableDescriptor() throws IOException {
- return hTable.getTableDescriptor();
- }
-
@Override
public Result mutateRow(RowMutations rm) throws IOException {
throw new UnsupportedOperationException();
diff --git
a/phoenix-hbase-compat-2.5.0/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
b/phoenix-hbase-compat-2.5.0/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
index 8e6dca1b79..64ab2c68d2 100644
---
a/phoenix-hbase-compat-2.5.0/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
+++
b/phoenix-hbase-compat-2.5.0/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
@@ -17,11 +17,18 @@
package org.apache.phoenix.compat.hbase;
import java.io.IOException;
+import java.util.List;
+import org.apache.hadoop.hbase.CompareOperator;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
public abstract class CompatDelegateHTable implements Table {
@@ -40,4 +47,107 @@ public abstract class CompatDelegateHTable implements Table
{
public Result mutateRow(RowMutations rm) throws IOException {
return delegate.mutateRow(rm);
}
+
+ @Override
+ public HTableDescriptor getTableDescriptor() throws IOException {
+ return delegate.getTableDescriptor();
+ }
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp,
+ byte[] value, Put put) throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, compareOp, value,
put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp,
+ byte[] value, Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, compareOp,
value, delete);
+ }
+
+ @Override
+ public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp,
+ byte[] value, RowMutations mutation) throws IOException {
+ return delegate.checkAndMutate(row, family, qualifier, compareOp,
value, mutation);
+ }
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Put put)
+ throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, value, put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
byte[] value,
+ Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, value, delete);
+ }
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op,
+ byte[] value, Put put) throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, op, value, put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op,
+ byte[] value, Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, op, value,
delete);
+ }
+
+ @Override
+ public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
CompareOperator op,
+ byte[] value, RowMutations mutation) throws IOException {
+ return delegate.checkAndMutate(row, family, qualifier, op, value,
mutation);
+ }
+
+ @Override
+ public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
+ return delegate.checkAndMutate(row, family);
+ }
+
+ @Override
+ public void setOperationTimeout(int operationTimeout) {
+ delegate.setOperationTimeout(operationTimeout);
+ }
+
+ @Override
+ public int getOperationTimeout() {
+ return delegate.getOperationTimeout();
+ }
+
+ @Override
+ public int getRpcTimeout() {
+ return delegate.getRpcTimeout();
+ }
+
+ @Override
+ public void setRpcTimeout(int rpcTimeout) {
+ delegate.setRpcTimeout(rpcTimeout);
+ }
+
+ @Override
+ public int getReadRpcTimeout() {
+ return delegate.getReadRpcTimeout();
+ }
+
+ @Override
+ public void setReadRpcTimeout(int readRpcTimeout) {
+ delegate.setReadRpcTimeout(readRpcTimeout);
+ }
+
+ @Override
+ public int getWriteRpcTimeout() {
+ return delegate.getWriteRpcTimeout();
+ }
+
+ @Override
+ public void setWriteRpcTimeout(int writeRpcTimeout) {
+ delegate.setWriteRpcTimeout(writeRpcTimeout);
+ }
+
+ @Override
+ public boolean[] existsAll(List<Get> gets) throws IOException {
+ return delegate.existsAll(gets);
+ }
}
diff --git
a/phoenix-hbase-compat-2.5.0/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
b/phoenix-hbase-compat-2.5.0/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
index cfbfe40775..9cd0edec24 100644
---
a/phoenix-hbase-compat-2.5.0/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
+++
b/phoenix-hbase-compat-2.5.0/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
@@ -18,29 +18,16 @@ package org.apache.phoenix.compat.hbase;
import java.io.IOException;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.client.Table.CheckAndMutateBuilder;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
public abstract class CompatOmidTransactionTable implements Table {
- protected Table hTable;
-
- public CompatOmidTransactionTable(Table hTable) {
- this.hTable = hTable;
- }
-
- @Override
- public HTableDescriptor getTableDescriptor() throws IOException {
- return hTable.getTableDescriptor();
- }
-
@Override
public RegionLocator getRegionLocator() throws IOException {
throw new UnsupportedOperationException();
diff --git a/phoenix-hbase-compat-2.5.4/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java b/phoenix-hbase-compat-2.5.4/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
index 8e6dca1b79..616b35dadb 100644
--- a/phoenix-hbase-compat-2.5.4/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
+++ b/phoenix-hbase-compat-2.5.4/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
@@ -17,11 +17,18 @@
package org.apache.phoenix.compat.hbase;
import java.io.IOException;
+import java.util.List;
+import org.apache.hadoop.hbase.CompareOperator;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
public abstract class CompatDelegateHTable implements Table {
@@ -40,4 +47,108 @@ public abstract class CompatDelegateHTable implements Table {
public Result mutateRow(RowMutations rm) throws IOException {
return delegate.mutateRow(rm);
}
+
+ @Override
+ public HTableDescriptor getTableDescriptor() throws IOException {
+ return delegate.getTableDescriptor();
+ }
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
+ byte[] value, Put put) throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, compareOp, value, put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
+ byte[] value, Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, compareOp, value, delete);
+ }
+
+ @Override
+ public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
+ byte[] value, RowMutations mutation) throws IOException {
+ return delegate.checkAndMutate(row, family, qualifier, compareOp, value, mutation);
+ }
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, byte[] value, Put put)
+ throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, value, put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value,
+ Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, value, delete);
+ }
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
+ byte[] value, Put put) throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, op, value, put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
+ byte[] value, Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, op, value, delete);
+ }
+
+
+ @Override
+ public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
+ byte[] value, RowMutations mutation) throws IOException {
+ return delegate.checkAndMutate(row, family, qualifier, op, value, mutation);
+ }
+
+ @Override
+ public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
+ return delegate.checkAndMutate(row, family);
+ }
+
+ @Override
+ public void setOperationTimeout(int operationTimeout) {
+ delegate.setOperationTimeout(operationTimeout);
+ }
+
+ @Override
+ public int getOperationTimeout() {
+ return delegate.getOperationTimeout();
+ }
+
+ @Override
+ public int getRpcTimeout() {
+ return delegate.getRpcTimeout();
+ }
+
+ @Override
+ public void setRpcTimeout(int rpcTimeout) {
+ delegate.setRpcTimeout(rpcTimeout);
+ }
+
+ @Override
+ public int getReadRpcTimeout() {
+ return delegate.getReadRpcTimeout();
+ }
+
+ @Override
+ public void setReadRpcTimeout(int readRpcTimeout) {
+ delegate.setReadRpcTimeout(readRpcTimeout);
+ }
+
+ @Override
+ public int getWriteRpcTimeout() {
+ return delegate.getWriteRpcTimeout();
+ }
+
+ @Override
+ public void setWriteRpcTimeout(int writeRpcTimeout) {
+ delegate.setWriteRpcTimeout(writeRpcTimeout);
+ }
+
+ @Override
+ public boolean[] existsAll(List<Get> gets) throws IOException {
+ return delegate.existsAll(gets);
+ }
}
diff --git a/phoenix-hbase-compat-2.5.4/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java b/phoenix-hbase-compat-2.5.4/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
index cfbfe40775..9cd0edec24 100644
--- a/phoenix-hbase-compat-2.5.4/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
+++ b/phoenix-hbase-compat-2.5.4/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
@@ -18,29 +18,16 @@ package org.apache.phoenix.compat.hbase;
import java.io.IOException;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.client.Table.CheckAndMutateBuilder;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
public abstract class CompatOmidTransactionTable implements Table {
- protected Table hTable;
-
- public CompatOmidTransactionTable(Table hTable) {
- this.hTable = hTable;
- }
-
- @Override
- public HTableDescriptor getTableDescriptor() throws IOException {
- return hTable.getTableDescriptor();
- }
-
@Override
public RegionLocator getRegionLocator() throws IOException {
throw new UnsupportedOperationException();
diff --git a/phoenix-hbase-compat-2.6.0/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java b/phoenix-hbase-compat-2.6.0/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
index 8e6dca1b79..b2170ad527 100644
--- a/phoenix-hbase-compat-2.6.0/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
+++ b/phoenix-hbase-compat-2.6.0/src/main/java/org/apache/phoenix/compat/hbase/CompatDelegateHTable.java
@@ -17,11 +17,18 @@
package org.apache.phoenix.compat.hbase;
import java.io.IOException;
+import java.util.List;
+import org.apache.hadoop.hbase.CompareOperator;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
public abstract class CompatDelegateHTable implements Table {
@@ -40,4 +47,110 @@ public abstract class CompatDelegateHTable implements Table {
public Result mutateRow(RowMutations rm) throws IOException {
return delegate.mutateRow(rm);
}
+
+ @Override
+ public HTableDescriptor getTableDescriptor() throws IOException {
+ return delegate.getTableDescriptor();
+ }
+
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
+ byte[] value, Put put) throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, compareOp, value, put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
+ byte[] value, Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, compareOp, value, delete);
+ }
+
+ @Override
+ public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
+ byte[] value, RowMutations mutation) throws IOException {
+ return delegate.checkAndMutate(row, family, qualifier, compareOp, value, mutation);
+ }
+
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, byte[] value, Put put)
+ throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, value, put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, byte[] value,
+ Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, value, delete);
+ }
+
+ @Override
+ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
+ byte[] value, Put put) throws IOException {
+ return delegate.checkAndPut(row, family, qualifier, op, value, put);
+ }
+
+ @Override
+ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
+ byte[] value, Delete delete) throws IOException {
+ return delegate.checkAndDelete(row, family, qualifier, op, value, delete);
+ }
+
+
+ @Override
+ public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
+ byte[] value, RowMutations mutation) throws IOException {
+ return delegate.checkAndMutate(row, family, qualifier, op, value, mutation);
+ }
+
+ @Override
+ public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
+ return delegate.checkAndMutate(row, family);
+ }
+
+ @Override
+ public void setOperationTimeout(int operationTimeout) {
+ delegate.setOperationTimeout(operationTimeout);
+ }
+
+ @Override
+ public int getOperationTimeout() {
+ return delegate.getOperationTimeout();
+ }
+
+ @Override
+ public int getRpcTimeout() {
+ return delegate.getRpcTimeout();
+ }
+
+ @Override
+ public void setRpcTimeout(int rpcTimeout) {
+ delegate.setRpcTimeout(rpcTimeout);
+ }
+
+ @Override
+ public int getReadRpcTimeout() {
+ return delegate.getReadRpcTimeout();
+ }
+
+ @Override
+ public void setReadRpcTimeout(int readRpcTimeout) {
+ delegate.setReadRpcTimeout(readRpcTimeout);
+ }
+
+ @Override
+ public int getWriteRpcTimeout() {
+ return delegate.getWriteRpcTimeout();
+ }
+
+ @Override
+ public void setWriteRpcTimeout(int writeRpcTimeout) {
+ delegate.setWriteRpcTimeout(writeRpcTimeout);
+ }
+
+ @Override
+ public boolean[] existsAll(List<Get> gets) throws IOException {
+ return delegate.existsAll(gets);
+ }
}
diff --git a/phoenix-hbase-compat-2.6.0/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java b/phoenix-hbase-compat-2.6.0/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
index 81e4a1873f..651759a891 100644
--- a/phoenix-hbase-compat-2.6.0/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
+++ b/phoenix-hbase-compat-2.6.0/src/main/java/org/apache/phoenix/compat/hbase/CompatOmidTransactionTable.java
@@ -18,29 +18,16 @@ package org.apache.phoenix.compat.hbase;
import java.io.IOException;
-import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.client.Table.CheckAndMutateBuilder;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
public abstract class CompatOmidTransactionTable implements Table {
- protected Table hTable;
-
- public CompatOmidTransactionTable(Table hTable) {
- this.hTable = hTable;
- }
-
- @Override
- public HTableDescriptor getTableDescriptor() throws IOException {
- return hTable.getTableDescriptor();
- }
-
@Override
public RegionLocator getRegionLocator() throws IOException {
throw new UnsupportedOperationException();