[ 
https://issues.apache.org/jira/browse/PHOENIX-3418?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Saurabh updated PHOENIX-3418:
-----------------------------
    Description: 
While running deletes on a table (field_history_archive) with a secondary index 
(field_history_archive_index), the delete requests start failing right after a 
region split with the exception below. 

This exception appears repeatedly for subsequent deletes, and the only way to 
fix it is by restarting the application server.

Stack trace - 

org.apache.hadoop.hbase.DoNotRetryIOException: 
org.apache.hadoop.hbase.DoNotRetryIOException: ERROR 1108 (XCL08): Cache of 
region boundaries are out of date. tableName=ARCHIVE.FIELD_HISTORY_ARCHIVE_INDEX
        at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.throwIfScanOutOfRegion(BaseScannerRegionObserver.java:150)
        at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.preScannerOpen(BaseScannerRegionObserver.java:178)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$50.call(RegionCoprocessorHost.java:1289)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1621)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1697)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperationWithResult(RegionCoprocessorHost.java:1670)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preScannerOpen(RegionCoprocessorHost.java:1284)
        at 
org.apache.hadoop.hbase.regionserver.HRegionServer.scan(HRegionServer.java:3255)
        at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32492)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2208)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
        at 
org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.phoenix.schema.StaleRegionBoundaryCacheException: ERROR 
1108 (XCL08): Cache of region boundaries are out of date. 
tableName=ARCHIVE.FIELD_HISTORY_ARCHIVE_INDEX
        at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.throwIfScanOutOfRegion(BaseScannerRegionObserver.java:149)
        ... 13 more


        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at 
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at 
org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
        at 
org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
        at 
org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:296)
        at 
org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:326)
        at 
org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:163)
        at 
org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:58)
        at 
org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:115)
        at 
org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:91)
        at 
org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:289)
        at 
org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:190)
        at 
org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:185)
        at 
org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:111)
        at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:780)
        at 
hbase.client.CounterProtectedHTable.getScannerWithMetrics(CounterProtectedHTable.java:559)
        at 
hbase.client.CounterProtectedHTable.access$200(CounterProtectedHTable.java:37)
        at 
hbase.client.CounterProtectedHTable$19.inner(CounterProtectedHTable.java:265)
        at 
hbase.client.CounterProtectedHTable$19.inner(CounterProtectedHTable.java:262)
        at 
hbase.client.ProtectedResourceOperation.outer(ProtectedResourceOperation.java:38)
        at 
hbase.client.CounterProtectedHTable.getScanner(CounterProtectedHTable.java:268)
        at 
org.apache.phoenix.iterate.TableResultIterator.initScanner(TableResultIterator.java:172)
        at 
org.apache.phoenix.iterate.TableResultIterator.next(TableResultIterator.java:126)
        at 
org.apache.phoenix.iterate.LookAheadResultIterator$1.advance(LookAheadResultIterator.java:47)
 at 
org.apache.phoenix.iterate.LookAheadResultIterator.init(LookAheadResultIterator.java:59)
        at 
org.apache.phoenix.iterate.LookAheadResultIterator.peek(LookAheadResultIterator.java:73)
        at 
org.apache.phoenix.iterate.SerialIterators$SerialIterator.nextIterator(SerialIterators.java:179)
        at 
org.apache.phoenix.iterate.SerialIterators$SerialIterator.currentIterator(SerialIterators.java:158)
        at 
org.apache.phoenix.iterate.SerialIterators$SerialIterator.peek(SerialIterators.java:210)
        at 
org.apache.phoenix.iterate.ConcatResultIterator.currentIterator(ConcatResultIterator.java:100)
        at 
org.apache.phoenix.iterate.ConcatResultIterator.next(ConcatResultIterator.java:117)
        at 
org.apache.phoenix.iterate.DelegateResultIterator.next(DelegateResultIterator.java:44)
        at 
org.apache.phoenix.iterate.LimitingResultIterator.next(LimitingResultIterator.java:47)
        at 
org.apache.phoenix.jdbc.PhoenixResultSet.next(PhoenixResultSet.java:778)
        at 
phoenix.connection.PhoenixResultSetWithTimeTracking.next(PhoenixResultSetWithTimeTracking.java:65)
        at 

  was:
While running deletes on a table (field_history_archive) with a secondary index 
(field_history_archive_index), the delete requests start failing right after a 
region split with the exception below. 

This exception appears repeatedly for subsequent deletes, and the only way to 
fix it is by restarting the application server.

Stack trace - 

org.apache.hadoop.hbase.DoNotRetryIOException: 
org.apache.hadoop.hbase.DoNotRetryIOException: ERROR 1108 (XCL08): Cache of 
region boundaries are out of date. tableName=ARCHIVE.FIELD_HISTORY_ARCHIVE_INDEX
        at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.throwIfScanOutOfRegion(BaseScannerRegionObserver.java:150)
        at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.preScannerOpen(BaseScannerRegionObserver.java:178)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$50.call(RegionCoprocessorHost.java:1289)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1621)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1697)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperationWithResult(RegionCoprocessorHost.java:1670)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preScannerOpen(RegionCoprocessorHost.java:1284)
        at 
org.apache.hadoop.hbase.regionserver.HRegionServer.scan(HRegionServer.java:3255)
        at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32492)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2208)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
        at 
org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.phoenix.schema.StaleRegionBoundaryCacheException: ERROR 
1108 (XCL08): Cache of region boundaries are out of date. 
tableName=ARCHIVE.FIELD_HISTORY_ARCHIVE_INDEX
        at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.throwIfScanOutOfRegion(BaseScannerRegionObserver.java:149)
        ... 13 more


        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at 
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at 
org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
        at 
org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
        at 
org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:296)
        at 
org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:326)
        at 
org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:163)
        at 
org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:58)
        at 
org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:115)
        at 
org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:91)
        at 
org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:289)
        at 
org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:190)
        at 
org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:185)
        at 
org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:111)
        at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:780)
        at 
hbase.client.CounterProtectedHTable.getScannerWithMetrics(CounterProtectedHTable.java:559)
        at 
hbase.client.CounterProtectedHTable.access$200(CounterProtectedHTable.java:37)
        at 
hbase.client.CounterProtectedHTable$19.inner(CounterProtectedHTable.java:265)
        at 
hbase.client.CounterProtectedHTable$19.inner(CounterProtectedHTable.java:262)
        at 
hbase.client.ProtectedResourceOperation.outer(ProtectedResourceOperation.java:38)
        at 
hbase.client.CounterProtectedHTable.getScanner(CounterProtectedHTable.java:268)
        at 
org.apache.phoenix.iterate.TableResultIterator.initScanner(TableResultIterator.java:172)
        at 
org.apache.phoenix.iterate.TableResultIterator.next(TableResultIterator.java:126)
        at 
org.apache.phoenix.iterate.LookAheadResultIterator$1.advance(LookAheadResultIterator.java:47)
 at 
org.apache.phoenix.iterate.LookAheadResultIterator.init(LookAheadResultIterator.java:59)
        at 
org.apache.phoenix.iterate.LookAheadResultIterator.peek(LookAheadResultIterator.java:73)
        at 
org.apache.phoenix.iterate.SerialIterators$SerialIterator.nextIterator(SerialIterators.java:179)
        at 
org.apache.phoenix.iterate.SerialIterators$SerialIterator.currentIterator(SerialIterators.java:158)
        at 
org.apache.phoenix.iterate.SerialIterators$SerialIterator.peek(SerialIterators.java:210)
        at 
org.apache.phoenix.iterate.ConcatResultIterator.currentIterator(ConcatResultIterator.java:100)
        at 
org.apache.phoenix.iterate.ConcatResultIterator.next(ConcatResultIterator.java:117)
        at 
org.apache.phoenix.iterate.DelegateResultIterator.next(DelegateResultIterator.java:44)
        at 
org.apache.phoenix.iterate.LimitingResultIterator.next(LimitingResultIterator.java:47)
        at 
org.apache.phoenix.jdbc.PhoenixResultSet.next(PhoenixResultSet.java:778)
        at 
phoenix.connection.PhoenixResultSetWithTimeTracking.next(PhoenixResultSetWithTimeTracking.java:65)
        at 
archive.fieldhistory.job.delete.FieldHistoryArchiveDAO.collectRows(FieldHistoryArchiveDAO.java:225)
        at 
archive.fieldhistory.job.delete.FieldHistoryArchiveDAO.collectRows(FieldHistoryArchiveDAO.java:215)
        at 
archive.fieldhistory.job.delete.FieldHistoryArchiveDAO.getRowsToDelete(FieldHistoryArchiveDAO.java:186)
        at 
archive.fieldhistory.job.delete.FieldHistoryArchiveDAO.getCandidateRecordsForArchiveRetentionDeletion(FieldHistoryArchiveDAO.java:108)
        at 
archive.fieldhistory.job.delete.ArchivalDeletionMessageHandler.attemptDeletion(ArchivalDeletionMessageHandler.java:206)
        at 
archive.fieldhistory.job.delete.ArchivalDeletionMessageHandler.handleTarzanMessage(ArchivalDeletionMessageHandler.java:146)
        at 
archive.fieldhistory.job.TarzanStyleMessageHandler.handleMessage(TarzanStyleMessageHandler.java:44)
        at 
common.messaging.MqFrameworkBaseHandler.process(MqFrameworkBaseHandler.java:561)
        at 
common.messaging.MqFrameworkBaseHandler.handleMessage(MqFrameworkBaseHandler.java:252)
        at 
common.messaging.QpidMessageHandler$MessageProcessingTask.call(QpidMessageHandler.java:601)
        at 
common.messaging.QpidMessageHandler$MessageProcessingTask.call(QpidMessageHandler.java:1)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: 
org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOException):
 org.apache.hadoop.hbase.DoNotRetryIOException: ERROR 1108 (XCL08): Cache of 
region boundaries are out of date. tableName=ARCHIVE.FIELD_HISTORY_ARCHIVE_INDEX
        at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.throwIfScanOutOfRegion(BaseScannerRegionObserver.java:150)
        at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.preScannerOpen(BaseScannerRegionObserver.java:178)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$50.call(RegionCoprocessorHost.java:1289)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1621)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1697)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperationWithResult(RegionCoprocessorHost.java:1670)
        at 
org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preScannerOpen(RegionCoprocessorHost.java:1284)
        at 
org.apache.hadoop.hbase.regionserver.HRegionServer.scan(HRegionServer.java:3255)
        at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32492)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2208)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
        at 
org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.phoenix.schema.StaleRegionBoundaryCacheException: ERROR 
1108 (XCL08): Cache of region boundaries are out of date. 
tableName=ARCHIVE.FIELD_HISTORY_ARCHIVE_INDEX
        at 
org.apache.phoenix.coprocessor.BaseScannerRegionObserver.throwIfScanOutOfRegion(BaseScannerRegionObserver.java:149)
        ... 13 more


        at org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1489)
        at 
org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1691)
        at 
org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1750)
        at 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:31392)
        at 
org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:318)


> Delete fails due to 'Cache of region boundaries out of date' exception after 
> region split
> -----------------------------------------------------------------------------------------
>
>                 Key: PHOENIX-3418
>                 URL: https://issues.apache.org/jira/browse/PHOENIX-3418
>             Project: Phoenix
>          Issue Type: Bug
>         Environment: CentOS release 6.8 (Final)
>            Reporter: Saurabh
>
> While running deletes on a table (field_history_archive) with a secondary 
> index (field_history_archive_index), the delete requests start failing right 
> after a region split with the exception below. 
> This exception appears repeatedly for subsequent deletes, and the only way to 
> fix it is by restarting the application server.
> Stack trace - 
> org.apache.hadoop.hbase.DoNotRetryIOException: 
> org.apache.hadoop.hbase.DoNotRetryIOException: ERROR 1108 (XCL08): Cache of 
> region boundaries are out of date. 
> tableName=ARCHIVE.FIELD_HISTORY_ARCHIVE_INDEX
>         at 
> org.apache.phoenix.coprocessor.BaseScannerRegionObserver.throwIfScanOutOfRegion(BaseScannerRegionObserver.java:150)
>         at 
> org.apache.phoenix.coprocessor.BaseScannerRegionObserver.preScannerOpen(BaseScannerRegionObserver.java:178)
>         at 
> org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$50.call(RegionCoprocessorHost.java:1289)
>         at 
> org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost$RegionOperation.call(RegionCoprocessorHost.java:1621)
>         at 
> org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperation(RegionCoprocessorHost.java:1697)
>         at 
> org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.execOperationWithResult(RegionCoprocessorHost.java:1670)
>         at 
> org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost.preScannerOpen(RegionCoprocessorHost.java:1284)
>         at 
> org.apache.hadoop.hbase.regionserver.HRegionServer.scan(HRegionServer.java:3255)
>         at 
> org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32492)
>         at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2208)
>         at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:104)
>         at 
> org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:133)
>         at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:108)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.phoenix.schema.StaleRegionBoundaryCacheException: ERROR 
> 1108 (XCL08): Cache of region boundaries are out of date. 
> tableName=ARCHIVE.FIELD_HISTORY_ARCHIVE_INDEX
>         at 
> org.apache.phoenix.coprocessor.BaseScannerRegionObserver.throwIfScanOutOfRegion(BaseScannerRegionObserver.java:149)
>         ... 13 more
>         at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native 
> Method)
>         at 
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>         at 
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>         at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>         at 
> org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
>         at 
> org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
>         at 
> org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:296)
>         at 
> org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:326)
>         at 
> org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:163)
>         at 
> org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:58)
>         at 
> org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:115)
>         at 
> org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:91)
>         at 
> org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:289)
>         at 
> org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:190)
>         at 
> org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:185)
>         at 
> org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:111)
>         at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:780)
>         at 
> hbase.client.CounterProtectedHTable.getScannerWithMetrics(CounterProtectedHTable.java:559)
>         at 
> hbase.client.CounterProtectedHTable.access$200(CounterProtectedHTable.java:37)
>         at 
> hbase.client.CounterProtectedHTable$19.inner(CounterProtectedHTable.java:265)
>         at 
> hbase.client.CounterProtectedHTable$19.inner(CounterProtectedHTable.java:262)
>         at 
> hbase.client.ProtectedResourceOperation.outer(ProtectedResourceOperation.java:38)
>         at 
> hbase.client.CounterProtectedHTable.getScanner(CounterProtectedHTable.java:268)
>         at 
> org.apache.phoenix.iterate.TableResultIterator.initScanner(TableResultIterator.java:172)
>         at 
> org.apache.phoenix.iterate.TableResultIterator.next(TableResultIterator.java:126)
>         at 
> org.apache.phoenix.iterate.LookAheadResultIterator$1.advance(LookAheadResultIterator.java:47)
>  at 
> org.apache.phoenix.iterate.LookAheadResultIterator.init(LookAheadResultIterator.java:59)
>         at 
> org.apache.phoenix.iterate.LookAheadResultIterator.peek(LookAheadResultIterator.java:73)
>         at 
> org.apache.phoenix.iterate.SerialIterators$SerialIterator.nextIterator(SerialIterators.java:179)
>         at 
> org.apache.phoenix.iterate.SerialIterators$SerialIterator.currentIterator(SerialIterators.java:158)
>         at 
> org.apache.phoenix.iterate.SerialIterators$SerialIterator.peek(SerialIterators.java:210)
>         at 
> org.apache.phoenix.iterate.ConcatResultIterator.currentIterator(ConcatResultIterator.java:100)
>         at 
> org.apache.phoenix.iterate.ConcatResultIterator.next(ConcatResultIterator.java:117)
>         at 
> org.apache.phoenix.iterate.DelegateResultIterator.next(DelegateResultIterator.java:44)
>         at 
> org.apache.phoenix.iterate.LimitingResultIterator.next(LimitingResultIterator.java:47)
>         at 
> org.apache.phoenix.jdbc.PhoenixResultSet.next(PhoenixResultSet.java:778)
>         at 
> phoenix.connection.PhoenixResultSetWithTimeTracking.next(PhoenixResultSetWithTimeTracking.java:65)
>         at 



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

Reply via email to