Thanks Jeffrey, the clock was in sync; it seems to work after I synced up the client and server phoenix-core jar versions.
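In case it helps anyone else hitting this: a quick first sanity check for a client/server version mismatch is to ask the Phoenix driver for its JDBC metadata. This is only a sketch (it assumes the same jdbc:phoenix:hostname connection string as in the trace below and a phoenix-core jar on the client classpath; the class name is made up for the example). The authoritative check is still comparing the phoenix-core jar deployed on the region servers against the one bundled with the client.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;

public class CheckPhoenixVersions {
    public static void main(String[] args) throws Exception {
        // Register the Phoenix JDBC driver explicitly (harmless if it is already auto-registered).
        Class.forName("org.apache.phoenix.jdbc.PhoenixDriver");
        // Same connection string as used in sqlline; "hostname" is the ZooKeeper quorum.
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:hostname")) {
            DatabaseMetaData md = conn.getMetaData();
            // Reported by the phoenix-core jar on the client classpath.
            System.out.println("Driver:   " + md.getDriverName() + " " + md.getDriverVersion());
            // Product name/version as the driver reports it for the cluster it connected to.
            System.out.println("Database: " + md.getDatabaseProductName() + " " + md.getDatabaseProductVersion());
        }
    }
}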
Alex

On Wed, May 28, 2014 at 7:58 PM, Jeffrey Zhong <[email protected]> wrote:

> You can try to sync time clock on your server & client machines to see if
> things become better.
>
> From: alex kamil <[email protected]>
> Reply-To: <[email protected]>
> Date: Wednesday, May 28, 2014 3:40 PM
> To: <[email protected]>
> Subject: NullPointerException when initializing phoenix schema
>
> started to get these exceptions on all existing phoenix tables after
> running for a few months without issues, any ideas?
>
> 0: jdbc:phoenix:hostname> select count(*) from P."MY_TABLE";
>
> 14/05/28 18:32:18 WARN client.HConnectionManager$HConnectionImplementation: Error executing for row
>
> java.util.concurrent.ExecutionException: org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: P.MY_TABLE: at index 313
>     at java.util.concurrent.FutureTask$Sync.innerGet(FutureTask.java:252)
>     at java.util.concurrent.FutureTask.get(FutureTask.java:111)
>     at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.processExecs(HConnectionManager.java:1617)
>     at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:918)
>     at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1069)
>     at org.apache.phoenix.query.DelegateConnectionQueryServices.getTable(DelegateConnectionQueryServices.java:107)
>     at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:292)
>     at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:261)
>     at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:279)
>     at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:200)
>     at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:157)
>     at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:289)
>     at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:279)
>     at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:206)
>     at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:202)
>     at org.apache.phoenix.util.PhoenixContextExecutor.call(PhoenixContextExecutor.java:54)
>     at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:202)
>     at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:949)
>     at sqlline.SqlLine$Commands.execute(SqlLine.java:3673)
>     at sqlline.SqlLine$Commands.sql(SqlLine.java:3584)
>     at sqlline.SqlLine.dispatch(SqlLine.java:821)
>     at sqlline.SqlLine.begin(SqlLine.java:699)
>     at sqlline.SqlLine.mainWithInputRedirection(SqlLine.java:441)
>     at sqlline.SqlLine.main(SqlLine.java:424)
> Caused by: org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: P.MY_TABLE: at index 313
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
>     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>     at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:95)
>     at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:79)
>     at org.apache.hadoop.hbase.client.ServerCallable.translateException(ServerCallable.java:256)
>     at org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:166)
>     at org.apache.hadoop.hbase.ipc.ExecRPCInvoker.invoke(ExecRPCInvoker.java:79)
>     at com.sun.proxy.$Proxy7.getTable(Unknown Source)
>     at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1073)
>     at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1070)
>     at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation$4.call(HConnectionManager.java:1605)
>     at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:166)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>     at java.lang.Thread.run(Thread.java:724)
> Caused by: org.apache.hadoop.ipc.RemoteException: org.apache.hadoop.hbase.DoNotRetryIOException: P.MY_TABLE: at index 313
>     at org.apache.phoenix.util.ServerUtil.throwIOException(ServerUtil.java:73)
>     at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:1034)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:606)
>     at org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5639)
>     at org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3930)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:606)
>     at org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:323)
>     at org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
> Caused by: java.lang.NullPointerException: at index 313
>     at com.google.common.collect.ImmutableList.checkElementNotNull(ImmutableList.java:305)
>     at com.google.common.collect.ImmutableList.construct(ImmutableList.java:296)
>     at com.google.common.collect.ImmutableList.copyOf(ImmutableList.java:272)
>     at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:305)
>     at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:216)
>     at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:209)
>     at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:443)
>     at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:254)
>     at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1082)
>     at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:1028)
>     ... 12 more
>
>     at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:1012)
>     at org.apache.hadoop.hbase.ipc.WritableRpcEngine$Invoker.invoke(WritableRpcEngine.java:87)
>     at com.sun.proxy.$Proxy6.execCoprocessor(Unknown Source)
>     at org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:75)
>     at org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:73)
>     at org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:164)
>     ... 10 more
>
> thanks
>
> Alex
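For anyone who finds this thread later: the "at index 313" NullPointerException in the trace above comes from Guava's ImmutableList refusing a null element while PTableImpl rebuilds the table's column list on the server side, so one of the column entries read back from the metadata came up null. The message text itself is plain Guava behaviour, nothing Phoenix-specific; here is a tiny standalone illustration (class name is made up, it only shows where "at index N" originates):

import com.google.common.collect.ImmutableList;

import java.util.Arrays;
import java.util.List;

public class AtIndexDemo {
    public static void main(String[] args) {
        // Stand-in for a table's column metadata where one entry failed to load.
        List<String> columns = Arrays.asList("COL_0", "COL_1", null, "COL_3");
        try {
            // ImmutableList rejects null elements and reports the offending position.
            ImmutableList.copyOf(columns);
        } catch (NullPointerException e) {
            // Prints "at index 2", the same form as "at index 313" in the trace above.
            System.out.println(e.getMessage());
        }
    }
}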
