Viraj Jasani created PHOENIX-6244:
-------------------------------------

             Summary: Flaky test GlobalMutableNonTxIndexIT
                 Key: PHOENIX-6244
                 URL: https://issues.apache.org/jira/browse/PHOENIX-6244
             Project: Phoenix
          Issue Type: Test
    Affects Versions: 4.15.0
            Reporter: Viraj Jasani


The common reasons behind the flaky runs are an OOM error and a query timeout:

OOM error:
{code:java}
2020-11-26 20:40:10,713 ERROR [ProcedureExecutor-19] org.apache.hadoop.hbase.procedure2.ProcedureExecutor(1180): CODE-BUG: Uncatched runtime exception for procedure: CreateTableProcedure (table=S.TBL_N000436) id=20 owner=jenkins state=RUNNABLE:CREATE_TABLE_WRITE_FS_LAYOUT
java.lang.OutOfMemoryError: unable to create new native thread
        at java.lang.Thread.start0(Native Method)
        at java.lang.Thread.start(Thread.java:717)
        at org.apache.hadoop.hdfs.DFSOutputStream.start(DFSOutputStream.java:2381)
        at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1869)
        at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1698)
        at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1633)
        at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:448)
        at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:444)
        at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
        at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:459)
        at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:387)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:910)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:891)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:788)
        at org.apache.hadoop.hbase.util.FSTableDescriptors.writeHTD(FSTableDescriptors.java:681)
        at org.apache.hadoop.hbase.util.FSTableDescriptors.writeTableDescriptor(FSTableDescriptors.java:655)
        at org.apache.hadoop.hbase.util.FSTableDescriptors.createTableDescriptorForTableDirectory(FSTableDescriptors.java:741)
        at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.createFsLayout(CreateTableProcedure.java:383)
        at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.createFsLayout(CreateTableProcedure.java:361)
        at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.executeFromState(CreateTableProcedure.java:119)
        at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.executeFromState(CreateTableProcedure.java:59)
        at org.apache.hadoop.hbase.procedure2.StateMachineProcedure.execute(StateMachineProcedure.java:139)
        at org.apache.hadoop.hbase.procedure2.Procedure.doExecute(Procedure.java:499)
        at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execProcedure(ProcedureExecutor.java:1163)
        at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execLoop(ProcedureExecutor.java:958)
        at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execLoop(ProcedureExecutor.java:911)
        at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.access$400(ProcedureExecutor.java:79)
        at org.apache.hadoop.hbase.procedure2.ProcedureExecutor$2.run(ProcedureExecutor.java:502)

{code}
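For the OOM case ({{unable to create new native thread}}), the pressure comes from the sheer number of threads the shared mini cluster and the concurrently running ITs spin up on the Jenkins host. As a rough illustration only (not a change proposed here), a Phoenix IT can reduce client-side thread pressure through the usual setUpTestDriver pattern; the test class name and the pool size below are hypothetical, and the package locations are assumed from the existing IT base classes:
{code:java}
// Hypothetical mitigation sketch only; not a change proposed in this issue.
// Assumes the usual Phoenix IT pattern of passing client properties to
// BaseTest.setUpTestDriver(); class name and pool size are illustrative.
import java.util.HashMap;
import java.util.Map;

import org.apache.phoenix.query.BaseTest;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.util.ReadOnlyProps;
import org.junit.BeforeClass;

public class LowThreadPressureIT extends BaseTest {

    @BeforeClass
    public static void doSetup() throws Exception {
        Map<String, String> clientProps = new HashMap<>();
        // Cap the Phoenix client thread pool so parallel ITs create fewer
        // native threads on the shared build host.
        clientProps.put(QueryServices.THREAD_POOL_SIZE_ATTRIB, "4");
        setUpTestDriver(new ReadOnlyProps(clientProps.entrySet().iterator()));
    }
}
{code}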
SQLTimeoutException:
{code:java}
java.sql.SQLTimeoutException: . Query couldn't be completed in the allotted time: 300000 ms
        at org.apache.phoenix.exception.SQLExceptionCode$16.newException(SQLExceptionCode.java:467)
        at org.apache.phoenix.exception.SQLExceptionInfo.buildException(SQLExceptionInfo.java:217)
        at org.apache.phoenix.iterate.BaseResultIterators.getIterators(BaseResultIterators.java:1390)
        at org.apache.phoenix.iterate.BaseResultIterators.getIterators(BaseResultIterators.java:1279)
        at org.apache.phoenix.iterate.ConcatResultIterator.getIterators(ConcatResultIterator.java:50)
        at org.apache.phoenix.iterate.ConcatResultIterator.currentIterator(ConcatResultIterator.java:97)
        at org.apache.phoenix.iterate.ConcatResultIterator.next(ConcatResultIterator.java:117)
        at org.apache.phoenix.iterate.UngroupedAggregatingResultIterator.next(UngroupedAggregatingResultIterator.java:39)
        at org.apache.phoenix.compile.ServerBuildIndexCompiler$RowCountMutationPlan.execute(ServerBuildIndexCompiler.java:66)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl.updateData(ConnectionQueryServicesImpl.java:4732)
        at org.apache.phoenix.schema.MetaDataClient.buildIndex(MetaDataClient.java:1385)
        at org.apache.phoenix.schema.MetaDataClient.createIndex(MetaDataClient.java:1773)
        at org.apache.phoenix.compile.CreateIndexCompiler$1.execute(CreateIndexCompiler.java:85)
        at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:415)
        at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:397)
        at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
        at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:396)
        at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:384)
        at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1886)
        at org.apache.phoenix.end2end.index.BaseIndexIT.testMultipleUpdatesAcrossRegions(BaseIndexIT.java:829)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)

{code}
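The 300000 ms limit in the timeout case is the Phoenix query timeout, which a client can override per connection or per statement. A minimal, hypothetical sketch follows (the JDBC URL, table name, and timeout values are placeholders, and this is not a fix proposed in this issue):
{code:java}
// Hypothetical sketch only; URL, table name, and timeout values are placeholders.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.Properties;

public class QueryTimeoutSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // "phoenix.query.timeoutMs" (QueryServices.THREAD_TIMEOUT_MS_ATTRIB)
        // controls the allotted time reported in the exception above.
        props.setProperty("phoenix.query.timeoutMs", "600000");
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost", props);
             Statement stmt = conn.createStatement()) {
            // Standard JDBC per-statement timeout, in seconds.
            stmt.setQueryTimeout(600);
            stmt.execute("CREATE INDEX IF NOT EXISTS IDX ON S.TBL (V1)");
        }
    }
}
{code}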