[ 
https://issues.apache.org/jira/browse/DRILL-6771?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16637311#comment-16637311
 ] 

Abhishek Girish commented on DRILL-6771:
----------------------------------------

A related test fails with a different stack trace:
{code}
java.sql.SQLException: INTERNAL_ERROR ERROR: 
java.lang.reflect.UndeclaredThrowableException

Setup failed for null
Fragment 0:0

Please, refer to logs for more information.

[Error Id: da5f9798-cb1d-4e5d-a463-32158ffb04e1 on drill80:31010]

  (org.apache.drill.common.exceptions.ExecutionSetupException) 
java.lang.reflect.UndeclaredThrowableException
    
org.apache.drill.common.exceptions.ExecutionSetupException.fromThrowable():30
    org.apache.drill.exec.store.hive.readers.HiveAbstractReader.setup():327
    org.apache.drill.exec.physical.impl.ScanBatch.getNextReaderIfHas():262
    org.apache.drill.exec.physical.impl.ScanBatch.next():173
    org.apache.drill.exec.record.AbstractRecordBatch.next():126
    org.apache.drill.exec.record.AbstractRecordBatch.next():116
    org.apache.drill.exec.record.AbstractUnaryRecordBatch.innerNext():63
    
org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.innerNext():143
    org.apache.drill.exec.record.AbstractRecordBatch.next():184
    org.apache.drill.exec.physical.impl.BaseRootExec.next():104
    org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext():83
    org.apache.drill.exec.physical.impl.BaseRootExec.next():94
    org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():297
    org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():284
    java.security.AccessController.doPrivileged():-2
    javax.security.auth.Subject.doAs():422
    org.apache.hadoop.security.UserGroupInformation.doAs():1595
    org.apache.drill.exec.work.fragment.FragmentExecutor.run():284
    org.apache.drill.common.SelfCleaningRunnable.run():38
    java.util.concurrent.ThreadPoolExecutor.runWorker():1149
    java.util.concurrent.ThreadPoolExecutor$Worker.run():624
    java.lang.Thread.run():748
  Caused By (java.util.concurrent.ExecutionException) 
java.lang.reflect.UndeclaredThrowableException
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.AbstractFuture.getDoneValue():502
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.AbstractFuture.get():481
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.AbstractFuture$TrustedFuture.get():83
    org.apache.drill.exec.store.hive.readers.HiveAbstractReader.setup():320
    org.apache.drill.exec.physical.impl.ScanBatch.getNextReaderIfHas():262
    org.apache.drill.exec.physical.impl.ScanBatch.next():173
    org.apache.drill.exec.record.AbstractRecordBatch.next():126
    org.apache.drill.exec.record.AbstractRecordBatch.next():116
    org.apache.drill.exec.record.AbstractUnaryRecordBatch.innerNext():63
    
org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.innerNext():143
    org.apache.drill.exec.record.AbstractRecordBatch.next():184
    org.apache.drill.exec.physical.impl.BaseRootExec.next():104
    org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext():83
    org.apache.drill.exec.physical.impl.BaseRootExec.next():94
    org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():297
    org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():284
    java.security.AccessController.doPrivileged():-2
    javax.security.auth.Subject.doAs():422
    org.apache.hadoop.security.UserGroupInformation.doAs():1595
    org.apache.drill.exec.work.fragment.FragmentExecutor.run():284
    org.apache.drill.common.SelfCleaningRunnable.run():38
    java.util.concurrent.ThreadPoolExecutor.runWorker():1149
    java.util.concurrent.ThreadPoolExecutor$Worker.run():624
    java.lang.Thread.run():748
  Caused By (java.lang.reflect.UndeclaredThrowableException) null
    org.apache.hadoop.security.UserGroupInformation.doAs():1610
    org.apache.drill.exec.ops.OperatorContextImpl$1.call():101
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly():117
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.InterruptibleTask.run():38
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask.run():77
    java.util.concurrent.ThreadPoolExecutor.runWorker():1149
    java.util.concurrent.ThreadPoolExecutor$Worker.run():624
    java.lang.Thread.run():748
  Caused By (org.apache.drill.common.exceptions.ExecutionSetupException) Failed 
to get o.a.hadoop.mapred.RecordReader from Hive InputFormat
    
org.apache.drill.exec.store.hive.readers.HiveAbstractReader.initNextReader():279
    org.apache.drill.exec.store.hive.readers.HiveAbstractReader.init():257
    org.apache.drill.exec.store.hive.readers.HiveAbstractReader.access$000():71
    org.apache.drill.exec.store.hive.readers.HiveAbstractReader$1.call():313
    org.apache.drill.exec.store.hive.readers.HiveAbstractReader$1.call():310
    org.apache.drill.exec.ops.OperatorContextImpl$1$1.run():104
    java.security.AccessController.doPrivileged():-2
    javax.security.auth.Subject.doAs():422
    org.apache.hadoop.security.UserGroupInformation.doAs():1595
    org.apache.drill.exec.ops.OperatorContextImpl$1.call():101
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly():117
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.InterruptibleTask.run():38
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask.run():77
    java.util.concurrent.ThreadPoolExecutor.runWorker():1149
    java.util.concurrent.ThreadPoolExecutor$Worker.run():624
    java.lang.Thread.run():748
  Caused By (java.lang.ArrayIndexOutOfBoundsException) 6
    org.apache.orc.OrcFile$WriterVersion.from():145
    org.apache.orc.impl.OrcTail.getWriterVersion():74
    org.apache.orc.impl.ReaderImpl.<init>():385
    org.apache.hadoop.hive.ql.io.orc.ReaderImpl.<init>():62
    org.apache.hadoop.hive.ql.io.orc.OrcFile.createReader():89
    org.apache.hadoop.hive.ql.io.orc.OrcRawRecordMerger.<init>():494
    org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.getReader():1821
    org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.getRecordReader():1706
    
org.apache.drill.exec.store.hive.readers.HiveAbstractReader.initNextReader():276
    org.apache.drill.exec.store.hive.readers.HiveAbstractReader.init():257
    org.apache.drill.exec.store.hive.readers.HiveAbstractReader.access$000():71
    org.apache.drill.exec.store.hive.readers.HiveAbstractReader$1.call():313
    org.apache.drill.exec.store.hive.readers.HiveAbstractReader$1.call():310
    org.apache.drill.exec.ops.OperatorContextImpl$1$1.run():104
    java.security.AccessController.doPrivileged():-2
    javax.security.auth.Subject.doAs():422
    org.apache.hadoop.security.UserGroupInformation.doAs():1595
    org.apache.drill.exec.ops.OperatorContextImpl$1.call():101
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly():117
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.InterruptibleTask.run():38
    
org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask.run():77
    java.util.concurrent.ThreadPoolExecutor.runWorker():1149
    java.util.concurrent.ThreadPoolExecutor$Worker.run():624
    java.lang.Thread.run():748

        at 
org.apache.drill.jdbc.impl.DrillCursor.nextRowInternally(DrillCursor.java:528)
        at 
org.apache.drill.jdbc.impl.DrillCursor.loadInitialSchema(DrillCursor.java:600)
        at 
org.apache.drill.jdbc.impl.DrillResultSetImpl.execute(DrillResultSetImpl.java:1288)
        at 
org.apache.drill.jdbc.impl.DrillResultSetImpl.execute(DrillResultSetImpl.java:61)
        at 
oadd.org.apache.calcite.avatica.AvaticaConnection$1.execute(AvaticaConnection.java:667)
        at 
org.apache.drill.jdbc.impl.DrillMetaImpl.prepareAndExecute(DrillMetaImpl.java:1109)
        at 
org.apache.drill.jdbc.impl.DrillMetaImpl.prepareAndExecute(DrillMetaImpl.java:1120)
        at 
oadd.org.apache.calcite.avatica.AvaticaConnection.prepareAndExecuteInternal(AvaticaConnection.java:675)
        at 
org.apache.drill.jdbc.impl.DrillConnectionImpl.prepareAndExecuteInternal(DrillConnectionImpl.java:196)
        at 
oadd.org.apache.calcite.avatica.AvaticaStatement.executeInternal(AvaticaStatement.java:156)
        at 
oadd.org.apache.calcite.avatica.AvaticaStatement.executeQuery(AvaticaStatement.java:227)
        at 
org.apache.drill.test.framework.DrillTestJdbc.executeQuery(DrillTestJdbc.java:210)
        at 
org.apache.drill.test.framework.DrillTestJdbc.run(DrillTestJdbc.java:115)
        at 
java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
{code}

> Queries on Hive 2.3.x fails with SYSTEM ERROR: ArrayIndexOutOfBoundsException
> -----------------------------------------------------------------------------
>
>                 Key: DRILL-6771
>                 URL: https://issues.apache.org/jira/browse/DRILL-6771
>             Project: Apache Drill
>          Issue Type: Bug
>          Components: Query Planning & Optimization, Storage - Hive
>    Affects Versions: 1.15.0
>         Environment: Hive 2.3.3
> MapR 6.1.0
>            Reporter: Abhishek Girish
>            Assignee: Bohdan Kazydub
>            Priority: Critical
>             Fix For: 1.15.0
>
>
> Query: Functional/partition_pruning/hive/general/plan/orc1.q
> {code}
> select * from hive.orc_create_people_dp where state = 'Ca'
> java.sql.SQLException: SYSTEM ERROR: ArrayIndexOutOfBoundsException: 6
> {code}
> Stack Trace:
> {code}
>   (org.apache.drill.exec.work.foreman.ForemanException) Unexpected exception 
> during fragment initialization: Error while applying rule Prel.ScanPrule, 
> args [rel#2103503:DrillScanRel.LOGICAL.ANY([]).[](table=[hive, 
> orc_create_people_dp],groupscan=HiveScan [table=Table(dbName:default, 
> tableName:orc_create_people_dp), columns=[`id`, `first_name`, `last_name`, 
> `address`, `state`, `**`], numPartitions=1, partitions= 
> [Partition(values:[Ca])], 
> inputDirectories=[maprfs:/drill/testdata/hive_storage/orc_create_people_dp/state=Ca],
>  confProperties={}])]
>     org.apache.drill.exec.work.foreman.Foreman.run():300
>     java.util.concurrent.ThreadPoolExecutor.runWorker():1149
>     java.util.concurrent.ThreadPoolExecutor$Worker.run():624
>     java.lang.Thread.run():748
>   Caused By (java.lang.RuntimeException) Error while applying rule 
> Prel.ScanPrule, args 
> [rel#2103503:DrillScanRel.LOGICAL.ANY([]).[](table=[hive, 
> orc_create_people_dp],groupscan=HiveScan [table=Table(dbName:default, 
> tableName:orc_create_people_dp), columns=[`id`, `first_name`, `last_name`, 
> `address`, `state`, `**`], numPartitions=1, partitions= 
> [Partition(values:[Ca])], 
> inputDirectories=[maprfs:/drill/testdata/hive_storage/orc_create_people_dp/state=Ca],
>  confProperties={}])]
>     org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch():236
>     org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp():648
>     org.apache.calcite.tools.Programs$RuleSetProgram.run():339
>     
> org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform():425
>     
> org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel():455
>     org.apache.drill.exec.planner.sql.handlers.ExplainHandler.getPlan():68
>     org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan():145
>     org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan():83
>     org.apache.drill.exec.work.foreman.Foreman.runSQL():584
>     org.apache.drill.exec.work.foreman.Foreman.run():272
>     java.util.concurrent.ThreadPoolExecutor.runWorker():1149
>     java.util.concurrent.ThreadPoolExecutor$Worker.run():624
>     java.lang.Thread.run():748
>   Caused By (org.apache.drill.common.exceptions.DrillRuntimeException) Failed 
> to get InputSplits
>     org.apache.drill.exec.store.hive.HiveMetadataProvider.getInputSplits():182
>     org.apache.drill.exec.store.hive.HiveScan.getInputSplits():288
>     org.apache.drill.exec.store.hive.HiveScan.getMaxParallelizationWidth():197
>     org.apache.drill.exec.planner.physical.ScanPrule.onMatch():42
>     org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch():212
>     org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp():648
>     org.apache.calcite.tools.Programs$RuleSetProgram.run():339
>     
> org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform():425
>     
> org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel():455
>     org.apache.drill.exec.planner.sql.handlers.ExplainHandler.getPlan():68
>     org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan():145
>     org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan():83
>     org.apache.drill.exec.work.foreman.Foreman.runSQL():584
>     org.apache.drill.exec.work.foreman.Foreman.run():272
>     java.util.concurrent.ThreadPoolExecutor.runWorker():1149
>     java.util.concurrent.ThreadPoolExecutor$Worker.run():624
>     java.lang.Thread.run():748
>   Caused By (java.lang.RuntimeException) ORC split generation failed with 
> exception: java.lang.ArrayIndexOutOfBoundsException: 6
>     org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.generateSplitsInfo():1579
>     org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.getSplits():1665
>     
> org.apache.drill.exec.store.hive.HiveMetadataProvider.lambda$splitInputWithUGI$2():258
>     java.security.AccessController.doPrivileged():-2
>     javax.security.auth.Subject.doAs():422
>     org.apache.hadoop.security.UserGroupInformation.doAs():1595
>     
> org.apache.drill.exec.store.hive.HiveMetadataProvider.splitInputWithUGI():247
>     
> org.apache.drill.exec.store.hive.HiveMetadataProvider.getPartitionInputSplits():156
>     
> org.apache.drill.exec.store.hive.HiveMetadataProvider.lambda$getInputSplits$0():177
>     java.util.stream.ReferencePipeline$7$1.accept():267
>     java.util.ArrayList$ArrayListSpliterator.forEachRemaining():1382
>     java.util.stream.AbstractPipeline.copyInto():481
>     java.util.stream.AbstractPipeline.wrapAndCopyInto():471
>     java.util.stream.ReduceOps$ReduceOp.evaluateSequential():708
>     java.util.stream.AbstractPipeline.evaluate():234
>     java.util.stream.ReferencePipeline.collect():499
>     org.apache.drill.exec.store.hive.HiveMetadataProvider.getInputSplits():178
>     org.apache.drill.exec.store.hive.HiveScan.getInputSplits():288
>     org.apache.drill.exec.store.hive.HiveScan.getMaxParallelizationWidth():197
>     org.apache.drill.exec.planner.physical.ScanPrule.onMatch():42
>     org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch():212
>     org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp():648
>     org.apache.calcite.tools.Programs$RuleSetProgram.run():339
>     
> org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform():425
>     
> org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel():455
>     org.apache.drill.exec.planner.sql.handlers.ExplainHandler.getPlan():68
>     org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan():145
>     org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan():83
>     org.apache.drill.exec.work.foreman.Foreman.runSQL():584
>     org.apache.drill.exec.work.foreman.Foreman.run():272
>     java.util.concurrent.ThreadPoolExecutor.runWorker():1149
>     java.util.concurrent.ThreadPoolExecutor$Worker.run():624
>     java.lang.Thread.run():748
>   Caused By (java.util.concurrent.ExecutionException) 
> java.lang.ArrayIndexOutOfBoundsException: 6
>     java.util.concurrent.FutureTask.report():122
>     java.util.concurrent.FutureTask.get():192
>     org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.generateSplitsInfo():1573
>     org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.getSplits():1665
>     
> org.apache.drill.exec.store.hive.HiveMetadataProvider.lambda$splitInputWithUGI$2():258
>     java.security.AccessController.doPrivileged():-2
>     javax.security.auth.Subject.doAs():422
>     org.apache.hadoop.security.UserGroupInformation.doAs():1595
>     
> org.apache.drill.exec.store.hive.HiveMetadataProvider.splitInputWithUGI():247
>     
> org.apache.drill.exec.store.hive.HiveMetadataProvider.getPartitionInputSplits():156
>     
> org.apache.drill.exec.store.hive.HiveMetadataProvider.lambda$getInputSplits$0():177
>     java.util.stream.ReferencePipeline$7$1.accept():267
>     java.util.ArrayList$ArrayListSpliterator.forEachRemaining():1382
>     java.util.stream.AbstractPipeline.copyInto():481
>     java.util.stream.AbstractPipeline.wrapAndCopyInto():471
>     java.util.stream.ReduceOps$ReduceOp.evaluateSequential():708
>     java.util.stream.AbstractPipeline.evaluate():234
>     java.util.stream.ReferencePipeline.collect():499
>     org.apache.drill.exec.store.hive.HiveMetadataProvider.getInputSplits():178
>     org.apache.drill.exec.store.hive.HiveScan.getInputSplits():288
>     org.apache.drill.exec.store.hive.HiveScan.getMaxParallelizationWidth():197
>     org.apache.drill.exec.planner.physical.ScanPrule.onMatch():42
>     org.apache.calcite.plan.volcano.VolcanoRuleCall.onMatch():212
>     org.apache.calcite.plan.volcano.VolcanoPlanner.findBestExp():648
>     org.apache.calcite.tools.Programs$RuleSetProgram.run():339
>     
> org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.transform():425
>     
> org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.convertToPrel():455
>     org.apache.drill.exec.planner.sql.handlers.ExplainHandler.getPlan():68
>     org.apache.drill.exec.planner.sql.DrillSqlWorker.getQueryPlan():145
>     org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan():83
>     org.apache.drill.exec.work.foreman.Foreman.runSQL():584
>     org.apache.drill.exec.work.foreman.Foreman.run():272
>     java.util.concurrent.ThreadPoolExecutor.runWorker():1149
>     java.util.concurrent.ThreadPoolExecutor$Worker.run():624
>     java.lang.Thread.run():748
>   Caused By (java.lang.ArrayIndexOutOfBoundsException) 6
>     org.apache.orc.OrcFile$WriterVersion.from():145
>     org.apache.orc.impl.OrcTail.getWriterVersion():74
>     org.apache.orc.impl.ReaderImpl.<init>():385
>     org.apache.hadoop.hive.ql.io.orc.ReaderImpl.<init>():62
>     org.apache.hadoop.hive.ql.io.orc.OrcFile.createReader():89
>     
> org.apache.hadoop.hive.ql.io.orc.OrcInputFormat$SplitGenerator.populateAndCacheStripeDetails():1380
>     
> org.apache.hadoop.hive.ql.io.orc.OrcInputFormat$SplitGenerator.callInternal():1266
>     
> org.apache.hadoop.hive.ql.io.orc.OrcInputFormat$SplitGenerator.access$2600():1064
>     
> org.apache.hadoop.hive.ql.io.orc.OrcInputFormat$SplitGenerator$1.run():1246
>     
> org.apache.hadoop.hive.ql.io.orc.OrcInputFormat$SplitGenerator$1.run():1243
>     java.security.AccessController.doPrivileged():-2
>     javax.security.auth.Subject.doAs():422
>     org.apache.hadoop.security.UserGroupInformation.doAs():1595
>     org.apache.hadoop.hive.ql.io.orc.OrcInputFormat$SplitGenerator.call():1243
>     org.apache.hadoop.hive.ql.io.orc.OrcInputFormat$SplitGenerator.call():1064
>     java.util.concurrent.FutureTask.run():266
>     java.util.concurrent.ThreadPoolExecutor.runWorker():1149
>     java.util.concurrent.ThreadPoolExecutor$Worker.run():624
>     java.lang.Thread.run():748
>       at 
> org.apache.drill.jdbc.impl.DrillCursor.nextRowInternally(DrillCursor.java:528)
>       at 
> org.apache.drill.jdbc.impl.DrillCursor.loadInitialSchema(DrillCursor.java:600)
>       at 
> org.apache.drill.jdbc.impl.DrillResultSetImpl.execute(DrillResultSetImpl.java:1288)
>       at 
> org.apache.drill.jdbc.impl.DrillResultSetImpl.execute(DrillResultSetImpl.java:61)
>       at 
> oadd.org.apache.calcite.avatica.AvaticaConnection$1.execute(AvaticaConnection.java:667)
>       at 
> org.apache.drill.jdbc.impl.DrillMetaImpl.prepareAndExecute(DrillMetaImpl.java:1109)
>       at 
> org.apache.drill.jdbc.impl.DrillMetaImpl.prepareAndExecute(DrillMetaImpl.java:1120)
>       at 
> oadd.org.apache.calcite.avatica.AvaticaConnection.prepareAndExecuteInternal(AvaticaConnection.java:675)
>       at 
> org.apache.drill.jdbc.impl.DrillConnectionImpl.prepareAndExecuteInternal(DrillConnectionImpl.java:196)
>       at 
> oadd.org.apache.calcite.avatica.AvaticaStatement.executeInternal(AvaticaStatement.java:156)
>       at 
> oadd.org.apache.calcite.avatica.AvaticaStatement.executeQuery(AvaticaStatement.java:227)
>       at 
> org.apache.drill.test.framework.DrillTestJdbc.executeQuery(DrillTestJdbc.java:210)
>       at 
> org.apache.drill.test.framework.DrillTestJdbc.run(DrillTestJdbc.java:115)
>       at 
> java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>       at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>       at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>       at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>       at java.lang.Thread.run(Thread.java:748)
> Caused by: oadd.org.apache.drill.common.exceptions.UserRemoteException: 
> SYSTEM ERROR: ArrayIndexOutOfBoundsException: 6
> {code}



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Reply via email to