[ https://issues.apache.org/jira/browse/DRILL-7785?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17187325#comment-17187325 ]

Abhishek Girish commented on DRILL-7785:
----------------------------------------

Another query that fails with the same error:
{code}
Functional/hive/hive_storage/fileformats/orc/transactional/orc_table_partitioned_clustered_bucketed.sql
select * from hive_orc_transactional.orc_table_partitioned_clustered_bucketed

Exception:

java.sql.SQLException: EXECUTION_ERROR ERROR: java.lang.reflect.UndeclaredThrowableException

Failed to setup reader: HiveDefaultRecordReader
Fragment: 0:0

[Error Id: 7428d4f1-9968-481a-b0a3-5613d060448d on drill83:31010]

  (org.apache.drill.common.exceptions.ExecutionSetupException) java.lang.reflect.UndeclaredThrowableException
    org.apache.drill.common.exceptions.ExecutionSetupException.fromThrowable():30
    org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.setup():257
    org.apache.drill.exec.physical.impl.ScanBatch.getNextReaderIfHas():331
    org.apache.drill.exec.physical.impl.ScanBatch.internalNext():227
    org.apache.drill.exec.physical.impl.ScanBatch.next():298
    org.apache.drill.exec.record.AbstractRecordBatch.next():119
    org.apache.drill.exec.record.AbstractRecordBatch.next():111
    org.apache.drill.exec.record.AbstractUnaryRecordBatch.innerNext():59
    org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.innerNext():85
    org.apache.drill.exec.record.AbstractRecordBatch.next():170
    org.apache.drill.exec.physical.impl.BaseRootExec.next():103
    org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext():81
    org.apache.drill.exec.physical.impl.BaseRootExec.next():93
    org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():323
    org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():310
    java.security.AccessController.doPrivileged():-2
    javax.security.auth.Subject.doAs():422
    org.apache.hadoop.security.UserGroupInformation.doAs():1669
    org.apache.drill.exec.work.fragment.FragmentExecutor.run():310
    org.apache.drill.common.SelfCleaningRunnable.run():38
    java.util.concurrent.ThreadPoolExecutor.runWorker():1149
    java.util.concurrent.ThreadPoolExecutor$Worker.run():624
    java.lang.Thread.run():748
  Caused By (java.util.concurrent.ExecutionException) java.lang.reflect.UndeclaredThrowableException
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.AbstractFuture.getDoneValue():553
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.AbstractFuture.get():534
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.FluentFuture$TrustedFuture.get():88
    org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.setup():252
    org.apache.drill.exec.physical.impl.ScanBatch.getNextReaderIfHas():331
    org.apache.drill.exec.physical.impl.ScanBatch.internalNext():227
    org.apache.drill.exec.physical.impl.ScanBatch.next():298
    org.apache.drill.exec.record.AbstractRecordBatch.next():119
    org.apache.drill.exec.record.AbstractRecordBatch.next():111
    org.apache.drill.exec.record.AbstractUnaryRecordBatch.innerNext():59
    org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.innerNext():85
    org.apache.drill.exec.record.AbstractRecordBatch.next():170
    org.apache.drill.exec.physical.impl.BaseRootExec.next():103
    org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext():81
    org.apache.drill.exec.physical.impl.BaseRootExec.next():93
    org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():323
    org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():310
    java.security.AccessController.doPrivileged():-2
    javax.security.auth.Subject.doAs():422
    org.apache.hadoop.security.UserGroupInformation.doAs():1669
    org.apache.drill.exec.work.fragment.FragmentExecutor.run():310
    org.apache.drill.common.SelfCleaningRunnable.run():38
    java.util.concurrent.ThreadPoolExecutor.runWorker():1149
    java.util.concurrent.ThreadPoolExecutor$Worker.run():624
    java.lang.Thread.run():748
  Caused By (java.lang.reflect.UndeclaredThrowableException) null
    org.apache.hadoop.security.UserGroupInformation.doAs():1687
    org.apache.drill.exec.ops.OperatorContextImpl$1.call():101
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly():125
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.InterruptibleTask.run():69
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask.run():78
    java.util.concurrent.ThreadPoolExecutor.runWorker():1149
    java.util.concurrent.ThreadPoolExecutor$Worker.run():624
    java.lang.Thread.run():748
  Caused By (org.apache.drill.common.exceptions.ExecutionSetupException) Failed to get o.a.hadoop.mapred.RecordReader from Hive InputFormat
    org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.initNextReader():509
    org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.lambda$getInitTask$1():367
    org.apache.drill.exec.ops.OperatorContextImpl$1$1.run():104
    java.security.AccessController.doPrivileged():-2
    javax.security.auth.Subject.doAs():422
    org.apache.hadoop.security.UserGroupInformation.doAs():1669
    org.apache.drill.exec.ops.OperatorContextImpl$1.call():101
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly():125
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.InterruptibleTask.run():69
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask.run():78
    java.util.concurrent.ThreadPoolExecutor.runWorker():1149
    java.util.concurrent.ThreadPoolExecutor$Worker.run():624
    java.lang.Thread.run():748
  Caused By (java.io.IOException) Open failed for file: /user/hive/warehouse/hive_orc_transactional.db/orc_table_partitioned_clustered_bucketed/dt=2015-01-01, error: Invalid argument (22)
    com.mapr.fs.MapRClientImpl.open():338
    com.mapr.fs.MapRFileSystem.open():1018
    org.apache.hadoop.fs.FileSystem.open():807
    org.apache.orc.impl.ReaderImpl.extractFileTail():492
    org.apache.orc.impl.ReaderImpl.<init>():378
    org.apache.hadoop.hive.ql.io.orc.ReaderImpl.<init>():63
    org.apache.hadoop.hive.ql.io.orc.OrcFile.createReader():90
    org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.getRecordReader():1848
    org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.initNextReader():505
    org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.lambda$getInitTask$1():367
    org.apache.drill.exec.ops.OperatorContextImpl$1$1.run():104
    java.security.AccessController.doPrivileged():-2
    javax.security.auth.Subject.doAs():422
    org.apache.hadoop.security.UserGroupInformation.doAs():1669
    org.apache.drill.exec.ops.OperatorContextImpl$1.call():101
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly():125
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.InterruptibleTask.run():69
    org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask.run():78
    java.util.concurrent.ThreadPoolExecutor.runWorker():1149
    java.util.concurrent.ThreadPoolExecutor$Worker.run():624
    java.lang.Thread.run():748

        at org.apache.drill.jdbc.impl.DrillCursor.nextRowInternally(DrillCursor.java:534)
        at org.apache.drill.jdbc.impl.DrillCursor.loadInitialSchema(DrillCursor.java:599)
        at org.apache.drill.jdbc.impl.DrillResultSetImpl.execute(DrillResultSetImpl.java:1278)
        at org.apache.drill.jdbc.impl.DrillResultSetImpl.execute(DrillResultSetImpl.java:58)
        at oadd.org.apache.calcite.avatica.AvaticaConnection$1.execute(AvaticaConnection.java:667)
        at org.apache.drill.jdbc.impl.DrillMetaImpl.prepareAndExecute(DrillMetaImpl.java:1102)
        at org.apache.drill.jdbc.impl.DrillMetaImpl.prepareAndExecute(DrillMetaImpl.java:1113)
        at oadd.org.apache.calcite.avatica.AvaticaConnection.prepareAndExecuteInternal(AvaticaConnection.java:675)
        at org.apache.drill.jdbc.impl.DrillConnectionImpl.prepareAndExecuteInternal(DrillConnectionImpl.java:200)
        at oadd.org.apache.calcite.avatica.AvaticaStatement.executeInternal(AvaticaStatement.java:156)
        at oadd.org.apache.calcite.avatica.AvaticaStatement.execute(AvaticaStatement.java:217)
        at org.apache.drill.test.framework.DrillTestJdbc.executeQuery(DrillTestJdbc.java:210)
        at org.apache.drill.test.framework.DrillTestJdbc.run(DrillTestJdbc.java:115)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
{code}
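
For context, both failing tables are transactional (ACID) ORC tables that are clustered/bucketed, and this one is additionally partitioned (the trace shows partition directory dt=2015-01-01). A minimal Hive DDL sketch of that table shape follows; only the database/table name and the partition column come from the trace, while the data columns, clustering column, and bucket count are illustrative assumptions, not the actual test definition:
{code:sql}
-- Hypothetical DDL illustrating the table shape this test exercises.
-- Only the db/table name and partition column (dt) are taken from the trace;
-- the other columns and the bucket count are assumed.
CREATE TABLE hive_orc_transactional.orc_table_partitioned_clustered_bucketed (
  id INT,
  name STRING
)
PARTITIONED BY (dt STRING)
CLUSTERED BY (id) INTO 4 BUCKETS
STORED AS ORC
TBLPROPERTIES ('transactional'='true');
{code}
Note that the IOException reports the partition directory path (.../dt=2015-01-01) rather than an individual bucket or delta file, which may be relevant when tracking down the root cause.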

> Hive Clustered Bucketed ORC transactional table fails with UndeclaredThrowableException
> ---------------------------------------------------------------------------------------
>
>                 Key: DRILL-7785
>                 URL: https://issues.apache.org/jira/browse/DRILL-7785
>             Project: Apache Drill
>          Issue Type: Bug
>          Components: Execution - Flow, Storage - Hive
>    Affects Versions: 1.18.0
>            Reporter: Abhishek Girish
>            Assignee: Vova Vysotskyi
>            Priority: Major
>
> Query: 
> {code}
> Functional/hive/hive_storage/fileformats/orc/transactional/orc_table_clustered_bucketed.sql
> select * from hive_orc_transactional.orc_table_clustered_bucketed
> {code}
> Exception:
> {code}
> java.sql.SQLException: EXECUTION_ERROR ERROR: java.lang.reflect.UndeclaredThrowableException
> Failed to setup reader: HiveDefaultRecordReader
> Fragment: 0:0
> [Error Id: 323434cc-7bd2-4551-94d4-a5925f6a66af on drill80:31010]
>   (org.apache.drill.common.exceptions.ExecutionSetupException) java.lang.reflect.UndeclaredThrowableException
>     org.apache.drill.common.exceptions.ExecutionSetupException.fromThrowable():30
>     org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.setup():257
>     org.apache.drill.exec.physical.impl.ScanBatch.getNextReaderIfHas():331
>     org.apache.drill.exec.physical.impl.ScanBatch.internalNext():227
>     org.apache.drill.exec.physical.impl.ScanBatch.next():298
>     org.apache.drill.exec.record.AbstractRecordBatch.next():119
>     org.apache.drill.exec.record.AbstractRecordBatch.next():111
>     org.apache.drill.exec.record.AbstractUnaryRecordBatch.innerNext():59
>     org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.innerNext():85
>     org.apache.drill.exec.record.AbstractRecordBatch.next():170
>     org.apache.drill.exec.physical.impl.BaseRootExec.next():103
>     org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext():81
>     org.apache.drill.exec.physical.impl.BaseRootExec.next():93
>     org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():323
>     org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():310
>     java.security.AccessController.doPrivileged():-2
>     javax.security.auth.Subject.doAs():422
>     org.apache.hadoop.security.UserGroupInformation.doAs():1669
>     org.apache.drill.exec.work.fragment.FragmentExecutor.run():310
>     org.apache.drill.common.SelfCleaningRunnable.run():38
>     java.util.concurrent.ThreadPoolExecutor.runWorker():1149
>     java.util.concurrent.ThreadPoolExecutor$Worker.run():624
>     java.lang.Thread.run():748
>   Caused By (java.util.concurrent.ExecutionException) java.lang.reflect.UndeclaredThrowableException
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.AbstractFuture.getDoneValue():553
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.AbstractFuture.get():534
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.FluentFuture$TrustedFuture.get():88
>     org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.setup():252
>     org.apache.drill.exec.physical.impl.ScanBatch.getNextReaderIfHas():331
>     org.apache.drill.exec.physical.impl.ScanBatch.internalNext():227
>     org.apache.drill.exec.physical.impl.ScanBatch.next():298
>     org.apache.drill.exec.record.AbstractRecordBatch.next():119
>     org.apache.drill.exec.record.AbstractRecordBatch.next():111
>     org.apache.drill.exec.record.AbstractUnaryRecordBatch.innerNext():59
>     org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.innerNext():85
>     org.apache.drill.exec.record.AbstractRecordBatch.next():170
>     org.apache.drill.exec.physical.impl.BaseRootExec.next():103
>     org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext():81
>     org.apache.drill.exec.physical.impl.BaseRootExec.next():93
>     org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():323
>     org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():310
>     java.security.AccessController.doPrivileged():-2
>     javax.security.auth.Subject.doAs():422
>     org.apache.hadoop.security.UserGroupInformation.doAs():1669
>     org.apache.drill.exec.work.fragment.FragmentExecutor.run():310
>     org.apache.drill.common.SelfCleaningRunnable.run():38
>     java.util.concurrent.ThreadPoolExecutor.runWorker():1149
>     java.util.concurrent.ThreadPoolExecutor$Worker.run():624
>     java.lang.Thread.run():748
>   Caused By (java.lang.reflect.UndeclaredThrowableException) null
>     org.apache.hadoop.security.UserGroupInformation.doAs():1687
>     org.apache.drill.exec.ops.OperatorContextImpl$1.call():101
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly():125
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.InterruptibleTask.run():69
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask.run():78
>     java.util.concurrent.ThreadPoolExecutor.runWorker():1149
>     java.util.concurrent.ThreadPoolExecutor$Worker.run():624
>     java.lang.Thread.run():748
>   Caused By (org.apache.drill.common.exceptions.ExecutionSetupException) Failed to get o.a.hadoop.mapred.RecordReader from Hive InputFormat
>     org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.initNextReader():509
>     org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.lambda$getInitTask$1():367
>     org.apache.drill.exec.ops.OperatorContextImpl$1$1.run():104
>     java.security.AccessController.doPrivileged():-2
>     javax.security.auth.Subject.doAs():422
>     org.apache.hadoop.security.UserGroupInformation.doAs():1669
>     org.apache.drill.exec.ops.OperatorContextImpl$1.call():101
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly():125
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.InterruptibleTask.run():69
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask.run():78
>     java.util.concurrent.ThreadPoolExecutor.runWorker():1149
>     java.util.concurrent.ThreadPoolExecutor$Worker.run():624
>     java.lang.Thread.run():748
>   Caused By (java.io.IOException) Open failed for file: /user/hive/warehouse/hive_orc_transactional.db/orc_table_clustered_bucketed, error: Invalid argument (22)
>     com.mapr.fs.MapRClientImpl.open():338
>     com.mapr.fs.MapRFileSystem.open():1018
>     org.apache.hadoop.fs.FileSystem.open():807
>     org.apache.orc.impl.ReaderImpl.extractFileTail():492
>     org.apache.orc.impl.ReaderImpl.<init>():378
>     org.apache.hadoop.hive.ql.io.orc.ReaderImpl.<init>():63
>     org.apache.hadoop.hive.ql.io.orc.OrcFile.createReader():90
>     org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.getRecordReader():1848
>     org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.initNextReader():505
>     org.apache.drill.exec.store.hive.readers.HiveDefaultRecordReader.lambda$getInitTask$1():367
>     org.apache.drill.exec.ops.OperatorContextImpl$1$1.run():104
>     java.security.AccessController.doPrivileged():-2
>     javax.security.auth.Subject.doAs():422
>     org.apache.hadoop.security.UserGroupInformation.doAs():1669
>     org.apache.drill.exec.ops.OperatorContextImpl$1.call():101
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask$TrustedFutureInterruptibleTask.runInterruptibly():125
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.InterruptibleTask.run():69
>     org.apache.drill.shaded.guava.com.google.common.util.concurrent.TrustedListenableFutureTask.run():78
>     java.util.concurrent.ThreadPoolExecutor.runWorker():1149
>     java.util.concurrent.ThreadPoolExecutor$Worker.run():624
>     java.lang.Thread.run():748
> {code}


