[ https://issues.apache.org/jira/browse/HIVE-5970?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13845144#comment-13845144 ]

Jakob Sultan Ericsson commented on HIVE-5970:
---------------------------------------------

Also running into the same problem. We have set up a new test cluster
(Hortonworks 2.0), loaded some test data (roughly 6,000,000,000 rows), and are
now trying to move it into a more optimized format (ORC with buckets in a
partitioned table).

Not sure if this will help, but here are the query and the stack trace. Do you
want any more information?

{noformat}
INSERT OVERWRITE TABLE hellotestdataend2 PARTITION(dt) SELECT * FROM event.hellotestdataend2 SORT BY dt, someid, msts, userid
{noformat}
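
For context, the target table looks roughly like the sketch below. This is my
simplified reconstruction, not the real DDL: only the columns named in the
SORT BY are shown, and the column types, bucket column, and bucket count are
assumptions (the real table has the 14 columns visible in the row dump in the
stack trace).

{noformat}
-- Hypothetical, simplified DDL for illustration only; column types, the
-- bucket column, and the bucket count are assumptions.
CREATE TABLE hellotestdataend2 (
  msts   BIGINT,
  someid STRING,
  userid BIGINT
  -- remaining columns elided
)
PARTITIONED BY (dt STRING)
CLUSTERED BY (userid) INTO 32 BUCKETS
STORED AS ORC;

-- Session settings a dynamic-partition, bucketed INSERT like the one above
-- needs in Hive 0.12:
SET hive.exec.dynamic.partition=true;
SET hive.exec.dynamic.partition.mode=nonstrict;
SET hive.enforce.bucketing=true;
{noformat}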

{noformat}
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1491)
at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:157)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{"reducesinkkey0":10017,"reducesinkkey1":1384880695473,"reducesinkkey2":1242212467},"value":{"_col0":1384880695473,"_col1":"10017","_col2":1242212467,"_col3":"ASPV6f3f82454d0df89704c74f7f0884fa15","_col4":1384880654,"_col5":19,"_col6":15,"_col7":21700,"_col8":0,"_col9":2552,"_col10":100,"_col11":1384880624,"_col12":0,"_col13":"2013-11-19"}}
at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:258)
... 7 more
Caused by: java.lang.ArrayIndexOutOfBoundsException: 3
at org.apache.hadoop.hive.ql.io.orc.RunLengthIntegerWriterV2.preparePatchedBlob(RunLengthIntegerWriterV2.java:593)
at org.apache.hadoop.hive.ql.io.orc.RunLengthIntegerWriterV2.determineEncoding(RunLengthIntegerWriterV2.java:541)
at org.apache.hadoop.hive.ql.io.orc.RunLengthIntegerWriterV2.write(RunLengthIntegerWriterV2.java:746)
at org.apache.hadoop.hive.ql.io.orc.WriterImpl$IntegerTreeWriter.write(WriterImpl.java:744)
at org.apache.hadoop.hive.ql.io.orc.WriterImpl$StructTreeWriter.write(WriterImpl.java:1320)
at org.apache.hadoop.hive.ql.io.orc.WriterImpl.addRow(WriterImpl.java:1849)
at org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat$OrcRecordWriter.write(OrcOutputFormat.java:75)
at org.apache.hadoop.hive.ql.exec.FileSinkOperator.processOp(FileSinkOperator.java:638)
at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:501)
at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:842)
at org.apache.hadoop.hive.ql.exec.SelectOperator.processOp(SelectOperator.java:88)
at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:501)
at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:842)
at org.apache.hadoop.hive.ql.exec.ExtractOperator.processOp(ExtractOperator.java:45)
at org.apache.hadoop.hive.ql.exec.Operator.process(Operator.java:501)
at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:249)
... 7 more
{noformat}
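
For anyone reading along who does not know the failing code path:
RunLengthIntegerWriterV2 implements ORC's RLE v2 "patched base" encoding. The
sketch below is my own simplification, not Hive's code; all names and the
fixed 8-bit reduced width are assumptions. It shows the general shape: values
are rebased against the run's minimum, most deltas are written with a reduced
bit width, and the outliers go into a separate patch list. The exception
above fires while that patch list is being prepared, so it looks like the
list can grow past the array allocated for it.

{noformat}
// Simplified, self-contained sketch of ORC's RLE v2 "patched base" idea.
// NOT Hive's actual code; names and the fixed 8-bit width are illustrative.
import java.util.ArrayList;
import java.util.List;

public class PatchedBaseSketch {

    // Bits needed to represent a non-negative value (minimum 1).
    static int bitsRequired(long v) {
        return 64 - Long.numberOfLeadingZeros(v | 1L);
    }

    public static void main(String[] args) {
        long[] run = {10017L, 10018L, 10016L, 1384880695473L, 10019L};

        // 1. Rebase every value against the minimum of the run.
        long base = Long.MAX_VALUE;
        for (long v : run) base = Math.min(base, v);
        long[] deltas = new long[run.length];
        for (int i = 0; i < run.length; i++) deltas[i] = run[i] - base;

        // 2. Choose a reduced bit width that covers most deltas (the real
        //    writer derives this from a percentile histogram).
        int reducedWidth = 8;

        // 3. Deltas that do not fit become "patches": their high-order bits
        //    are stored out of band as (gap since previous patch, high bits)
        //    pairs, and only the low bits stay inline. preparePatchedBlob()
        //    builds this kind of list; if the list outgrows the array
        //    allocated for it, an ArrayIndexOutOfBoundsException like the
        //    one above seems plausible.
        List<long[]> patches = new ArrayList<>();
        int prevPatch = 0;
        for (int i = 0; i < deltas.length; i++) {
            if (bitsRequired(deltas[i]) > reducedWidth) {
                patches.add(new long[]{i - prevPatch, deltas[i] >>> reducedWidth});
                deltas[i] &= (1L << reducedWidth) - 1; // keep low bits inline
                prevPatch = i;
            }
        }
        System.out.println("base=" + base + ", patchCount=" + patches.size());
    }
}
{noformat}

A run mixing small ids with epoch-millisecond values (like msts above) is
exactly this kind of skewed data, which may be why this query trips it.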

> ArrayIndexOutOfBoundsException in RunLengthIntegerReaderV2.java
> ---------------------------------------------------------------
>
>                 Key: HIVE-5970
>                 URL: https://issues.apache.org/jira/browse/HIVE-5970
>             Project: Hive
>          Issue Type: Bug
>          Components: File Formats
>    Affects Versions: 0.12.0
>            Reporter: Eric Chu
>            Priority: Critical
>              Labels: orcfile
>         Attachments: test_data
>
>
> A workload involving ORC tables starts getting the following 
> ArrayIndexOutOfBoundsException AFTER the upgrade to Hive 0.12. The failing 
> file, RunLengthIntegerReaderV2.java, was added as part of HIVE-4123. 
> 2013-12-04 14:42:08,537 ERROR cause:java.io.IOException: java.io.IOException: java.lang.ArrayIndexOutOfBoundsException: 0
> 2013-12-04 14:42:08,537 WARN org.apache.hadoop.mapred.Child: Error running child
> java.io.IOException: java.io.IOException: java.lang.ArrayIndexOutOfBoundsException: 0
>         at org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain.handleRecordReaderNextException(HiveIOExceptionHandlerChain.java:121)
>         at org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil.handleRecordReaderNextException(HiveIOExceptionHandlerUtil.java:77)
>         at org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.doNextWithExceptionHandler(HadoopShimsSecure.java:304)
>         at org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.next(HadoopShimsSecure.java:220)
>         at org.apache.hadoop.mapred.MapTask$TrackedRecordReader.moveToNext(MapTask.java:215)
>         at org.apache.hadoop.mapred.MapTask$TrackedRecordReader.next(MapTask.java:200)
>         at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:48)
>         at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:417)
>         at org.apache.hadoop.mapred.MapTask.run(MapTask.java:332)
>         at org.apache.hadoop.mapred.Child$4.run(Child.java:268)
>         at java.security.AccessController.doPrivileged(Native Method)
>         at javax.security.auth.Subject.doAs(Subject.java:396)
>         at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1408)
>         at org.apache.hadoop.mapred.Child.main(Child.java:262)
> Caused by: java.io.IOException: java.lang.ArrayIndexOutOfBoundsException: 0
>         at org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain.handleRecordReaderNextException(HiveIOExceptionHandlerChain.java:121)
>         at org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil.handleRecordReaderNextException(HiveIOExceptionHandlerUtil.java:77)
>         at org.apache.hadoop.hive.ql.io.HiveContextAwareRecordReader.doNext(HiveContextAwareRecordReader.java:276)
>         at org.apache.hadoop.hive.ql.io.CombineHiveRecordReader.doNext(CombineHiveRecordReader.java:101)
>         at org.apache.hadoop.hive.ql.io.CombineHiveRecordReader.doNext(CombineHiveRecordReader.java:41)
>         at org.apache.hadoop.hive.ql.io.HiveContextAwareRecordReader.next(HiveContextAwareRecordReader.java:108)
>         at org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.doNextWithExceptionHandler(HadoopShimsSecure.java:302)
>         ... 11 more
> Caused by: java.lang.ArrayIndexOutOfBoundsException: 0
>         at org.apache.hadoop.hive.ql.io.orc.RunLengthIntegerReaderV2.readPatchedBaseValues(RunLengthIntegerReaderV2.java:171)
>         at org.apache.hadoop.hive.ql.io.orc.RunLengthIntegerReaderV2.readValues(RunLengthIntegerReaderV2.java:54)
>         at org.apache.hadoop.hive.ql.io.orc.RunLengthIntegerReaderV2.next(RunLengthIntegerReaderV2.java:287)
>         at org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl$LongTreeReader.next(RecordReaderImpl.java:473)
>         at org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl$StructTreeReader.next(RecordReaderImpl.java:1157)
>         at org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl.next(RecordReaderImpl.java:2196)
>         at org.apache.hadoop.hive.ql.io.orc.OrcInputFormat$OrcRecordReader.next(OrcInputFormat.java:129)
>         at org.apache.hadoop.hive.ql.io.orc.OrcInputFormat$OrcRecordReader.next(OrcInputFormat.java:80)
>         at org.apache.hadoop.hive.ql.io.HiveContextAwareRecordReader.doNext(HiveContextAwareRecordReader.java:274)
>         ... 15 more



--
This message was sent by Atlassian JIRA
(v6.1.4#6159)
