[ https://issues.apache.org/jira/browse/HIVE-7507?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14075926#comment-14075926 ]
Navis commented on HIVE-7507: ----------------------------- Any ideas on how to fix this? IMO, we can start by preventing column renames on partitioned tables. The current semantic analyzer uses the last-modified serde to acquire column names/types, which is not correct in cases like the example above. > Altering columns in hive results in classcast exceptions > -------------------------------------------------------- > > Key: HIVE-7507 > URL: https://issues.apache.org/jira/browse/HIVE-7507 > Project: Hive > Issue Type: Bug > Components: Database/Schema > Affects Versions: 0.13.1 > Reporter: Vikram Dixit K > > {code} > set hive.enforce.bucketing=true; > set hive.enforce.sorting = true; > set hive.optimize.bucketingsorting=false; > set hive.auto.convert.join.noconditionaltask.size=10000; > create table test (key int, value string) partitioned by (p int) clustered by > (key) into 2 buckets stored as textfile; > create table test1 (key int, value string) stored as textfile; > insert into table test partition (p=1) select * from src; > alter table test set fileformat orc; > insert into table test partition (p=2) select * from src; > insert into table test1 select * from src; > alter table test CHANGE key k1 int after value; > insert into table test partition (p=3) select value, key from src; > set hive.auto.convert.join = true; > set hive.auto.convert.join.noconditionaltask = true; > explain > select test.k1, test.value from test join test1 on (test.k1 = test1.key) > order by test.k1; > select test.k1, test.value from test join test1 on (test.k1 = test1.key) > order by test.k1; > {code} > {code} > java.lang.Exception: java.io.IOException: java.io.IOException: > java.lang.ClassCastException: org.apache.hadoop.io.IntWritable cannot be cast > to org.apache.hadoop.io.Text > at > org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462) > at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522) > Caused by: java.io.IOException: java.io.IOException: > 
java.lang.ClassCastException: org.apache.hadoop.io.IntWritable cannot be cast > to org.apache.hadoop.io.Text > at > org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain.handleRecordReaderNextException(HiveIOExceptionHandlerChain.java:121) > at > org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil.handleRecordReaderNextException(HiveIOExceptionHandlerUtil.java:77) > at > org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.doNextWithExceptionHandler(HadoopShimsSecure.java:255) > at > org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.next(HadoopShimsSecure.java:170) > at > org.apache.hadoop.mapred.MapTask$TrackedRecordReader.moveToNext(MapTask.java:198) > at > org.apache.hadoop.mapred.MapTask$TrackedRecordReader.next(MapTask.java:184) > at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:52) > at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:430) > at org.apache.hadoop.mapred.MapTask.run(MapTask.java:342) > at > org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243) > at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:439) > at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303) > at java.util.concurrent.FutureTask.run(FutureTask.java:138) > at > java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895) > at > java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918) > at java.lang.Thread.run(Thread.java:695) > Caused by: java.io.IOException: java.lang.ClassCastException: > org.apache.hadoop.io.IntWritable cannot be cast to org.apache.hadoop.io.Text > at > org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain.handleRecordReaderNextException(HiveIOExceptionHandlerChain.java:121) > at > org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil.handleRecordReaderNextException(HiveIOExceptionHandlerUtil.java:77) > at > 
org.apache.hadoop.hive.ql.io.HiveContextAwareRecordReader.doNext(HiveContextAwareRecordReader.java:344) > at > org.apache.hadoop.hive.ql.io.CombineHiveRecordReader.doNext(CombineHiveRecordReader.java:101) > at > org.apache.hadoop.hive.ql.io.CombineHiveRecordReader.doNext(CombineHiveRecordReader.java:41) > at > org.apache.hadoop.hive.ql.io.HiveContextAwareRecordReader.next(HiveContextAwareRecordReader.java:122) > at > org.apache.hadoop.hive.shims.HadoopShimsSecure$CombineFileRecordReader.doNextWithExceptionHandler(HadoopShimsSecure.java:253) > ... 13 more > Caused by: java.lang.ClassCastException: org.apache.hadoop.io.IntWritable > cannot be cast to org.apache.hadoop.io.Text > at > org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl$StringDictionaryTreeReader.next(RecordReaderImpl.java:1596) > at > org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl$StringTreeReader.next(RecordReaderImpl.java:1350) > at > org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl$StructTreeReader.next(RecordReaderImpl.java:1792) > at > org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl.next(RecordReaderImpl.java:3007) > at > org.apache.hadoop.hive.ql.io.orc.OrcInputFormat$OrcRecordReader.next(OrcInputFormat.java:153) > at > org.apache.hadoop.hive.ql.io.orc.OrcInputFormat$OrcRecordReader.next(OrcInputFormat.java:127) > {code} -- This message was sent by Atlassian JIRA (v6.2#6252)