[ https://issues.apache.org/jira/browse/KYLIN-5210?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

pengfei.zhan updated KYLIN-5210:
--------------------------------
    Affects Version/s: v4.0.1

> org.apache.hadoop.hive.ql.exec.Utilities.copyTableJobPropertiesToConf(Lorg/apache/hadoop/hive/ql/plan/TableDesc;Lorg/apache/hadoop/conf/
> ----------------------------------------------------------------------------------------------------------------------------------------
>
>                 Key: KYLIN-5210
>                 URL: https://issues.apache.org/jira/browse/KYLIN-5210
>             Project: Kylin
>          Issue Type: Bug
>    Affects Versions: v4.0.1
>            Reporter: huang song
>            Priority: Major
>         Attachments: screenshot-1.png, screenshot-2.png
>
>
> Exception in thread "dag-scheduler-event-loop" java.lang.NoSuchMethodError: org.apache.hadoop.hive.ql.exec.Utilities.copyTableJobPropertiesToConf(Lorg/apache/hadoop/hive/ql/plan/TableDesc;Lorg/apache/hadoop/conf/Configuration;)V
>  at org.apache.spark.sql.hive.HadoopTableReader$.initializeLocalJobConfFunc(TableReader.scala:399)
>  at org.apache.spark.sql.hive.HadoopTableReader.$anonfun$createOldHadoopRDD$1(TableReader.scala:314)
>  at org.apache.spark.sql.hive.HadoopTableReader.$anonfun$createOldHadoopRDD$1$adapted(TableReader.scala:314)
>  at org.apache.spark.rdd.HadoopRDD.$anonfun$getJobConf$8(HadoopRDD.scala:181)
>  at org.apache.spark.rdd.HadoopRDD.$anonfun$getJobConf$8$adapted(HadoopRDD.scala:181)
>  at scala.Option.foreach(Option.scala:407)
>  at org.apache.spark.rdd.HadoopRDD.$anonfun$getJobConf$6(HadoopRDD.scala:181)
>  at scala.Option.getOrElse(Option.scala:189)
>  at org.apache.spark.rdd.HadoopRDD.getJobConf(HadoopRDD.scala:178)
>  at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:201)
>  at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300)
>  at scala.Option.getOrElse(Option.scala:189)
>  at org.apache.spark.rdd.RDD.partitions(RDD.scala:296)
>  at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
>  at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300)
>  at scala.Option.getOrElse(Option.scala:189)
>  at org.apache.spark.rdd.RDD.partitions(RDD.scala:296)
>  at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
>  at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300)
>  at scala.Option.getOrElse(Option.scala:189)
>  at org.apache.spark.rdd.RDD.partitions(RDD.scala:296)
>  at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
>  at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:300)
>  at scala.Option.getOrElse(Option.scala:189)
>  at org.apache.spark.rdd.RDD.partitions(RDD.scala:296)
>  at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:49)
>  at org.apache.spark
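
Not part of the original report: a minimal diagnostic sketch, assuming it is run with the same Hive jars that Spark picked up at runtime. It uses plain Java reflection to list which overloads of org.apache.hadoop.hive.ql.exec.Utilities.copyTableJobPropertiesToConf are actually on the classpath and which jar supplied the class; if the two-argument (TableDesc, Configuration) variant named in the NoSuchMethodError above is missing, the classpath is carrying a Hive version with an incompatible signature. The wrapper class name below is just illustrative.

    import java.lang.reflect.Method;

    // Illustrative class name, not part of Kylin or Hive.
    public class CheckHiveUtilitiesSignature {
        public static void main(String[] args) throws Exception {
            // Load the Hive class from the current classpath, as Spark would.
            Class<?> utilities = Class.forName("org.apache.hadoop.hive.ql.exec.Utilities");

            // Print every overload of the method named in the NoSuchMethodError.
            for (Method m : utilities.getMethods()) {
                if ("copyTableJobPropertiesToConf".equals(m.getName())) {
                    System.out.println(m);
                }
            }

            // Show which jar actually supplied the class.
            System.out.println("Loaded from: "
                    + utilities.getProtectionDomain().getCodeSource().getLocation());
        }
    }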



--
This message was sent by Atlassian Jira
(v8.20.10#820010)
