[ 
https://issues.apache.org/jira/browse/HUDI-1568?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Gary Li closed HUDI-1568.
-------------------------

> Issues w/ spark_bundle_2.12 : NoSuchMethodError: 'void 
> org.apache.spark.sql.execution.datasources.InMemoryFileIndex.<init>
> --------------------------------------------------------------------------------------------------------------------------
>
>                 Key: HUDI-1568
>                 URL: https://issues.apache.org/jira/browse/HUDI-1568
>             Project: Apache Hudi
>          Issue Type: Bug
>          Components: Spark Integration
>            Reporter: sivabalan narayanan
>            Assignee: sivabalan narayanan
>            Priority: Major
>              Labels: sev:critical, user-support-issues
>             Fix For: 0.8.0
>
>
> I tried the Quick Start with hudi-spark-bundle_2.12 and it fails with
> NoSuchMethodError: 'void 
> org.apache.spark.sql.execution.datasources.InMemoryFileIndex.<init> during 
> read.
> This is happening only for MOR and not for COW. 
> Command used to invoke the Spark shell:
>   
> spark-3.0.1-bin-hadoop2.7/bin/spark-shell \
>  --packages 
> org.apache.hudi:hudi-spark-bundle_2.12:0.7.0,org.apache.spark:spark-avro_2.12:3.0.1
>  \
>  --conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
>   My local Spark installation is spark-3.0.1.
> {code:java}
> . // usual steps as given in quick start utils.
> .
> .
> scala> df.write.format("hudi").
>      |   options(getQuickstartWriteConfigs).
>      |   option(PRECOMBINE_FIELD_OPT_KEY, "ts").
>      |   option(RECORDKEY_FIELD_OPT_KEY, "uuid").
>      |   option(PARTITIONPATH_FIELD_OPT_KEY, "partitionpath").
>      |   option(TABLE_NAME, tableName).
>      |   option("hoodie.datasource.write.table.type","MERGE_ON_READ")
>      |   mode(Overwrite).
>      |   save(basePath)
> val tripsSnapshotDF = spark.
>   read.
>   format("hudi").
>   load(basePath + "/*/*/*/*")
> java.lang.NoSuchMethodError: 'void 
> org.apache.spark.sql.execution.datasources.InMemoryFileIndex.<init>(org.apache.spark.sql.SparkSession,
>  scala.collection.Seq, scala.collection.immutable.Map, scala.Option, 
> org.apache.spark.sql.execution.datasources.FileStatusCache)'
>   at 
> org.apache.hudi.HoodieSparkUtils$.createInMemoryFileIndex(HoodieSparkUtils.scala:89)
>   at 
> org.apache.hudi.MergeOnReadSnapshotRelation.buildFileIndex(MergeOnReadSnapshotRelation.scala:127)
>   at 
> org.apache.hudi.MergeOnReadSnapshotRelation.<init>(MergeOnReadSnapshotRelation.scala:72)
>   at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:89)
>   at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:53)
>   at 
> org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:344)
>   at 
> org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:297)
>   at 
> org.apache.spark.sql.DataFrameReader.$anonfun$load$2(DataFrameReader.scala:286)
>   at scala.Option.getOrElse(Option.scala:189)
>   at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:286)
>   at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:232)
>   ... 62 elided
> {code}
>  
>  
>  



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

Reply via email to