[ https://issues.apache.org/jira/browse/HUDI-2183?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

min.jia updated HUDI-2183:
--------------------------
    Description: 
Hive meta sync fails while Spark is writing a Hudi table.

Spark version: 2.4.6

Hudi version: 0.8.0

hoodie.datasource.hive_sync.use_jdbc = false
 
{code:java}
User class threw exception: java.lang.NoClassDefFoundError: org/json/JSONException
  at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeCreateTable(SemanticAnalyzer.java:10847)
  at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genResolvedParseTree(SemanticAnalyzer.java:10047)
  at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:10128)
  at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.analyzeInternal(SemanticAnalyzer.java:10013)
  at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:227)
  at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:424)
  at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:308)
  at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1122)
  at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1170)
  at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
  at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
  at org.apache.hudi.hive.HoodieHiveClient.updateHiveSQLs(HoodieHiveClient.java:401)
  at org.apache.hudi.hive.HoodieHiveClient.updateHiveSQLUsingHiveDriver(HoodieHiveClient.java:384)
  at org.apache.hudi.hive.HoodieHiveClient.updateHiveSQL(HoodieHiveClient.java:374)
  at org.apache.hudi.hive.HoodieHiveClient.createTable(HoodieHiveClient.java:263)
  at org.apache.hudi.hive.HiveSyncTool.syncSchema(HiveSyncTool.java:199)
  at org.apache.hudi.hive.HiveSyncTool.syncHoodieTable(HiveSyncTool.java:154)
  at org.apache.hudi.hive.HiveSyncTool.syncHoodieTable(HiveSyncTool.java:108)
  at org.apache.hudi.HoodieSparkSqlWriter$.org$apache$hudi$HoodieSparkSqlWriter$$syncHive(HoodieSparkSqlWriter.scala:391)
  at org.apache.hudi.HoodieSparkSqlWriter$$anonfun$metaSync$2.apply(HoodieSparkSqlWriter.scala:440)
  at org.apache.hudi.HoodieSparkSqlWriter$$anonfun$metaSync$2.apply(HoodieSparkSqlWriter.scala:436)
  at scala.collection.mutable.HashSet.foreach(HashSet.scala:78)
  at org.apache.hudi.HoodieSparkSqlWriter$.metaSync(HoodieSparkSqlWriter.scala:436)
  at org.apache.hudi.HoodieSparkSqlWriter$.commitAndPerformPostOperations(HoodieSparkSqlWriter.scala:497)
  at org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:222)
  at org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:145)
  at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:45)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:86)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
  at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
  at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
  at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
  at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
  at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
  at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
  at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
  at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:676)
  at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:285)
  at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:271)
  at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:229)
  at com.dmall.data.ingest.launcher.HudiHiveAllIngestApp$.syncTable(HudiHiveAllIngestApp.scala:79)
  at com.dmall.data.ingest.launcher.HudiHiveAllIngestApp$.main(HudiHiveAllIngestApp.scala:43)
  at com.dmall.data.ingest.launcher.HudiHiveAllIngestApp.main(HudiHiveAllIngestApp.scala)
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:684)
Caused by: java.lang.ClassNotFoundException: org.json.JSONException
  at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
  at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
  at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
  at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
  ... 55 more
{code}
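The trace shows the failure happens inside Hive's embedded Driver (HoodieHiveClient.updateHiveSQLUsingHiveDriver), which is the code path taken when hoodie.datasource.hive_sync.use_jdbc = false: Hive's SemanticAnalyzer references org.json.JSONException, so the org.json classes must be on the Spark driver's classpath in that mode. A minimal workaround sketch, assuming a reachable HiveServer2 (the JDBC URL below is a placeholder, not from this report), is to sync over JDBC so the in-process Hive driver is never used:

{code:scala}
import org.apache.hudi.DataSourceWriteOptions

// Sketch: same write as above, but Hive sync goes through HiveServer2 over
// JDBC, avoiding the embedded Hive Driver that needs org.json on the classpath.
val writerViaJdbc = convertDF.write.format("hudi")
  .option(DataSourceWriteOptions.META_SYNC_ENABLED_OPT_KEY, "true")
  .option(DataSourceWriteOptions.HIVE_DATABASE_OPT_KEY, parameter.hudiDb)
  .option(DataSourceWriteOptions.HIVE_TABLE_OPT_KEY, parameter.hudiTable)
  .option(DataSourceWriteOptions.HIVE_USE_JDBC_OPT_KEY, "true")
  // Placeholder URL -- substitute the real HiveServer2 address.
  .option(DataSourceWriteOptions.HIVE_URL_OPT_KEY, "jdbc:hive2://hiveserver2-host:10000")
{code}

Alternatively, use_jdbc = false can be kept if the org.json jar that the cluster's Hive build depends on is shipped with the job (for example via spark-submit --jars); which artifact provides it depends on the Hive distribution.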

  was:
Hive meta sync fails while Spark is writing a Hudi table.

Spark version: 2.4.6

Hudi version: 0.8.0

hoodie.datasource.hive_sync.use_jdbc = false
{code:scala}
val writer = convertDF.write.format("hudi")
  .option(DataSourceWriteOptions.OPERATION_OPT_KEY, "bulk_insert")
  .option(DataSourceWriteOptions.PRECOMBINE_FIELD_OPT_KEY, "_unidata_version")
  .option(DataSourceWriteOptions.RECORDKEY_FIELD_OPT_KEY, "unidata_vkey_id")
  .option(HoodieWriteConfig.TABLE_NAME, parameter.hudiTable)
  .option(DataSourceWriteOptions.META_SYNC_ENABLED_OPT_KEY, "true")
  .option(DataSourceWriteOptions.HIVE_DATABASE_OPT_KEY, parameter.hudiDb)
  .option(DataSourceWriteOptions.HIVE_TABLE_OPT_KEY, parameter.hudiTable)
  .option(DataSourceWriteOptions.HIVE_USE_JDBC_OPT_KEY, "false")
  .option(HoodieWriteConfig.BULKINSERT_PARALLELISM, 12)
{code}
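The builder above only configures the writer; per the stack trace, the error surfaces when the write executes and the commit triggers meta sync (HoodieSparkSqlWriter.metaSync -> HiveSyncTool). A hypothetical completion of the snippet (the save mode and base-path field are assumptions, they do not appear in the report):

{code:scala}
import org.apache.spark.sql.SaveMode

// Assumed invocation: executing the save is what reaches
// HoodieSparkSqlWriter.commitAndPerformPostOperations -> metaSync -> HiveSyncTool.
writer.mode(SaveMode.Append).save(parameter.hudiBasePath) // basePath field name assumed
{code}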

 


> HiveSyncTool java.lang.NoClassDefFoundError: org/json/JSONException
> --------------------------------------------------------------------
>
>                 Key: HUDI-2183
>                 URL: https://issues.apache.org/jira/browse/HUDI-2183
>             Project: Apache Hudi
>          Issue Type: Bug
>          Components: Spark Integration
>            Reporter: min.jia
>            Priority: Major
>             Fix For: 0.8.0
>



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
