I followed the README.md guide of hudi-examples-dbt, installed dbt and the dbt-spark 
plugin, set up the `spark` profile, etc. 
The `dbt debug` command runs successfully, but I got the following error when 
executing the `dbt run` command. 
What's wrong? Is there anything I missed? Thanks!



$ dbt run

02:49:38  Running with dbt=1.2.0

02:49:38  Found 5 models, 10 tests, 0 snapshots, 0 analyses, 293 macros, 0 
operations, 0 seed files, 0 sources, 0 exposures, 0 metrics

02:49:38  

02:49:38  Concurrency: 1 threads (target='dev')

02:49:38  

02:49:38  1 of 5 START incremental model public.hudi_insert_table 
........................ [RUN]

02:49:38  1 of 5 ERROR creating incremental model public.hudi_insert_table 
............... [ERROR in 0.17s]

02:49:38  2 of 5 SKIP relation public.hudi_insert_overwrite_table 
........................ [SKIP]

02:49:38  3 of 5 SKIP relation public.hudi_upsert_table 
.................................. [SKIP]

02:49:38  4 of 5 SKIP relation public.hudi_upsert_partitioned_cow_table 
.................. [SKIP]

02:49:38  5 of 5 SKIP relation public.hudi_upsert_partitioned_mor_table 
.................. [SKIP]

02:49:38  

02:49:38  Finished running 5 incremental models in 0 hours 0 minutes and 0.59 
seconds (0.59s).

02:49:38  

02:49:38  Completed with 1 error and 0 warnings:

02:49:38  

02:49:38  Runtime Error in model hudi_insert_table 
(models/example/hudi_insert_table.sql)

02:49:38    Database Error

02:49:38      org.apache.hive.service.cli.HiveSQLException: Error running 
query: org.apache.hudi.exception.HoodieException: 'hoodie.table.name' must be 
set.

02:49:38      at 
org.apache.spark.sql.hive.thriftserver.HiveThriftServerErrors$.runningQueryError(HiveThriftServerErrors.scala:44)

02:49:38      at 
org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:325)

02:49:38      at 
org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.$anonfun$run$2(SparkExecuteStatementOperation.scala:230)

02:49:38      at 
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)

02:49:38      at 
org.apache.spark.sql.hive.thriftserver.SparkOperation.withLocalProperties(SparkOperation.scala:79)

02:49:38      at 
org.apache.spark.sql.hive.thriftserver.SparkOperation.withLocalProperties$(SparkOperation.scala:63)

02:49:38      at 
org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.withLocalProperties(SparkExecuteStatementOperation.scala:43)

02:49:38      at 
org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:230)

02:49:38      at 
org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2$$anon$3.run(SparkExecuteStatementOperation.scala:225)

02:49:38      at java.security.AccessController.doPrivileged(Native Method)

02:49:38      at javax.security.auth.Subject.doAs(Subject.java:422)

02:49:38      at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)

02:49:38      at 
org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$2.run(SparkExecuteStatementOperation.scala:239)

02:49:38      at 
java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)

02:49:38      at java.util.concurrent.FutureTask.run(FutureTask.java:266)

02:49:38      at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

02:49:38      at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

02:49:38      at java.lang.Thread.run(Thread.java:750)

02:49:38      Caused by: org.apache.hudi.exception.HoodieException: 
'hoodie.table.name' must be set.

02:49:38      at 
org.apache.hudi.common.config.HoodieConfig.getStringOrThrow(HoodieConfig.java:218)

02:49:38      at 
org.apache.hudi.HoodieSparkSqlWriter$.write(HoodieSparkSqlWriter.scala:96)

02:49:38      at 
org.apache.hudi.DefaultSource.createRelation(DefaultSource.scala:171)

02:49:38      at 
org.apache.spark.sql.execution.datasources.DataSource.writeAndRead(DataSource.scala:519)

02:49:38      at 
org.apache.spark.sql.execution.command.CreateDataSourceTableAsSelectCommand.saveDataIntoTable(createDataSourceTables.scala:228)

02:49:38      at 
org.apache.spark.sql.execution.command.CreateDataSourceTableAsSelectCommand.run(createDataSourceTables.scala:182)

02:49:38      at 
org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:113)

02:49:38      at 
org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:111)

02:49:38      at 
org.apache.spark.sql.execution.command.DataWritingCommandExec.executeCollect(commands.scala:125)

02:49:38      at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:110)

02:49:38      at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)

02:49:38      at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)

02:49:38      at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)

02:49:38      at 
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)

02:49:38      at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)

02:49:38      at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:110)

02:49:38      at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:106)

02:49:38      at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:481)

02:49:38      at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:82)

02:49:38      at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:481)

02:49:38      at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)

02:49:38      at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)

02:49:38      at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)

02:49:38      at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)

02:49:38      at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)

02:49:38      at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:457)

02:49:38      at 
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:106)

02:49:38      at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:93)

02:49:38      at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:91)

02:49:38      at org.apache.spark.sql.Dataset.<init>(Dataset.scala:219)

02:49:38      at 
org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)

02:49:38      at 
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)

02:49:38      at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)

02:49:38      at 
org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:618)

02:49:38      at 
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)

02:49:38      at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:613)

02:49:38      at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:651)

02:49:38      at 
org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:291)

02:49:38      ... 16 more

02:49:38      

02:49:38  

02:49:38  Done. PASS=0 WARN=0 ERROR=1 SKIP=4 TOTAL=5


Reply via email to