[ https://issues.apache.org/jira/browse/HUDI-4475?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Forward Xu updated HUDI-4475:
-----------------------------
    Summary: fix create table when hoodie.properties file does not exist  (was: fix create table with no hoodie properties file)

> fix create table when hoodie.properties file does not exist
> -----------------------------------------------------------
>
>                 Key: HUDI-4475
>                 URL: https://issues.apache.org/jira/browse/HUDI-4475
>             Project: Apache Hudi
>          Issue Type: Bug
>          Components: spark-sql
>    Affects Versions: 0.12.0
>            Reporter: Forward Xu
>            Assignee: Forward Xu
>            Priority: Major
>              Labels: pull-request-available
>
> connectionId = de331057-be4e-4efa-8a7d-32ea36f81e9a, statementId = 0 -> SQLException: java.util.concurrent.ExecutionException: java.lang.RuntimeException: org.apache.hudi.exception.HoodieIOException: Could not load Hoodie properties from hdfs://XXXXXX/.hoodie/hoodie.properties
>     at org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:256)
>     at org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:125)
>     at org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:78)
>     at org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:668)
>     at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.metaClient$lzycompute(HoodieCatalogTable.scala:75)
>     at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.metaClient(HoodieCatalogTable.scala:73)
>     at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.tableConfig$lzycompute(HoodieCatalogTable.scala:81)
>     at org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable.tableConfig(HoodieCatalogTable.scala:81)
>     at org.apache.spark.sql.hudi.command.CreateHoodieTableCommand$.validateTblProperties(CreateHoodieTableCommand.scala:82)
>     at org.apache.spark.sql.hudi.command.CreateHoodieTableCommand.run(CreateHoodieTableCommand.scala:62)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
>     at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
>     at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:228)
>     at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3689)
>     at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:105)
>     at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:172)
>     at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:92)
>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:801)
>     at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:66)
>     at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3687)
>     at org.apache.spark.sql.Dataset.<init>(Dataset.scala:228)
>     at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:801)
>     at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
>     at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:623)
>     at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:801)
>     at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:616)
>     at org.apache.livy.thriftserver.session.SqlJob.executeSql(SqlJob.java:87)
>     at org.apache.livy.thriftserver.session.SqlJob.call(SqlJob.java:67)
>     at org.apache.livy.thriftserver.session.SqlJob.call(SqlJob.java:35)
>     at org.apache.livy.rsc.driver.JobWrapper.call(JobWrapper.java:82)
>     at org.apache.livy.rsc.driver.JobWrapper.call(JobWrapper.java:34)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>     at java.lang.Thread.run(Thread.java:748)
> Caused by: java.io.FileNotFoundException: File does not exist: /XXXXXX/.hoodie/hoodie.properties.backup
>     at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:86)
>     at org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:76)
>     at org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getBlockLocations(FSDirStatAndListingOp.java:156)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1995)
>     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:797)
>     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:290)
>     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:529)
>     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
>     at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1039)
>     at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:963)
>     at java.base/java.security.AccessController.doPrivileged(Native Method)
>     at java.base/javax.security.auth.Subject.doAs(Subject.java:423)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:2034)
>     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:3047)
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>     at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:121)
>     at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:88)
>     at org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1015)
>     at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1002)
>     at org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:991)
>     at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1229)
>     at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:350)
>     at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:346)
>     at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
>     at org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:358)
>     at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:811)
>     at org.apache.hudi.common.fs.HoodieWrapperFileSystem.open(HoodieWrapperFileSystem.java:460)
>     at org.apache.hudi.common.table.HoodieTableConfig.fetchConfigs(HoodieTableConfig.java:310)
>     at org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:246)
>     ... 36 more
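>
> From the trace, HoodieTableConfig.fetchConfigs first tries to read
> .hoodie/hoodie.properties and, failing that, opens
> .hoodie/hoodie.properties.backup; when neither file exists, the
> FileNotFoundException surfaces as a HoodieIOException, and CREATE TABLE
> fails because CreateHoodieTableCommand builds the HoodieTableMetaClient
> unconditionally. A minimal Scala sketch of the kind of pre-check that
> would avoid this; the helper name hoodiePropertiesExists is hypothetical
> and this is not the actual HUDI-4475 patch:
>
>     import org.apache.hadoop.fs.Path
>     import org.apache.spark.sql.SparkSession
>
>     // Hypothetical guard: check that hoodie.properties is actually
>     // present before building the meta client, so CREATE TABLE can
>     // fall back to initializing a fresh table config instead of
>     // surfacing HoodieIOException from the missing backup file.
>     def hoodiePropertiesExists(spark: SparkSession, tablePath: String): Boolean = {
>       val propsPath = new Path(tablePath, ".hoodie/hoodie.properties")
>       val fs = propsPath.getFileSystem(spark.sessionState.newHadoopConf())
>       fs.exists(propsPath)
>     }
>
> With a check along these lines in the create-table path, a table
> location whose .hoodie/hoodie.properties was lost could be
> re-initialized rather than rejected.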



