zdl11111 opened a new issue, #10306:
URL: https://github.com/apache/hudi/issues/10306

   
   **Describe the problem you faced**
   When I enable the metadata table by setting `metadata.enabled` to `true` in the Flink table options, Hudi 
fails to complete the delta_commit and the Flink job keeps restarting.
   
   **To Reproduce**
   
   Steps to reproduce the behavior:
   
   1. start flink sql client
   ```
   create table test1 (
       c1 int primary key,
       c2 int,
       c3 int
   ) with (
       'connector' = 'hudi',
       'path' = 'hdfs:/flink/test1',
       'table.type' = 'MERGE_ON_READ',
       'metadata.enabled' = 'true'
   );
   ```
   2.
   ```
   create table datagen1 (
       c1 int,
       c2 int,
       c3 int
   ) with (
       'connector' = 'datagen',
       'number-of-rows' = '300',
       'rows-per-second' = '10'
   );
   ```
   3.
   ```
   SET execution.checkpointing.interval=1000;
   insert into test1 select * from datagen1;
   ```
   
   **Expected behavior**
   
   With `metadata.enabled = 'true'`, the insert should complete the delta_commits successfully on each checkpoint, without the job restarting.
   
   **Environment Description**
   
   * Hudi version : 0.13.1
   
   * Flink version : 1.14.3 (per `flink-dist_2.12-1.14.3.jar` in the stacktrace)
   
   * Hive version :
   
   * Hadoop version : 
   
   * Storage (HDFS/S3/GCS..) : HDFS
   
   * Running on Docker? (yes/no) : no
   
   
   
   **Stacktrace**
   
   ```
   2023-12-12 11:07:55,633 INFO  
org.apache.flink.streaming.api.functions.source.datagen.DataGeneratorSource [] 
- generated 10 rows
   2023-12-12 11:07:55,633 WARN  
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory        [] - I/O error 
constructing remote block reader.
   java.nio.channels.ClosedByInterruptException: null
        at 
java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:202)
 ~[?:1.8.0_382]
        at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:658) 
~[?:1.8.0_382]
        at 
org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:192) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:2946) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:815)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:740)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.build(BlockReaderFactory.java:385)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.getBlockReader(DFSInputStream.java:696) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:655) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:926) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:982) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at java.io.DataInputStream.read(DataInputStream.java:149) ~[?:1.8.0_382]
        at java.io.DataInputStream.read(DataInputStream.java:100) ~[?:1.8.0_382]
        at java.util.Properties$LineReader.readLine(Properties.java:435) 
~[?:1.8.0_382]
        at java.util.Properties.load0(Properties.java:353) ~[?:1.8.0_382]
        at java.util.Properties.load(Properties.java:341) ~[?:1.8.0_382]
        at 
org.apache.hudi.common.table.HoodieTableConfig.fetchConfigs(HoodieTableConfig.java:351)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:284)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.newMetaClient(HoodieTableMetaClient.java:689)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.access$000(HoodieTableMetaClient.java:81)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:770)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.table.HoodieFlinkTable.create(HoodieFlinkTable.java:62) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.getTable(WriteProfile.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.DeltaWriteProfile.getFileSystemView(DeltaWriteProfile.java:93)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.reload(WriteProfile.java:262)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssigner.reload(BucketAssigner.java:210) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssignFunction.notifyCheckpointComplete(BucketAssignFunction.java:242)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.notifyCheckpointComplete(AbstractUdfStreamOperator.java:126)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.notifyCheckpointComplete(StreamOperatorWrapper.java:99)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.notifyCheckpointComplete(RegularOperatorChain.java:152)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.notifyCheckpointComplete(SubtaskCheckpointCoordinatorImpl.java:348)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.notifyCheckpointComplete(StreamTask.java:1426)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointCompleteAsync$16(StreamTask.java:1374)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointOperation$18(StreamTask.java:1406)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.runThrowing(StreamTaskActionExecutor.java:50)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMailsWhenDefaultActionUnavailable(MailboxProcessor.java:338)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:324)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:201)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:809)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:761) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
 [flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:937) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.run(Task.java:575) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at java.lang.Thread.run(Thread.java:750) [?:1.8.0_382]
   2023-12-12 11:07:55,634 WARN  org.apache.hadoop.hdfs.DFSClient               
              [] - Failed to connect to /10.0.196.245:2022 for block, add to 
deadNodes and continue. java.nio.channels.ClosedByInterruptException
   java.nio.channels.ClosedByInterruptException: null
        at 
java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:202)
 ~[?:1.8.0_382]
        at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:658) 
~[?:1.8.0_382]
        at 
org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:192) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:2946) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:815)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:740)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.build(BlockReaderFactory.java:385)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.getBlockReader(DFSInputStream.java:696) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:655) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:926) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:982) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at java.io.DataInputStream.read(DataInputStream.java:149) ~[?:1.8.0_382]
        at java.io.DataInputStream.read(DataInputStream.java:100) ~[?:1.8.0_382]
        at java.util.Properties$LineReader.readLine(Properties.java:435) 
~[?:1.8.0_382]
        at java.util.Properties.load0(Properties.java:353) ~[?:1.8.0_382]
        at java.util.Properties.load(Properties.java:341) ~[?:1.8.0_382]
        at 
org.apache.hudi.common.table.HoodieTableConfig.fetchConfigs(HoodieTableConfig.java:351)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:284)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.newMetaClient(HoodieTableMetaClient.java:689)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.access$000(HoodieTableMetaClient.java:81)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:770)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.table.HoodieFlinkTable.create(HoodieFlinkTable.java:62) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.getTable(WriteProfile.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.DeltaWriteProfile.getFileSystemView(DeltaWriteProfile.java:93)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.reload(WriteProfile.java:262)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssigner.reload(BucketAssigner.java:210) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssignFunction.notifyCheckpointComplete(BucketAssignFunction.java:242)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.notifyCheckpointComplete(AbstractUdfStreamOperator.java:126)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.notifyCheckpointComplete(StreamOperatorWrapper.java:99)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.notifyCheckpointComplete(RegularOperatorChain.java:152)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.notifyCheckpointComplete(SubtaskCheckpointCoordinatorImpl.java:348)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.notifyCheckpointComplete(StreamTask.java:1426)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointCompleteAsync$16(StreamTask.java:1374)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointOperation$18(StreamTask.java:1406)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.runThrowing(StreamTaskActionExecutor.java:50)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMailsWhenDefaultActionUnavailable(MailboxProcessor.java:338)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:324)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:201)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:809)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:761) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
 [flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:937) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.run(Task.java:575) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at java.lang.Thread.run(Thread.java:750) [?:1.8.0_382]
   2023-12-12 11:07:55,634 WARN  
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory        [] - I/O error 
constructing remote block reader.
   java.nio.channels.ClosedByInterruptException: null
        at 
java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:202)
 ~[?:1.8.0_382]
        at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:658) 
~[?:1.8.0_382]
        at 
org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:192) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:2946) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:815)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:740)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.build(BlockReaderFactory.java:385)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.getBlockReader(DFSInputStream.java:696) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:655) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:926) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:982) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at java.io.DataInputStream.read(DataInputStream.java:149) ~[?:1.8.0_382]
        at java.io.DataInputStream.read(DataInputStream.java:100) ~[?:1.8.0_382]
        at java.util.Properties$LineReader.readLine(Properties.java:435) 
~[?:1.8.0_382]
        at java.util.Properties.load0(Properties.java:353) ~[?:1.8.0_382]
        at java.util.Properties.load(Properties.java:341) ~[?:1.8.0_382]
        at 
org.apache.hudi.common.table.HoodieTableConfig.fetchConfigs(HoodieTableConfig.java:351)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:284)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.newMetaClient(HoodieTableMetaClient.java:689)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.access$000(HoodieTableMetaClient.java:81)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:770)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.table.HoodieFlinkTable.create(HoodieFlinkTable.java:62) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.getTable(WriteProfile.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.DeltaWriteProfile.getFileSystemView(DeltaWriteProfile.java:93)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.reload(WriteProfile.java:262)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssigner.reload(BucketAssigner.java:210) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssignFunction.notifyCheckpointComplete(BucketAssignFunction.java:242)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.notifyCheckpointComplete(AbstractUdfStreamOperator.java:126)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.notifyCheckpointComplete(StreamOperatorWrapper.java:99)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.notifyCheckpointComplete(RegularOperatorChain.java:152)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.notifyCheckpointComplete(SubtaskCheckpointCoordinatorImpl.java:348)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.notifyCheckpointComplete(StreamTask.java:1426)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointCompleteAsync$16(StreamTask.java:1374)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointOperation$18(StreamTask.java:1406)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.runThrowing(StreamTaskActionExecutor.java:50)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMailsWhenDefaultActionUnavailable(MailboxProcessor.java:338)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:324)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:201)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:809)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:761) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
 [flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:937) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.run(Task.java:575) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at java.lang.Thread.run(Thread.java:750) [?:1.8.0_382]
   2023-12-12 11:07:55,634 WARN  org.apache.hadoop.hdfs.DFSClient               
              [] - Failed to connect to /10.0.200.131:2022 for block, add to 
deadNodes and continue. java.nio.channels.ClosedByInterruptException
   java.nio.channels.ClosedByInterruptException: null
        at 
java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:202)
 ~[?:1.8.0_382]
        at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:658) 
~[?:1.8.0_382]
        at 
org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:192) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:2946) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:815)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:740)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.build(BlockReaderFactory.java:385)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.getBlockReader(DFSInputStream.java:696) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:655) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:926) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:982) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at java.io.DataInputStream.read(DataInputStream.java:149) ~[?:1.8.0_382]
        at java.io.DataInputStream.read(DataInputStream.java:100) ~[?:1.8.0_382]
        at java.util.Properties$LineReader.readLine(Properties.java:435) 
~[?:1.8.0_382]
        at java.util.Properties.load0(Properties.java:353) ~[?:1.8.0_382]
        at java.util.Properties.load(Properties.java:341) ~[?:1.8.0_382]
        at 
org.apache.hudi.common.table.HoodieTableConfig.fetchConfigs(HoodieTableConfig.java:351)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:284)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.newMetaClient(HoodieTableMetaClient.java:689)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.access$000(HoodieTableMetaClient.java:81)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:770)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.table.HoodieFlinkTable.create(HoodieFlinkTable.java:62) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.getTable(WriteProfile.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.DeltaWriteProfile.getFileSystemView(DeltaWriteProfile.java:93)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.reload(WriteProfile.java:262)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssigner.reload(BucketAssigner.java:210) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssignFunction.notifyCheckpointComplete(BucketAssignFunction.java:242)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.notifyCheckpointComplete(AbstractUdfStreamOperator.java:126)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.notifyCheckpointComplete(StreamOperatorWrapper.java:99)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.notifyCheckpointComplete(RegularOperatorChain.java:152)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.notifyCheckpointComplete(SubtaskCheckpointCoordinatorImpl.java:348)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.notifyCheckpointComplete(StreamTask.java:1426)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointCompleteAsync$16(StreamTask.java:1374)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointOperation$18(StreamTask.java:1406)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.runThrowing(StreamTaskActionExecutor.java:50)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMailsWhenDefaultActionUnavailable(MailboxProcessor.java:338)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:324)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:201)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:809)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:761) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
 [flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:937) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.run(Task.java:575) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at java.lang.Thread.run(Thread.java:750) [?:1.8.0_382]
   2023-12-12 11:07:55,634 WARN  
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory        [] - I/O error 
constructing remote block reader.
   java.nio.channels.ClosedByInterruptException: null
        at 
java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:202)
 ~[?:1.8.0_382]
        at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:658) 
~[?:1.8.0_382]
        at 
org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:192) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:2946) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:815)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:740)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.build(BlockReaderFactory.java:385)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.getBlockReader(DFSInputStream.java:696) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:655) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:926) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:982) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at java.io.DataInputStream.read(DataInputStream.java:149) ~[?:1.8.0_382]
        at java.io.DataInputStream.read(DataInputStream.java:100) ~[?:1.8.0_382]
        at java.util.Properties$LineReader.readLine(Properties.java:435) 
~[?:1.8.0_382]
        at java.util.Properties.load0(Properties.java:353) ~[?:1.8.0_382]
        at java.util.Properties.load(Properties.java:341) ~[?:1.8.0_382]
        at 
org.apache.hudi.common.table.HoodieTableConfig.fetchConfigs(HoodieTableConfig.java:351)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:284)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.newMetaClient(HoodieTableMetaClient.java:689)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.access$000(HoodieTableMetaClient.java:81)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:770)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.table.HoodieFlinkTable.create(HoodieFlinkTable.java:62) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.getTable(WriteProfile.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.DeltaWriteProfile.getFileSystemView(DeltaWriteProfile.java:93)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.reload(WriteProfile.java:262)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssigner.reload(BucketAssigner.java:210) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssignFunction.notifyCheckpointComplete(BucketAssignFunction.java:242)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.notifyCheckpointComplete(AbstractUdfStreamOperator.java:126)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.notifyCheckpointComplete(StreamOperatorWrapper.java:99)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.notifyCheckpointComplete(RegularOperatorChain.java:152)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.notifyCheckpointComplete(SubtaskCheckpointCoordinatorImpl.java:348)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.notifyCheckpointComplete(StreamTask.java:1426)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointCompleteAsync$16(StreamTask.java:1374)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointOperation$18(StreamTask.java:1406)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.runThrowing(StreamTaskActionExecutor.java:50)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMailsWhenDefaultActionUnavailable(MailboxProcessor.java:338)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:324)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:201)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:809)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:761) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
 [flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:937) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.run(Task.java:575) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at java.lang.Thread.run(Thread.java:750) [?:1.8.0_382]
   2023-12-12 11:07:55,634 WARN  org.apache.hadoop.hdfs.DFSClient               
              [] - Failed to connect to /10.0.195.44:2022 for block, add to 
deadNodes and continue. java.nio.channels.ClosedByInterruptException
   java.nio.channels.ClosedByInterruptException: null
        at 
java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:202)
 ~[?:1.8.0_382]
        at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:658) 
~[?:1.8.0_382]
        at 
org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:192) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:2946) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:815)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:740)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.client.impl.BlockReaderFactory.build(BlockReaderFactory.java:385)
 ~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.getBlockReader(DFSInputStream.java:696) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:655) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at 
org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:926) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:982) 
~[flink-shaded-hadoop-2-uber-2.8.3-10.0.jar:2.8.3-10.0]
        at java.io.DataInputStream.read(DataInputStream.java:149) ~[?:1.8.0_382]
        at java.io.DataInputStream.read(DataInputStream.java:100) ~[?:1.8.0_382]
        at java.util.Properties$LineReader.readLine(Properties.java:435) 
~[?:1.8.0_382]
        at java.util.Properties.load0(Properties.java:353) ~[?:1.8.0_382]
        at java.util.Properties.load(Properties.java:341) ~[?:1.8.0_382]
        at 
org.apache.hudi.common.table.HoodieTableConfig.fetchConfigs(HoodieTableConfig.java:351)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableConfig.<init>(HoodieTableConfig.java:284)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.<init>(HoodieTableMetaClient.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.newMetaClient(HoodieTableMetaClient.java:689)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient.access$000(HoodieTableMetaClient.java:81)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.common.table.HoodieTableMetaClient$Builder.build(HoodieTableMetaClient.java:770)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.table.HoodieFlinkTable.create(HoodieFlinkTable.java:62) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.getTable(WriteProfile.java:138)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.DeltaWriteProfile.getFileSystemView(DeltaWriteProfile.java:93)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.profile.WriteProfile.reload(WriteProfile.java:262)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssigner.reload(BucketAssigner.java:210) 
~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.hudi.sink.partitioner.BucketAssignFunction.notifyCheckpointComplete(BucketAssignFunction.java:242)
 ~[hudi-flink1.14-bundle-0.13.1.jar:0.13.1]
        at 
org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator.notifyCheckpointComplete(AbstractUdfStreamOperator.java:126)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamOperatorWrapper.notifyCheckpointComplete(StreamOperatorWrapper.java:99)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.RegularOperatorChain.notifyCheckpointComplete(RegularOperatorChain.java:152)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.SubtaskCheckpointCoordinatorImpl.notifyCheckpointComplete(SubtaskCheckpointCoordinatorImpl.java:348)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.notifyCheckpointComplete(StreamTask.java:1426)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointCompleteAsync$16(StreamTask.java:1374)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$notifyCheckpointOperation$18(StreamTask.java:1406)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$1.runThrowing(StreamTaskActionExecutor.java:50)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.Mail.run(Mail.java:90) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMailsWhenDefaultActionUnavailable(MailboxProcessor.java:338)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.processMail(MailboxProcessor.java:324)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:201)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:809)
 ~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:761) 
~[flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.runWithSystemExitMonitoring(Task.java:958)
 [flink-dist_2.12-1.14.3.jar:1.14.3]
        at 
org.apache.flink.runtime.taskmanager.Task.restoreAndInvoke(Task.java:937) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:766) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at org.apache.flink.runtime.taskmanager.Task.run(Task.java:575) 
[flink-dist_2.12-1.14.3.jar:1.14.3]
        at java.lang.Thread.run(Thread.java:750) [?:1.8.0_382]
   2023-12-12 11:07:55,634 WARN  org.apache.hadoop.hdfs.DFSClient               
              [] - No live nodes contain block 
BP-264188597-10.0.196.245-1702346837962:blk_1073742194_1496 after checking 
nodes = 
[DatanodeInfoWithStorage[10.0.196.245:2022,DS-b4a28ea1-e30f-435d-82dc-0e01df297cae,DISK],
 
DatanodeInfoWithStorage[10.0.200.131:2022,DS-56867662-14ea-484c-9645-ad01f1fffbe6,DISK],
 
DatanodeInfoWithStorage[10.0.195.44:2022,DS-a3b7a3dd-29c8-45d5-a8e0-d4d0527095a9,DISK]],
 ignoredNodes = null
   ```
   
   


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscribe@hudi.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


Reply via email to