fujianhua168 opened a new issue, #6297:
URL: https://github.com/apache/hudi/issues/6297

   **Describe the problem you faced**
      I created a COW table named 'hudi_cow_tbl' and a MOR table named 
'hudi_mor_tbl' via the Flink SQL client, then inserted row 
data into both. After that, when I query the two tables, the result is: the MOR table queries normally, 
but querying the COW table 'hudi_cow_tbl' throws an error. The error is as below: 
   
   org.apache.flink.runtime.JobException: Recovery is suppressed by 
NoRestartBackoffTimeStrategy
        at 
org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:138)
        at 
org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:82)
        at 
org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:252)
        at 
org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:242)
        at 
org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:233)
        at 
org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:684)
        at 
org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:79)
        at 
org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:444)
        at sun.reflect.GeneratedMethodAccessor38.invoke(Unknown Source)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.lambda$handleRpcInvocation$1(AkkaRpcActor.java:316)
        at 
org.apache.flink.runtime.concurrent.akka.ClassLoadingUtils.runWithContextClassLoader(ClassLoadingUtils.java:83)
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:314)
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:217)
        at 
org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:78)
        at 
org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:163)
        at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:24)
        at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:20)
        at scala.PartialFunction.applyOrElse(PartialFunction.scala:123)
        at scala.PartialFunction.applyOrElse$(PartialFunction.scala:122)
        at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:20)
        at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
        at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
        at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:172)
        at akka.actor.Actor.aroundReceive(Actor.scala:537)
        at akka.actor.Actor.aroundReceive$(Actor.scala:535)
        at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:220)
        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:580)
        at akka.actor.ActorCell.invoke(ActorCell.scala:548)
        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:270)
        at akka.dispatch.Mailbox.run(Mailbox.scala:231)
        at akka.dispatch.Mailbox.exec(Mailbox.scala:243)
        at java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:289)
        at 
java.util.concurrent.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1056)
        at java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1692)
        at 
java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:157)
   Caused by: java.lang.LinkageError: loader constraint violation: loader 
(instance of sun/misc/Launcher$AppClassLoader) previously initiated loading for 
a different type with name "org/apache/parquet/column/ColumnDescriptor"
        at java.lang.ClassLoader.defineClass1(Native Method)
        at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
        at 
java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
        at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
        at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:335)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        at 
org.apache.flink.formats.parquet.vector.reader.AbstractColumnReader.<init>(AbstractColumnReader.java:108)
        at 
org.apache.flink.formats.parquet.vector.reader.BytesColumnReader.<init>(BytesColumnReader.java:35)
        at 
org.apache.hudi.table.format.cow.ParquetSplitReaderUtil.createColumnReader(ParquetSplitReaderUtil.java:332)
        at 
org.apache.hudi.table.format.cow.ParquetSplitReaderUtil.createColumnReader(ParquetSplitReaderUtil.java:297)
        at 
org.apache.hudi.table.format.cow.vector.reader.ParquetColumnarRowSplitReader.readNextRowGroup(ParquetColumnarRowSplitReader.java:329)
        at 
org.apache.hudi.table.format.cow.vector.reader.ParquetColumnarRowSplitReader.nextBatch(ParquetColumnarRowSplitReader.java:305)
        at 
org.apache.hudi.table.format.cow.vector.reader.ParquetColumnarRowSplitReader.ensureBatch(ParquetColumnarRowSplitReader.java:287)
        at 
org.apache.hudi.table.format.cow.vector.reader.ParquetColumnarRowSplitReader.reachedEnd(ParquetColumnarRowSplitReader.java:266)
        at 
org.apache.hudi.table.format.cow.CopyOnWriteInputFormat.reachedEnd(CopyOnWriteInputFormat.java:274)
        at 
org.apache.flink.streaming.api.functions.source.InputFormatSourceFunction.run(InputFormatSourceFunction.java:89)
        at 
org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:110)
        at 
org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:67)
        at 
org.apache.flink.streaming.runtime.tasks.SourceStreamTask$LegacySourceFunctionThread.run(SourceStreamTask.java:323)
   
   
   
   **To Reproduce**
    I can only reproduce the COW table behavior:
        step1: one session window,execute follow shell command to open a 
yarn-session-cluster: 
                 cd /opt/apache/flink  
                 bin/yarn-session.sh -m yarn-cluster -nm flink_test -qu default
   
        step2:  open the flink sql client at another session window:
                cd /opt/apache/flink
                bin/sql-client.sh embedded -s yarn-session -j 
./lib/hudi-flink1.14-bundle_2.11-0.11.1.jar shell
   
        step3:   reproduce the error:
             CREATE CATALOG myhive WITH (
               'type' = 'hive',
               'default-database' = 'default',
               'hive-conf-dir' = '/opt/apache/hive/conf/',
               'hadoop-conf-dir'='/opt/apache/hadoop/etc/hadoop/'
             );
             USE CATALOG myhive;
             use flink_demo;
             drop table hudi_cow_tbl;
             CREATE TABLE hudi_cow_tbl(
               uuid VARCHAR(20) PRIMARY KEY NOT ENFORCED,
               name VARCHAR(10),
               age INT,
               ts TIMESTAMP(3)
             )
             WITH (
               'connector' = 'hudi',
               'path' = 
'hdfs://bigbigworld/user/hive/warehouse/flink_demo.db/hudi_cow_tbl'
             );
            INSERT INTO hudi_cow_tbl VALUES
           ('id1','Danny',23,TIMESTAMP '1970-01-01 00:00:01'),
           ('id2','Stephen',33,TIMESTAMP '1970-01-01 00:00:02'),
           ('id3','Julian',53,TIMESTAMP '1970-01-01 00:00:03'),
           ('id4','Fabian',31,TIMESTAMP '1970-01-01 00:00:04'),
           ('id5','Sophia',18,TIMESTAMP '1970-01-01 00:00:05'),
           ('id6','Emma',20,TIMESTAMP '1970-01-01 00:00:06'),
           ('id7','Bob',44,TIMESTAMP '1970-01-01 00:00:07'),
           ('id8','Han',56,TIMESTAMP '1970-01-01 00:00:08');
           select * from hudi_cow_tbl; -- after about 5~8s, the error above occurs.
           
     
   **Expected behavior**
         The COW table should be queryable just like the MOR table. Why can the MOR table be queried normally while the COW table query fails?
      
   **Environment Description**
   * Hudi version : 0.11.1 (per hudi-flink1.14-bundle_2.11-0.11.1.jar above)
   * Spark version :3.2.1
   * Hive version :3.1.0
   * Hadoop version :3.1.1
   * Storage (HDFS/S3/GCS..) : HDFS
   * Running on Docker? (yes/no) :no 
   * Scala version:2.11
   * flink lib dir as below:
   *
       -rw-r--r-- 1 admin admin    167761 Aug  1 19:48 antlr-runtime-3.5.2.jar
       -rw-r--r-- 1 admin admin     85584 Jan 11  2022 flink-csv-1.14.3.jar
       -rw-r--r-- 1 admin admin 143657701 Jan 11  2022 
flink-dist_2.11-1.14.3.jar
       -rw-r--r-- 1 admin admin    153145 Jan 11  2022 flink-json-1.14.3.jar
       -rw-r--r-- 1 admin admin   7709731 Aug 22  2021 
flink-shaded-zookeeper-3.4.14.jar
       -rw-r--r-- 1 admin admin  48743474 Aug  1 19:43 
flink-sql-connector-hive-3.1.2_2.11-1.14.4.jar
       -rw-r--r-- 1 admin admin  42307236 Jan 11  2022 
flink-table_2.11-1.14.3.jar
       -rw-r--r-- 1 admin admin    210856 Aug  3 20:39 gson-2.3.1.jar
       -rw-r--r-- 1 admin admin   4034639 Aug  1 20:22 hadoop-common-3.1.1.jar
       -rw-r--r-- 1 admin admin   2838974 Aug  1 20:22 
hadoop-common-3.1.1-tests.jar
       -rw-r--r-- 1 admin admin    130026 Aug  1 20:22 hadoop-kms-3.1.1.jar
       -rw-r--r-- 1 admin admin    612158 Aug  1 20:20 
hadoop-mapreduce-client-app-3.1.1.jar
       -rw-r--r-- 1 admin admin    804003 Aug  1 20:20 
hadoop-mapreduce-client-common-3.1.1.jar
       -rw-r--r-- 1 admin admin   1654887 Aug  1 20:20 
hadoop-mapreduce-client-core-3.1.1.jar
       -rw-r--r-- 1 admin admin    215150 Aug  1 20:20 
hadoop-mapreduce-client-hs-3.1.1.jar
       -rw-r--r-- 1 admin admin     45334 Aug  1 20:20 
hadoop-mapreduce-client-hs-plugins-3.1.1.jar
       -rw-r--r-- 1 admin admin     85395 Aug  1 20:20 
hadoop-mapreduce-client-jobclient-3.1.1.jar
       -rw-r--r-- 1 admin admin   1659423 Aug  1 20:20 
hadoop-mapreduce-client-jobclient-3.1.1-tests.jar
       -rw-r--r-- 1 admin admin    126145 Aug  1 20:20 
hadoop-mapreduce-client-nativetask-3.1.1.jar
       -rw-r--r-- 1 admin admin     97157 Aug  1 20:20 
hadoop-mapreduce-client-shuffle-3.1.1.jar
       -rw-r--r-- 1 admin admin     56749 Aug  1 20:20 
hadoop-mapreduce-client-uploader-3.1.1.jar
       -rw-r--r-- 1 admin admin    316297 Aug  1 20:20 
hadoop-mapreduce-examples-3.1.1.jar
       -rw-r--r-- 1 admin admin    201617 Aug  1 20:22 hadoop-nfs-3.1.1.jar
       -rw-r--r-- 1 admin admin  40603465 Aug  1 19:46 hive-exec-3.1.0.jar
       -rw-r--r-- 1 admin admin  94727796 Aug  4 11:22 
hudi-flink1.14-bundle_2.11-0.11.1.jar
       -rw-r--r-- 1 admin admin    313702 Aug  1 19:47 libfb303-0.9.3.jar
       -rw-r--r-- 1 admin admin    208006 Jan  9  2022 log4j-1.2-api-2.17.1.jar
       -rw-r--r-- 1 admin admin    301872 Jan  9  2022 log4j-api-2.17.1.jar
       -rw-r--r-- 1 admin admin   1790452 Jan  9  2022 log4j-core-2.17.1.jar
       -rw-r--r-- 1 admin admin     24279 Jan  9  2022 
log4j-slf4j-impl-2.17.1.jar
   
   
   
   **Stacktrace**
   
   
   
   


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@hudi.apache.org.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

Reply via email to