[ 
https://issues.apache.org/jira/browse/HIVE-27947?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17795429#comment-17795429
 ] 

Denys Kuzmenko commented on HIVE-27947:
---------------------------------------

we should generate a report listing all the tests that need to be fixed, 
once their flakiness has been discovered and they have been disabled 

> Fix flaky test testBootstrapAcidTablesDuringIncrementalWithConcurrentWrites
> ---------------------------------------------------------------------------
>
>                 Key: HIVE-27947
>                 URL: https://issues.apache.org/jira/browse/HIVE-27947
>             Project: Hive
>          Issue Type: Improvement
>            Reporter: Butao Zhang
>            Priority: Major
>
> {color:#172b4d}testBootstrapAcidTablesDuringIncrementalWithConcurrentWrites 
> is flaky and time-consuming.
> {color}
> {color:#ff0000}*Flaky test report*{color}: 
> [http://ci.hive.apache.org/job/hive-flaky-check/756/testReport/]
>  
> h3. Error Message
> {code:java}
> ORC split generation failed with exception: 
> org.apache.orc.FileFormatException: Malformed ORC file 
> hdfs://localhost:42059/warehouse1/replicated_testbootstrapacidtablesduringincrementalwithconcurrentwrites_1701966660287.db/t1/-tmp.delta_0000003_0000003_0000/000002_0.manifest.
>  Invalid postscript.{code}
> h3. Stacktrace
> {code:java}
> java.lang.RuntimeException: ORC split generation failed with exception: 
> org.apache.orc.FileFormatException: Malformed ORC file 
> hdfs://localhost:42059/warehouse1/replicated_testbootstrapacidtablesduringincrementalwithconcurrentwrites_1701966660287.db/t1/-tmp.delta_0000003_0000003_0000/000002_0.manifest.
>  Invalid postscript. at 
> org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.generateSplitsInfo(OrcInputFormat.java:1884)
>  at 
> org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.getSplits(OrcInputFormat.java:1971)
>  at 
> org.apache.hadoop.hive.ql.io.HiveInputFormat.addSplitsForGroup(HiveInputFormat.java:563)
>  at 
> org.apache.hadoop.hive.ql.io.HiveInputFormat.getSplits(HiveInputFormat.java:857)
>  at 
> org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.getSplits(CombineHiveInputFormat.java:544)
>  at 
> org.apache.hadoop.mapreduce.JobSubmitter.writeOldSplits(JobSubmitter.java:338)
>  at 
> org.apache.hadoop.mapreduce.JobSubmitter.writeSplits(JobSubmitter.java:329) 
> at 
> org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:200)
>  at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1678) at 
> org.apache.hadoop.mapreduce.Job$11.run(Job.java:1675) at 
> java.security.AccessController.doPrivileged(Native Method) at 
> javax.security.auth.Subject.doAs(Subject.java:422) at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)
>  at org.apache.hadoop.mapreduce.Job.submit(Job.java:1675) at 
> org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:576) at 
> org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:571) at 
> java.security.AccessController.doPrivileged(Native Method) at 
> javax.security.auth.Subject.doAs(Subject.java:422) at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)
>  at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:571) 
> at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:562) at 
> org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:416) at 
> org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:158) at 
> org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:214) at 
> org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105) 
> at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:354) at 
> org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:327) at 
> org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:244) at 
> org.apache.hadoop.hive.ql.Executor.execute(Executor.java:105) at 
> org.apache.hadoop.hive.ql.Driver.execute(Driver.java:367) at 
> org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:205) at 
> org.apache.hadoop.hive.ql.Driver.run(Driver.java:154) at 
> org.apache.hadoop.hive.ql.Driver.run(Driver.java:149) at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:185) at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:228) at 
> org.apache.hadoop.hive.ql.parse.WarehouseInstance.run(WarehouseInstance.java:236)
>  at 
> org.apache.hadoop.hive.ql.parse.BaseReplicationScenariosAcidTables.verifyIncAcidLoad(BaseReplicationScenariosAcidTables.java:244)
>  at 
> org.apache.hadoop.hive.ql.parse.BaseReplicationScenariosAcidTables.verifyIncLoad(BaseReplicationScenariosAcidTables.java:323)
>  at 
> org.apache.hadoop.hive.ql.parse.TestReplicationScenariosAcidTablesBootstrap.testBootstrapAcidTablesDuringIncrementalWithConcurrentWrites(TestReplicationScenariosAcidTablesBootstrap.java:305)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
> at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498) at 
> org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
>  at 
> org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>  at 
> org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
>  at 
> org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>  at 
> org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) 
> at 
> org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) 
> at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:61) at 
> org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) at 
> org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
>  at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
>  at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
>  at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) at 
> org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) at 
> org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) at 
> org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) at 
> org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) at 
> org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) 
> at 
> org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) 
> at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) at 
> org.junit.runners.ParentRunner.run(ParentRunner.java:413) at 
> org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
>  at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
>  at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
>  at 
> org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
>  at 
> org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:377)
>  at 
> org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:138) 
> at org.apache.maven.surefire.booter.ForkedBooter.run(ForkedBooter.java:465) 
> at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:451) 
> Caused by: java.util.concurrent.ExecutionException: 
> org.apache.orc.FileFormatException: Malformed ORC file 
> hdfs://localhost:42059/warehouse1/replicated_testbootstrapacidtablesduringincrementalwithconcurrentwrites_1701966660287.db/t1/-tmp.delta_0000003_0000003_0000/000002_0.manifest.
>  Invalid postscript. at 
> java.util.concurrent.FutureTask.report(FutureTask.java:122) at 
> java.util.concurrent.FutureTask.get(FutureTask.java:192) at 
> org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.generateSplitsInfo(OrcInputFormat.java:1878)
>  ... 71 more{code}



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to