[ https://issues.apache.org/jira/browse/FLINK-36443?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Xuyang Zhong resolved FLINK-36443.
----------------------------------
    Resolution: Fixed

> Flaky Test: assertEquals in 
> PushPartitionIntoTableSourceScanRuleTest.testWithUdf
> --------------------------------------------------------------------------------
>
>                 Key: FLINK-36443
>                 URL: https://issues.apache.org/jira/browse/FLINK-36443
>             Project: Flink
>          Issue Type: Bug
>          Components: Table SQL / Planner
>            Reporter: William Lin
>            Assignee: Muhammet Orazov
>            Priority: Minor
>              Labels: pull-request-available
>             Fix For: 2.1.0
>
>
> The assertEquals call in testWithUdf in
> PushPartitionIntoTableSourceScanRuleTest assumes that the partitions in the
> generated plan appear in a specific order. However, the order is not
> deterministic in some cases, which makes this a flaky test.
> {code}
> [ERROR] org.apache.flink.table.planner.plan.rules.logical.PushPartitionIntoTableSourceScanRuleTest.testWithUdf -- Time elapsed: 0.159 s <<< FAILURE!
> org.opentest4j.AssertionFailedError: optimized rel plan ==> expected: <
> LogicalProject(id=[$0], name=[$1], part1=[$2], part2=[$3])
> +- LogicalFilter(condition=[>($0, 2)])
>    +- LogicalTableScan(table=[[test_catalog, test_database, MyTable, partitions=[{part1=A, part2=1}, {part1=C, part2=1}]]])
> > but was: <
> LogicalProject(id=[$0], name=[$1], part1=[$2], part2=[$3])
> +- LogicalFilter(condition=[>($0, 2)])
>    +- LogicalTableScan(table=[[test_catalog, test_database, MyTable, partitions=[{part1=A, part2=1}, {part2=1, part1=C}]]])
> >
>     at org.apache.flink.table.planner.utils.DiffRepository.assertEquals(DiffRepository.java:438)
>     at org.apache.flink.table.planner.utils.TableTestUtilBase.assertEqualsOrExpand(TableTestBase.scala:1176)
>     at org.apache.flink.table.planner.utils.TableTestUtilBase.assertPlanEquals(TableTestBase.scala:1091)
>     at org.apache.flink.table.planner.utils.TableTestUtilBase.doVerifyPlan(TableTestBase.scala:921)
>     at org.apache.flink.table.planner.utils.TableTestUtilBase.verifyRelPlan(TableTestBase.scala:467)
>     at org.apache.flink.table.planner.plan.rules.logical.PushPartitionIntoLegacyTableSourceScanRuleTest.testWithUdf(PushPartitionIntoLegacyTableSourceScanRuleTest.scala:179)
>     at java.base/java.lang.reflect.Method.invoke(Method.java:569)
>     at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.accept(ForEachOps.java:183)
>     at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:197)
>     at java.base/java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:179)
>     at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:197)
>     at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.accept(ForEachOps.java:183)
>     at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:197)
>     at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:197)
>     at java.base/java.util.Iterator.forEachRemaining(Iterator.java:133)
>     at scala.collection.convert.Wrappers$IteratorWrapper.forEachRemaining(Wrappers.scala:31)
>     at java.base/java.util.Spliterators$IteratorSpliterator.forEachRemaining(Spliterators.java:1845)
>     at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509)
>     at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499)
>     at java.base/java.util.stream.ForEachOps$ForEachOp.evaluateSequential(ForEachOps.java:150)
>     at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(ForEachOps.java:173)
>     at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
>     at java.base/java.util.stream.ReferencePipeline.forEach(ReferencePipeline.java:596)
>     at java.base/java.util.stream.ReferencePipeline$7$1.accept(ReferencePipeline.java:276)
>     at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625)
>     at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509)
>     at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499)
>     at java.base/java.util.stream.ForEachOps$ForEachOp.evaluateSequential(ForEachOps.java:150)
>     at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(ForEachOps.java:173)
>     at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
>     at java.base/java.util.stream.ReferencePipeline.forEach(ReferencePipeline.java:596)
>     at java.base/java.util.concurrent.RecursiveAction.exec(RecursiveAction.java:194)
>     at java.base/java.util.concurrent.ForkJoinTask.doExec(ForkJoinTask.java:373)
>     at java.base/java.util.concurrent.ForkJoinPool$WorkQueue.topLevelExec(ForkJoinPool.java:1182)
>     at java.base/java.util.concurrent.ForkJoinPool.scan(ForkJoinPool.java:1655)
>     at java.base/java.util.concurrent.ForkJoinPool.runWorker(ForkJoinPool.java:1622)
>     at java.base/java.util.concurrent.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:165)
> {code}
> Here is the line related to this:
> {code:java}
> String expected2Canonical = expected2.replace(Util.LINE_SEPARATOR, "\n");
> String actualCanonical = actual.replace(Util.LINE_SEPARATOR, "\n");
> Assertions.assertEquals(expected2Canonical, actualCanonical, tag);
> {code}
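For context on the nondeterminism described above: the two plans in the failure differ only in the key order inside one partition map. Flink's partition push-down passes partitions around as string-to-string maps, and the partition fragment of the plan string follows whatever iteration order the backing map happens to expose. The sketch below is plain Java, not Flink code; the render() helper and the class name are made up for illustration. It reproduces the unstable rendering with a HashMap and shows one way to make it deterministic by sorting the keys; the actual fix for this ticket may instead make the comparison in the test order-insensitive.

{code:java}
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

// Minimal sketch (not Flink code): why the partition fragment of the plan string
// can flip between {part1=C, part2=1} and {part2=1, part1=C}, and one way to pin it down.
public class PartitionRenderingSketch {

    // Hypothetical helper that mimics how partitions show up in the plan string,
    // e.g. "partitions=[{part1=A, part2=1}, {part1=C, part2=1}]".
    static String render(List<Map<String, String>> partitions) {
        StringBuilder sb = new StringBuilder("partitions=[");
        for (int i = 0; i < partitions.size(); i++) {
            if (i > 0) {
                sb.append(", ");
            }
            // Map#toString follows the map's iteration order, which HashMap does not guarantee.
            sb.append(partitions.get(i));
        }
        return sb.append("]").toString();
    }

    public static void main(String[] args) {
        Map<String, String> partition = new HashMap<>();
        partition.put("part2", "1");
        partition.put("part1", "C");

        // Unstable: the key order depends on the map implementation and JDK version.
        System.out.println(render(List.of(partition)));

        // Stable: TreeMap iterates keys in sorted order, so this always renders
        // as partitions=[{part1=C, part2=1}].
        System.out.println(render(List.of(new TreeMap<>(partition))));
    }
}
{code}

Either direction removes the dependence on map iteration order; normalizing the key order at rendering time has the extra benefit of keeping the recorded plan files stable across JDKs.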



