[
https://issues.apache.org/jira/browse/SPARK-53756?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
ASF GitHub Bot updated SPARK-53756:
-----------------------------------
Labels: pull-request-available (was: )
> Flaky test: FileStreamSinkSuite."cleanup complete but invalid output for
> aborted job"
> -------------------------------------------------------------------------------------
>
> Key: SPARK-53756
> URL: https://issues.apache.org/jira/browse/SPARK-53756
> Project: Spark
> Issue Type: Improvement
> Components: SQL, Tests
> Affects Versions: 4.0.1
> Reporter: Zhen Wang
> Priority: Major
> Labels: pull-request-available
>
> I found a flaky test when running Spark SQL test cases with
> datafusion-comet.
> https://github.com/apache/datafusion-comet/actions/runs/18110382622/job/51535109725
> error details:
> {code:java}
> [info] - cleanup complete but invalid output for aborted job *** FAILED ***
> (430 milliseconds)
> [info] java.io.UncheckedIOException: java.nio.file.NoSuchFileException:
> /__w/datafusion-comet/datafusion-comet/apache-spark/target/tmp/spark-2cf998bb-fd3c-4621-88d8-05e9decad882/output
>
> @#output/.part-00009-cb134e84-5d4a-42d2-a342-b5edc52776ce-c000.snappy.parquet.crc
> [info] at
> java.base/java.nio.file.FileTreeIterator.fetchNextIfNeeded(FileTreeIterator.java:87)
> [info] at
> java.base/java.nio.file.FileTreeIterator.hasNext(FileTreeIterator.java:103)
> [info] at
> java.base/java.util.Spliterators$IteratorSpliterator.tryAdvance(Spliterators.java:1855)
> [info] at
> java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.lambda$initPartialTraversalState$0(StreamSpliterators.java:292)
> [info] at
> java.base/java.util.stream.StreamSpliterators$AbstractWrappingSpliterator.fillBuffer(StreamSpliterators.java:206)
> [info] at
> java.base/java.util.stream.StreamSpliterators$AbstractWrappingSpliterator.doAdvance(StreamSpliterators.java:169)
> [info] at
> java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.tryAdvance(StreamSpliterators.java:298)
> [info] at
> java.base/java.util.Spliterators$1Adapter.hasNext(Spliterators.java:681)
> [info] at
> scala.collection.convert.JavaCollectionWrappers$JIteratorWrapper.hasNext(JavaCollectionWrappers.scala:46)
> [info] at scala.collection.Iterator$$anon$6.hasNext(Iterator.scala:477)
> [info] at scala.collection.Iterator$$anon$9.hasNext(Iterator.scala:583)
> [info] at scala.collection.mutable.Growable.addAll(Growable.scala:61)
> [info] at scala.collection.mutable.Growable.addAll$(Growable.scala:57)
> [info] at scala.collection.immutable.SetBuilderImpl.addAll(Set.scala:405)
> [info] at scala.collection.immutable.Set$.from(Set.scala:362)
> [info] at scala.collection.IterableOnceOps.toSet(IterableOnce.scala:1469)
> [info] at scala.collection.IterableOnceOps.toSet$(IterableOnce.scala:1469)
> [info] at scala.collection.AbstractIterator.toSet(Iterator.scala:1306)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.$anonfun$new$52(FileStreamSinkSuite.scala:538)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.$anonfun$new$52$adapted(FileStreamSinkSuite.scala:505)
> [info] at
> org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1(SQLTestUtils.scala:83)
> [info] at
> org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1$adapted(SQLTestUtils.scala:82)
> [info] at
> org.apache.spark.SparkFunSuite.withTempDir(SparkFunSuite.scala:245)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.org$apache$spark$sql$test$SQLTestUtils$$super$withTempDir(FileStreamSinkSuite.scala:50)
> [info] at
> org.apache.spark.sql.test.SQLTestUtils.withTempDir(SQLTestUtils.scala:82)
> [info] at
> org.apache.spark.sql.test.SQLTestUtils.withTempDir$(SQLTestUtils.scala:81)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.withTempDir(FileStreamSinkSuite.scala:50)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.$anonfun$new$51(FileStreamSinkSuite.scala:505)
> [info] at
> scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
> [info] at
> org.apache.spark.sql.catalyst.SQLConfHelper.withSQLConf(SQLConfHelper.scala:56)
> [info] at
> org.apache.spark.sql.catalyst.SQLConfHelper.withSQLConf$(SQLConfHelper.scala:38)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(FileStreamSinkSuite.scala:50)
> [info] at
> org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:301)
> [info] at
> org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:297)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.withSQLConf(FileStreamSinkSuite.scala:50)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.$anonfun$new$50(FileStreamSinkSuite.scala:505)
> [info] at
> scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
> [info] at
> org.apache.spark.sql.catalyst.util.package$.quietly(package.scala:43)
> [info] at
> org.apache.spark.sql.test.SQLTestUtils.$anonfun$testQuietly$1(SQLTestUtils.scala:119)
> [info] at
> scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
> [info] at org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
> [info] at
> org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
> [info] at
> org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
> [info] at
> org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
> [info] at org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
> [info] at
> org.apache.spark.SparkFunSuite.$anonfun$test$2(SparkFunSuite.scala:155)
> [info] at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
> [info] at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
> [info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
> [info] at org.scalatest.Transformer.apply(Transformer.scala:22)
> [info] at org.scalatest.Transformer.apply(Transformer.scala:20)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
> [info] at
> org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:227)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
> [info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
> [info] at
> org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:69)
> [info] at
> org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
> [info] at
> org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
> [info] at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:69)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
> [info] at
> org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
> [info] at scala.collection.immutable.List.foreach(List.scala:334)
> [info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
> [info] at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
> [info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
> [info] at
> org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
> [info] at org.scalatest.Suite.run(Suite.scala:1114)
> [info] at org.scalatest.Suite.run$(Suite.scala:1096)
> [info] at
> org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
> [info] at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
> [info] at
> org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:69)
> [info] at
> org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
> [info] at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info] at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info] at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:69)
> [info] at
> org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321)
> [info] at
> org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517)
> [info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
> [info] at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
> [info] at
> java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
> [info] at
> java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
> [info] at java.base/java.lang.Thread.run(Thread.java:840)
> [info] Cause: java.nio.file.NoSuchFileException:
> /__w/datafusion-comet/datafusion-comet/apache-spark/target/tmp/spark-2cf998bb-fd3c-4621-88d8-05e9decad882/output
>
> @#output/.part-00009-cb134e84-5d4a-42d2-a342-b5edc52776ce-c000.snappy.parquet.crc
> [info] at
> java.base/sun.nio.fs.UnixException.translateToIOException(UnixException.java:92)
> [info] at
> java.base/sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:106)
> [info] at
> java.base/sun.nio.fs.UnixException.rethrowAsIOException(UnixException.java:111)
> [info] at
> java.base/sun.nio.fs.UnixFileAttributeViews$Basic.readAttributes(UnixFileAttributeViews.java:55)
> [info] at
> java.base/sun.nio.fs.UnixFileSystemProvider.readAttributes(UnixFileSystemProvider.java:148)
> [info] at
> java.base/sun.nio.fs.LinuxFileSystemProvider.readAttributes(LinuxFileSystemProvider.java:99)
> [info] at java.base/java.nio.file.Files.readAttributes(Files.java:1851)
> [info] at
> java.base/java.nio.file.FileTreeWalker.getAttributes(FileTreeWalker.java:220)
> [info] at
> java.base/java.nio.file.FileTreeWalker.visit(FileTreeWalker.java:277)
> [info] at
> java.base/java.nio.file.FileTreeWalker.next(FileTreeWalker.java:374)
> [info] at
> java.base/java.nio.file.FileTreeIterator.fetchNextIfNeeded(FileTreeIterator.java:83)
> [info] at
> java.base/java.nio.file.FileTreeIterator.hasNext(FileTreeIterator.java:103)
> [info] at
> java.base/java.util.Spliterators$IteratorSpliterator.tryAdvance(Spliterators.java:1855)
> [info] at
> java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.lambda$initPartialTraversalState$0(StreamSpliterators.java:292)
> [info] at
> java.base/java.util.stream.StreamSpliterators$AbstractWrappingSpliterator.fillBuffer(StreamSpliterators.java:206)
> [info] at
> java.base/java.util.stream.StreamSpliterators$AbstractWrappingSpliterator.doAdvance(StreamSpliterators.java:169)
> [info] at
> java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.tryAdvance(StreamSpliterators.java:298)
> [info] at
> java.base/java.util.Spliterators$1Adapter.hasNext(Spliterators.java:681)
> [info] at
> scala.collection.convert.JavaCollectionWrappers$JIteratorWrapper.hasNext(JavaCollectionWrappers.scala:46)
> [info] at scala.collection.Iterator$$anon$6.hasNext(Iterator.scala:477)
> [info] at scala.collection.Iterator$$anon$9.hasNext(Iterator.scala:583)
> [info] at scala.collection.mutable.Growable.addAll(Growable.scala:61)
> [info] at scala.collection.mutable.Growable.addAll$(Growable.scala:57)
> [info] at scala.collection.immutable.SetBuilderImpl.addAll(Set.scala:405)
> [info] at scala.collection.immutable.Set$.from(Set.scala:362)
> [info] at scala.collection.IterableOnceOps.toSet(IterableOnce.scala:1469)
> [info] at scala.collection.IterableOnceOps.toSet$(IterableOnce.scala:1469)
> [info] at scala.collection.AbstractIterator.toSet(Iterator.scala:1306)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.$anonfun$new$52(FileStreamSinkSuite.scala:538)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.$anonfun$new$52$adapted(FileStreamSinkSuite.scala:505)
> [info] at
> org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1(SQLTestUtils.scala:83)
> [info] at
> org.apache.spark.sql.test.SQLTestUtils.$anonfun$withTempDir$1$adapted(SQLTestUtils.scala:82)
> [info] at
> org.apache.spark.SparkFunSuite.withTempDir(SparkFunSuite.scala:245)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.org$apache$spark$sql$test$SQLTestUtils$$super$withTempDir(FileStreamSinkSuite.scala:50)
> [info] at
> org.apache.spark.sql.test.SQLTestUtils.withTempDir(SQLTestUtils.scala:82)
> [info] at
> org.apache.spark.sql.test.SQLTestUtils.withTempDir$(SQLTestUtils.scala:81)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.withTempDir(FileStreamSinkSuite.scala:50)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.$anonfun$new$51(FileStreamSinkSuite.scala:505)
> [info] at
> scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
> [info] at
> org.apache.spark.sql.catalyst.SQLConfHelper.withSQLConf(SQLConfHelper.scala:56)
> [info] at
> org.apache.spark.sql.catalyst.SQLConfHelper.withSQLConf$(SQLConfHelper.scala:38)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(FileStreamSinkSuite.scala:50)
> [info] at
> org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:301)
> [info] at
> org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:297)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.withSQLConf(FileStreamSinkSuite.scala:50)
> [info] at
> org.apache.spark.sql.streaming.FileStreamSinkSuite.$anonfun$new$50(FileStreamSinkSuite.scala:505)
> [info] at
> scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
> [info] at
> org.apache.spark.sql.catalyst.util.package$.quietly(package.scala:43)
> [info] at
> org.apache.spark.sql.test.SQLTestUtils.$anonfun$testQuietly$1(SQLTestUtils.scala:119)
> [info] at
> scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
> [info] at org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
> [info] at
> org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
> [info] at
> org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
> [info] at
> org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
> [info] at org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
> [info] at
> org.apache.spark.SparkFunSuite.$anonfun$test$2(SparkFunSuite.scala:155)
> [info] at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
> [info] at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
> [info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
> [info] at org.scalatest.Transformer.apply(Transformer.scala:22)
> [info] at org.scalatest.Transformer.apply(Transformer.scala:20)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
> [info] at
> org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:227)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
> [info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
> [info] at
> org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:69)
> [info] at
> org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
> [info] at
> org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
> [info] at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:69)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
> [info] at
> org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
> [info] at scala.collection.immutable.List.foreach(List.scala:334)
> [info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
> [info] at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
> [info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
> [info] at
> org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
> [info] at org.scalatest.Suite.run(Suite.scala:1114)
> [info] at org.scalatest.Suite.run$(Suite.scala:1096)
> [info] at
> org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
> [info] at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
> [info] at
> org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
> [info] at
> org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:69)
> [info] at
> org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
> [info] at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info] at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info] at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:69)
> [info] at
> org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321)
> [info] at
> org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517)
> [info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
> [info] at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
> [info] at
> java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
> [info] at
> java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
> [info] at java.base/java.lang.Thread.run(Thread.java:840) {code}
--
This message was sent by Atlassian Jira
(v8.20.10#820010)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]