[ https://issues.apache.org/jira/browse/SPARK-50510?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Yang Jie resolved SPARK-50510.
------------------------------
    Fix Version/s: 3.5.4
       Resolution: Fixed

Issue resolved by pull request 49127
[https://github.com/apache/spark/pull/49127]

> [3.5] Sporadic ReattachableExecuteSuite failure
> -----------------------------------------------
>
>                 Key: SPARK-50510
>                 URL: https://issues.apache.org/jira/browse/SPARK-50510
>             Project: Spark
>          Issue Type: Bug
>          Components: Connect
> Affects Versions: 3.5.3
>         Reporter: Changgyoo Park
>         Assignee: Changgyoo Park
>         Priority: Major
>           Labels: pull-request-available
>          Fix For: 3.5.4
>
>
> branch-3.5 daily test:
> https://github.com/apache/spark/actions/runs/12178461881/job/33988670611
> branch-3.5 change pipeline:
> https://github.com/apache/spark/actions/runs/11933977672/job/33262174405
>
> [info] - reattach after connection expired *** FAILED *** (222 milliseconds)
> [info]   "UNKNOWN" did not contain "INVALID_HANDLE.SESSION_NOT_FOUND" (ReattachableExecuteSuite.scala:74)
> [info]   org.scalatest.exceptions.TestFailedException:
> [info]   at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:472)
> [info]   at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:471)
> [info]   at org.scalatest.Assertions$.newAssertionFailedException(Assertions.scala:1231)
> [info]   at org.scalatest.Assertions$AssertionsHelper.macroAssert(Assertions.scala:1295)
> [info]   at org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.$anonfun$new$5(ReattachableExecuteSuite.scala:74)
> [info]   at org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.$anonfun$new$5$adapted(ReattachableExecuteSuite.scala:67)
> [info]   at org.apache.spark.sql.connect.SparkConnectServerTest.withRawBlockingStub(SparkConnectServerTest.scala:206)
> [info]   at org.apache.spark.sql.connect.SparkConnectServerTest.withRawBlockingStub$(SparkConnectServerTest.scala:201)
> [info]   at org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.withRawBlockingStub(ReattachableExecuteSuite.scala:30)
> [info]   at org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.$anonfun$new$4(ReattachableExecuteSuite.scala:67)
> [info]   at org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.$anonfun$new$4$adapted(ReattachableExecuteSuite.scala:60)
> [info]   at org.apache.spark.sql.connect.SparkConnectServerTest.withClient(SparkConnectServerTest.scala:195)
> [info]   at org.apache.spark.sql.connect.SparkConnectServerTest.withClient$(SparkConnectServerTest.scala:187)
> [info]   at org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.withClient(ReattachableExecuteSuite.scala:30)
> [info]   at org.apache.spark.sql.connect.execution.ReattachableExecuteSuite.$anonfun$new$3(ReattachableExecuteSuite.scala:60)
> [info]   at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
> [info]   at org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
> [info]   at org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
> [info]   at org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
> [info]   at org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
> [info]   at org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
> [info]   at org.apache.spark.SparkFunSuite.$anonfun$test$2(SparkFunSuite.scala:155)
> [info]   at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
> [info]   at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
> [info]   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
> [info]   at org.scalatest.Transformer.apply(Transformer.scala:22)
> [info]   at org.scalatest.Transformer.apply(Transformer.scala:20)
> [info]   at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
> [info]   at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:227)
> [info]   at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
> [info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
> [info]   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
> [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
> [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
> [info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:69)
> [info]   at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
> [info]   at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
> [info]   at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:69)
> [info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
> [info]   at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
> [info]   at scala.collection.immutable.List.foreach(List.scala:431)
> [info]   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
> [info]   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
> [info]   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
> [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
> [info]   at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
> [info]   at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564)
> [info]   at org.scalatest.Suite.run(Suite.scala:1114)
> [info]   at org.scalatest.Suite.run$(Suite.scala:1096)
> [info]   at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564)
> [info]   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
> [info]   at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
> [info]   at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
> [info]   at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
> [info]   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:69)
> [info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
> [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:69)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517)
> [info]   at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
> [info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> [info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> [info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> [info]   at java.lang.Thread.run(Thread.java:750)
>
> java.lang.IllegalStateException: operationId: 594ad2ad-d569-49b2-8daa-c5721972f1b3 with status Analyzed is not within statuses List(Finished, Failed, Canceled) for event Closed
>   at org.apache.spark.sql.connect.service.ExecuteEventsManager.assertStatus(ExecuteEventsManager.scala:261)
>   at org.apache.spark.sql.connect.service.ExecuteEventsManager.postClosed(ExecuteEventsManager.scala:229)
>   at org.apache.spark.sql.connect.service.ExecuteHolder.$anonfun$close$1(ExecuteHolder.scala:240)
>   at org.apache.spark.sql.connect.service.ExecuteHolder.$anonfun$close$1$adapted(ExecuteHolder.scala:234)
>   at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
>   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>   at java.lang.Thread.run(Thread.java:750)
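For context, the server-side IllegalStateException above comes from the lifecycle check in ExecuteEventsManager.assertStatus: a Closed event may only be posted once the operation has reached a terminal status, but in the flaky run the ExecuteHolder was closed while the operation was still only Analyzed. Below is a minimal, self-contained sketch of that kind of check; the object and method bodies here are illustrative only and are not the Spark Connect source.

// Illustrative sketch only -- models the lifecycle assertion that failed above,
// not the actual ExecuteEventsManager implementation.
object ExecuteStatusSketch {
  sealed trait Status
  case object Analyzed extends Status
  case object Finished extends Status
  case object Failed extends Status
  case object Canceled extends Status

  // Posting an event is only legal while the operation is in one of `allowed`.
  def assertStatus(allowed: List[Status], current: Status, event: String): Unit =
    if (!allowed.contains(current)) {
      throw new IllegalStateException(
        s"status $current is not within statuses $allowed for event $event")
    }

  def main(args: Array[String]): Unit = {
    // Reproduces the shape of the reported failure: the execution is closed
    // while it is still only Analyzed, so posting Closed throws.
    assertStatus(List(Finished, Failed, Canceled), Analyzed, "Closed")
  }
}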