This is an automated email from the ASF dual-hosted git repository. gurwls223 pushed a commit to branch branch-3.4 in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.4 by this push: new 0bb719d338a [SPARK-42515][BUILD][CONNECT][TESTS] Make `write table` in `ClientE2ETestSuite` sbt local test pass 0bb719d338a is described below commit 0bb719d338a4c797f68459149ffd98911f48e26e Author: yangjie01 <yangji...@baidu.com> AuthorDate: Tue Feb 28 12:58:26 2023 +0900 [SPARK-42515][BUILD][CONNECT][TESTS] Make `write table` in `ClientE2ETestSuite` sbt local test pass This PR uses `LocalProject("assembly") / Compile / Keys.package` instead of `buildTestDeps` to ensure `${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION/jars` is available for testing for the `connect-client-jvm` module. On the other hand, this PR also adds similar option support for `testOnly`. Make `write table` in `ClientE2ETestSuite` sbt local test pass No - Pass GitHub Actions - Manual test: run `test` ``` build/sbt clean "connect-client-jvm/test" ``` **Before** ``` [info] - write table *** FAILED *** (34 milliseconds) [info] io.grpc.StatusRuntimeException: UNKNOWN: org/apache/parquet/hadoop/api/ReadSupport [info] at io.grpc.Status.asRuntimeException(Status.java:535) [info] at io.grpc.stub.ClientCalls$BlockingResponseStream.hasNext(ClientCalls.java:660) [info] at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:45) [info] at scala.collection.Iterator.foreach(Iterator.scala:943) [info] at scala.collection.Iterator.foreach$(Iterator.scala:943) [info] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431) [info] at org.apache.spark.sql.SparkSession.execute(SparkSession.scala:169) [info] at org.apache.spark.sql.DataFrameWriter.executeWriteOperation(DataFrameWriter.scala:255) [info] at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:338) [info] at org.apache.spark.sql.ClientE2ETestSuite.$anonfun$new$13(ClientE2ETestSuite.scala:145) [info] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) [info] at 
org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1520) [info] at org.apache.spark.sql.connect.client.util.RemoteSparkSession.withTable(RemoteSparkSession.scala:169) [info] at org.apache.spark.sql.connect.client.util.RemoteSparkSession.withTable$(RemoteSparkSession.scala:167) [info] at org.apache.spark.sql.ClientE2ETestSuite.withTable(ClientE2ETestSuite.scala:33) [info] at org.apache.spark.sql.ClientE2ETestSuite.$anonfun$new$12(ClientE2ETestSuite.scala:143) [info] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) [info] at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85) [info] at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83) [info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) [info] at org.scalatest.Transformer.apply(Transformer.scala:22) [info] at org.scalatest.Transformer.apply(Transformer.scala:20) [info] at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226) [info] at org.scalatest.TestSuite.withFixture(TestSuite.scala:196) [info] at org.scalatest.TestSuite.withFixture$(TestSuite.scala:195) [info] at org.scalatest.funsuite.AnyFunSuite.withFixture(AnyFunSuite.scala:1564) [info] at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224) [info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236) [info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218) [info] at org.scalatest.funsuite.AnyFunSuite.runTest(AnyFunSuite.scala:1564) [info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269) [info] at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413) [info] at scala.collection.immutable.List.foreach(List.scala:431) [info] at 
org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401) [info] at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396) [info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268) [info] at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564) [info] at org.scalatest.Suite.run(Suite.scala:1114) [info] at org.scalatest.Suite.run$(Suite.scala:1096) [info] at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564) [info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273) [info] at org.scalatest.SuperEngine.runImpl(Engine.scala:535) [info] at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273) [info] at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272) [info] at org.apache.spark.sql.ClientE2ETestSuite.org$scalatest$BeforeAndAfterAll$$super$run(ClientE2ETestSuite.scala:33) [info] at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213) [info] at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210) [info] at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208) [info] at org.apache.spark.sql.ClientE2ETestSuite.run(ClientE2ETestSuite.scala:33) [info] at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321) [info] at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517) [info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413) [info] at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) [info] at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [info] at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [info] at 
java.base/java.lang.Thread.run(Thread.java:833) Warning: Unable to serialize throwable of type io.grpc.StatusRuntimeException for TestFailed(Ordinal(0, 15),UNKNOWN: org/apache/parquet/hadoop/api/ReadSupport,ClientE2ETestSuite,org.apache.spark.sql.ClientE2ETestSuite,Some(org.apache.spark.sql.ClientE2ETestSuite),write table,write table,Vector(),Vector(),Some(io.grpc.StatusRuntimeException: UNKNOWN: org/apache/parquet/hadoop/api/ReadSupport),Some(34),Some(IndentedText(- write table,write table,0)),Some(SeeStackDepthException),Some(org [...] Warning: Unable to read from client, please check on client for futher details of the problem. [info] - writeTo with create and using *** FAILED *** (27 milliseconds) [info] io.grpc.StatusRuntimeException: UNKNOWN: org/apache/parquet/hadoop/api/ReadSupport [info] at io.grpc.Status.asRuntimeException(Status.java:535) [info] at io.grpc.stub.ClientCalls$BlockingResponseStream.hasNext(ClientCalls.java:660) [info] at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:45) [info] at scala.collection.Iterator.foreach(Iterator.scala:943) [info] at scala.collection.Iterator.foreach$(Iterator.scala:943) [info] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431) [info] at org.apache.spark.sql.SparkSession.execute(SparkSession.scala:169) [info] at org.apache.spark.sql.DataFrameWriterV2.executeWriteOperation(DataFrameWriterV2.scala:160) [info] at org.apache.spark.sql.DataFrameWriterV2.create(DataFrameWriterV2.scala:81) [info] at org.apache.spark.sql.ClientE2ETestSuite.$anonfun$new$15(ClientE2ETestSuite.scala:162) [info] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) [info] at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1520) [info] at org.apache.spark.sql.connect.client.util.RemoteSparkSession.withTable(RemoteSparkSession.scala:169) [info] at org.apache.spark.sql.connect.client.util.RemoteSparkSession.withTable$(RemoteSparkSession.scala:167) [info] at 
org.apache.spark.sql.ClientE2ETestSuite.withTable(ClientE2ETestSuite.scala:33) [info] at org.apache.spark.sql.ClientE2ETestSuite.$anonfun$new$14(ClientE2ETestSuite.scala:161) [info] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) [info] at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85) [info] at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83) [info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) [info] at org.scalatest.Transformer.apply(Transformer.scala:22) [info] at org.scalatest.Transformer.apply(Transformer.scala:20) [info] at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226) [info] at org.scalatest.TestSuite.withFixture(TestSuite.scala:196) [info] at org.scalatest.TestSuite.withFixture$(TestSuite.scala:195) [info] at org.scalatest.funsuite.AnyFunSuite.withFixture(AnyFunSuite.scala:1564) [info] at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224) [info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236) [info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236) Warning: Unable to serialize throwable of type io.grpc.StatusRuntimeException for TestFailed(Ordinal(0, 17),UNKNOWN: org/apache/parquet/hadoop/api/ReadSupport,ClientE2ETestSuite,org.apache.spark.sql.ClientE2ETestSuite,Some(org.apache.spark.sql.ClientE2ETestSuite),writeTo with create and using,writeTo with create and using,Vector(),Vector(),Some(io.grpc.StatusRuntimeException: UNKNOWN: org/apache/parquet/hadoop/api/ReadSupport),Some(27),Some(IndentedText(- writeTo with create and using [...] 
[info] at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218) [info] at org.scalatest.funsuite.AnyFunSuite.runTest(AnyFunSuite.scala:1564) [info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269) [info] at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413) [info] at scala.collection.immutable.List.foreach(List.scala:431) [info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401) [info] at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396) [info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268) [info] at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564) [info] at org.scalatest.Suite.run(Suite.scala:1114) [info] at org.scalatest.Suite.run$(Suite.scala:1096) [info] at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564) [info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273) [info] at org.scalatest.SuperEngine.runImpl(Engine.scala:535) [info] at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273) [info] at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272) [info] at org.apache.spark.sql.ClientE2ETestSuite.org$scalatest$BeforeAndAfterAll$$super$run(ClientE2ETestSuite.scala:33) [info] at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213) Warning: Unable to read from client, please check on client for futher details of the problem. 
[info] at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210) [info] at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208) [info] at org.apache.spark.sql.ClientE2ETestSuite.run(ClientE2ETestSuite.scala:33) [info] at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321) [info] at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517) [info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413) [info] at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) [info] at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [info] at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [info] at java.base/java.lang.Thread.run(Thread.java:833) [info] - writeTo with create and append *** FAILED *** (20 milliseconds) [info] io.grpc.StatusRuntimeException: UNKNOWN: org/apache/parquet/hadoop/api/ReadSupport [info] at io.grpc.Status.asRuntimeException(Status.java:535) [info] at io.grpc.stub.ClientCalls$BlockingResponseStream.hasNext(ClientCalls.java:660) [info] at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:45) [info] at scala.collection.Iterator.foreach(Iterator.scala:943) [info] at scala.collection.Iterator.foreach$(Iterator.scala:943) [info] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431) [info] at org.apache.spark.sql.SparkSession.execute(SparkSession.scala:169) [info] at org.apache.spark.sql.DataFrameWriterV2.executeWriteOperation(DataFrameWriterV2.scala:160) [info] at org.apache.spark.sql.DataFrameWriterV2.create(DataFrameWriterV2.scala:81) [info] at org.apache.spark.sql.ClientE2ETestSuite.$anonfun$new$17(ClientE2ETestSuite.scala:175) [info] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) [info] at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1520) [info] at 
org.apache.spark.sql.connect.client.util.RemoteSparkSession.withTable(RemoteSparkSession.scala:169) [info] at org.apache.spark.sql.connect.client.util.RemoteSparkSession.withTable$(RemoteSparkSession.scala:167) [info] at org.apache.spark.sql.ClientE2ETestSuite.withTable(ClientE2ETestSuite.scala:33) [info] at org.apache.spark.sql.ClientE2ETestSuite.$anonfun$new$16(ClientE2ETestSuite.scala:174) [info] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) [info] at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85) [info] at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83) [info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) [info] at org.scalatest.Transformer.apply(Transformer.scala:22) [info] at org.scalatest.Transformer.apply(Transformer.scala:20) [info] at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226) [info] at org.scalatest.TestSuite.withFixture(TestSuite.scala:196) [info] at org.scalatest.TestSuite.withFixture$(TestSuite.scala:195) [info] at org.scalatest.funsuite.AnyFunSuite.withFixture(AnyFunSuite.scala:1564) [info] at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224) [info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236) [info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218) [info] at org.scalatest.funsuite.AnyFunSuite.runTest(AnyFunSuite.scala:1564) [info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269) [info] at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413) [info] at scala.collection.immutable.List.foreach(List.scala:431) [info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401) Warning: Unable to read from client, please check on client for futher 
details of the problem. [info] at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396) Warning: Unable to serialize throwable of type io.grpc.StatusRuntimeException for TestFailed(Ordinal(0, 19),UNKNOWN: org/apache/parquet/hadoop/api/ReadSupport,ClientE2ETestSuite,org.apache.spark.sql.ClientE2ETestSuite,Some(org.apache.spark.sql.ClientE2ETestSuite),writeTo with create and append,writeTo with create and append,Vector(),Vector(),Some(io.grpc.StatusRuntimeException: UNKNOWN: org/apache/parquet/hadoop/api/ReadSupport),Some(20),Some(IndentedText(- writeTo with create and app [...] [info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475) Fatal: Existing as unable to continue running tests, after 3 failing attempts to read event from server socket. [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268) [info] at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1564) [info] at org.scalatest.Suite.run(Suite.scala:1114) [info] at org.scalatest.Suite.run$(Suite.scala:1096) [info] at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1564) [info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273) [info] at org.scalatest.SuperEngine.runImpl(Engine.scala:535) [info] at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273) [info] at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272) [info] at org.apache.spark.sql.ClientE2ETestSuite.org$scalatest$BeforeAndAfterAll$$super$run(ClientE2ETestSuite.scala:33) [info] at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213) [info] at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210) [info] at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208) [info] at org.apache.spark.sql.ClientE2ETestSuite.run(ClientE2ETestSuite.scala:33) 
[info] at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321) [info] at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517) [info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413) [info] at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) [info] at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136) [info] at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635) [info] at java.base/java.lang.Thread.run(Thread.java:833) ``` **After** ``` [info] Run completed in 12 seconds, 629 milliseconds. [info] Total number of tests run: 505 [info] Suites: completed 9, aborted 0 [info] Tests: succeeded 505, failed 0, canceled 0, ignored 0, pending 0 [info] All tests passed. ``` run `testOnly` ``` build/sbt clean "connect-client-jvm/testOnly *ClientE2ETestSuite" build/sbt clean "connect-client-jvm/testOnly *CompatibilitySuite" ``` **Before** ``` [info] org.apache.spark.sql.ClientE2ETestSuite *** ABORTED *** (27 milliseconds) [info] java.lang.AssertionError: assertion failed: Failed to find the jar inside folder: /spark/connector/connect/server/target [info] at scala.Predef$.assert(Predef.scala:223) [info] at org.apache.spark.sql.connect.client.util.IntegrationTestUtils$.findJar(IntegrationTestUtils.scala:67) [info] at org.apache.spark.sql.connect.client.util.SparkConnectServerUtils$.sparkConnect$lzycompute(RemoteSparkSession.scala:64) [info] at org.apache.spark.sql.connect.client.util.SparkConnectServerUtils$.sparkConnect(RemoteSparkSession.scala:59) [info] at org.apache.spark.sql.connect.client.util.SparkConnectServerUtils$.start(RemoteSparkSession.scala:90) [info] at org.apache.spark.sql.connect.client.util.RemoteSparkSession.beforeAll(RemoteSparkSession.scala:120) [info] at org.apache.spark.sql.connect.client.util.RemoteSparkSession.beforeAll$(RemoteSparkSession.scala:118) [info] at 
org.apache.spark.sql.ClientE2ETestSuite.beforeAll(ClientE2ETestSuite.scala:33) [info] at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212) [info] at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210) [info] at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208) [info] at org.apache.spark.sql.ClientE2ETestSuite.run(ClientE2ETestSuite.scala:33) [info] at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321) [info] at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517) [info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413) [info] at java.util.concurrent.FutureTask.run(FutureTask.java:266) [info] at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [info] at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [info] at java.lang.Thread.run(Thread.java:750) [info] - compatibility MiMa tests *** FAILED *** (27 milliseconds) [info] java.lang.AssertionError: assertion failed: Failed to find the jar inside folder: /spark/connector/connect/client/jvm/target [info] at scala.Predef$.assert(Predef.scala:223) [info] at org.apache.spark.sql.connect.client.util.IntegrationTestUtils$.findJar(IntegrationTestUtils.scala:67) [info] at org.apache.spark.sql.connect.client.CompatibilitySuite.clientJar$lzycompute(CompatibilitySuite.scala:57) [info] at org.apache.spark.sql.connect.client.CompatibilitySuite.clientJar(CompatibilitySuite.scala:53) [info] at org.apache.spark.sql.connect.client.CompatibilitySuite.$anonfun$new$1(CompatibilitySuite.scala:69) [info] at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85) [info] at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83) [info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) [info] at org.scalatest.Transformer.apply(Transformer.scala:22) [info] at org.scalatest.Transformer.apply(Transformer.scala:20) [info] at 
org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226) [info] at org.scalatest.TestSuite.withFixture(TestSuite.scala:196) [info] at org.scalatest.TestSuite.withFixture$(TestSuite.scala:195) [info] at org.scalatest.funsuite.AnyFunSuite.withFixture(AnyFunSuite.scala:1564) [info] at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224) [info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236) [info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236) [info] at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218) [info] at org.scalatest.funsuite.AnyFunSuite.runTest(AnyFunSuite.scala:1564) ``` **After** ``` [info] Run completed in 13 seconds, 572 milliseconds. [info] Total number of tests run: 17 [info] Suites: completed 1, aborted 0 [info] Tests: succeeded 17, failed 0, canceled 0, ignored 0, pending 0 [info] All tests passed [info] Run completed in 1 second, 578 milliseconds. [info] Total number of tests run: 2 [info] Suites: completed 1, aborted 0 [info] Tests: succeeded 2, failed 0, canceled 0, ignored 0, pending 0 [info] All tests passed. ``` Closes #40136 from LuciferYang/SPARK-42515. 
Authored-by: yangjie01 <yangji...@baidu.com> Signed-off-by: Hyukjin Kwon <gurwls...@apache.org> (cherry picked from commit 8c90342e71f04a3019f70e43d38d938f09e1b356) Signed-off-by: Hyukjin Kwon <gurwls...@apache.org> --- project/SparkBuild.scala | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala index 4b077f593fe..80081a68680 100644 --- a/project/SparkBuild.scala +++ b/project/SparkBuild.scala @@ -853,14 +853,16 @@ object SparkConnectClient { }, buildTestDeps := { - (LocalProject("sql") / Compile / Keys.`package`).value - (LocalProject("connect") / assembly).value - (LocalProject("connect-client-jvm") / assembly).value + (LocalProject("assembly") / Compile / Keys.`package`).value }, - // Make sure the connect server assembly jar is available for testing. + // SPARK-42538: Make sure the `${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION/jars` is available for testing. + // At the same time, the build of `connect`, `connect-client-jvm` and `sql` will be triggered by `assembly` build, + // so no additional configuration is required. test := ((Test / test) dependsOn (buildTestDeps)).value, + testOnly := ((Test / testOnly) dependsOn (buildTestDeps)).evaluated, + (assembly / test) := { }, (assembly / logLevel) := Level.Info, --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org