This is an automated email from the ASF dual-hosted git repository.

hvanhovell pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0e6446ac62d [SPARK-43285] Fix ReplE2ESuite consistently failing with JDK 17
0e6446ac62d is described below

commit 0e6446ac62d6f3f7568e6e35b42f8e395bf490dc
Author: vicennial <venkata.gud...@databricks.com>
AuthorDate: Tue Apr 25 15:10:57 2023 -0400

    [SPARK-43285] Fix ReplE2ESuite consistently failing with JDK 17
    
    ### What changes were proposed in this pull request?
    
    The timeout duration for the REPL has been increased from 10 to 30 seconds (to accommodate the slower start-up of the JDK 17 tests), and the semaphore permits are now drained after each test (to avoid cascading failures, [context](https://github.com/apache/spark/pull/40675#discussion_r1174917132)).
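
    A minimal, hypothetical sketch (not the suite's actual code) of why a stale permit cascades: each REPL command releases a permit once its output has been written, the test blocks on that permit with the timeout, and a release that arrives after a timed-out test leaves a permit behind that would let the next test return immediately with stale output unless the permits are drained in between. The object and method names below are illustrative only.

    ```scala
    import java.util.concurrent.{Semaphore, TimeUnit}

    object SemaphoreDrainSketch {
      private val semaphore = new Semaphore(0)
      private val timeoutSeconds = 30

      // Stand-in for the REPL executor: release a permit once a command's
      // output has been fully written.
      def signalCommandFinished(): Unit = semaphore.release()

      // Stand-in for the test: block until the permit arrives or time out.
      def awaitCommandOutput(): Boolean =
        semaphore.tryAcquire(timeoutSeconds, TimeUnit.SECONDS)

      // Without this step between tests, a permit released after a timed-out
      // test lets the next awaitCommandOutput() return immediately.
      def resetBetweenTests(): Unit = semaphore.drainPermits()
    }
    ```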
    
    ### Why are the changes needed?
    
    The GitHub Actions (GA) JDK 17 tests consistently fail, as described in the [JIRA issue](https://issues.apache.org/jira/browse/SPARK-43285).
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Verified locally by installing JDK 17 and running the tests, first reproducing the failure and then confirming the fix.
    
    Closes #40948 from vicennial/SPARK-43285.
    
    Authored-by: vicennial <venkata.gud...@databricks.com>
    Signed-off-by: Herman van Hovell <her...@databricks.com>
---
 .../scala/org/apache/spark/sql/application/ReplE2ESuite.scala    | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/application/ReplE2ESuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/application/ReplE2ESuite.scala
index f0ec28a5a87..af920f8c314 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/application/ReplE2ESuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/application/ReplE2ESuite.scala
@@ -20,13 +20,14 @@ import java.io.{PipedInputStream, PipedOutputStream}
 import java.util.concurrent.{Executors, Semaphore, TimeUnit}
 
 import org.apache.commons.io.output.ByteArrayOutputStream
+import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.sql.connect.client.util.RemoteSparkSession
 
-class ReplE2ESuite extends RemoteSparkSession {
+class ReplE2ESuite extends RemoteSparkSession with BeforeAndAfterEach {
 
   private val executorService = Executors.newSingleThreadExecutor()
-  private val TIMEOUT_SECONDS = 10
+  private val TIMEOUT_SECONDS = 30
 
   private var testSuiteOut: PipedOutputStream = _
   private var ammoniteOut: ByteArrayOutputStream = _
@@ -68,6 +69,10 @@ class ReplE2ESuite extends RemoteSparkSession {
     super.afterAll()
   }
 
+  override def afterEach(): Unit = {
+    semaphore.drainPermits()
+  }
+
   def runCommandsInShell(input: String): String = {
     require(input.nonEmpty)
     // Pad the input with a semaphore release so that we know when the execution of the provided

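The fix mixes `org.scalatest.BeforeAndAfterEach` into the suite so the drain runs after every test body. Below is a minimal, self-contained sketch of that lifecycle; it assumes a plain `AnyFunSuite` base rather than the suite's actual `RemoteSparkSession`, and the suite name and test bodies are hypothetical.

```scala
import org.scalatest.BeforeAndAfterEach
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical suite illustrating the BeforeAndAfterEach lifecycle: afterEach()
// runs after every test body, whether it passes or fails, which is where
// ReplE2ESuite drains its semaphore so one test cannot leak permits into the next.
class LifecycleSketchSuite extends AnyFunSuite with BeforeAndAfterEach {

  override def afterEach(): Unit = {
    // Reset any shared state here (the real suite calls semaphore.drainPermits()).
    super.afterEach()
  }

  test("first test") { assert(1 + 1 == 2) }
  test("second test") { assert("spark".nonEmpty) }
}
```

Performing the cleanup in afterEach rather than inside each test keeps it in one place and guarantees it runs even when a test times out or throws.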

