[ https://issues.apache.org/jira/browse/SPARK-39682?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Hyukjin Kwon updated SPARK-39682:
---------------------------------
    Component/s: SQL

> Docker IT build broken in branch-3.2 with Scala 2.13
> ----------------------------------------------------
>
>                 Key: SPARK-39682
>                 URL: https://issues.apache.org/jira/browse/SPARK-39682
>             Project: Spark
>          Issue Type: Test
>          Components: Build, SQL, Tests
>    Affects Versions: 3.2.1
>            Reporter: Hyukjin Kwon
>            Priority: Major
>
> https://github.com/apache/spark/runs/7189971505?check_suite_focus=true
> {code}
> [info] OracleIntegrationSuite:
> [info] org.apache.spark.sql.jdbc.v2.OracleIntegrationSuite *** ABORTED *** (8 minutes, 1 second)
> [info]   The code passed to eventually never returned normally. Attempted 426 times over 7.008370057216667 minutes. Last failure message: IO Error: The Network Adapter could not establish the connection. (DockerJDBCIntegrationSuite.scala:166)
> [info]   org.scalatest.exceptions.TestFailedDueToTimeoutException:
> [info]   at org.scalatest.enablers.Retrying$$anon$4.tryTryAgain$2(Retrying.scala:189)
> [info]   at org.scalatest.enablers.Retrying$$anon$4.retry(Retrying.scala:196)
> [info]   at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:313)
> [info]   at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:312)
> [info]   at org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.eventually(DockerJDBCIntegrationSuite.scala:95)
> [info]   at org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.$anonfun$beforeAll$1(DockerJDBCIntegrationSuite.scala:166)
> [info]   at org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled(DockerIntegrationFunSuite.scala:49)
> [info]   at org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled$(DockerIntegrationFunSuite.scala:47)
> [info]   at org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.runIfTestsEnabled(DockerJDBCIntegrationSuite.scala:95)
> [info]   at org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.beforeAll(DockerJDBCIntegrationSuite.scala:118)
> [info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
> [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:62)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:318)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:513)
> [info]   at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413)
> [info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> [info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> [info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> [info]   at java.lang.Thread.run(Thread.java:750)
> [info]   Cause: java.sql.SQLRecoverableException: IO Error: The Network Adapter could not establish the connection
> [info]   at oracle.jdbc.driver.T4CConnection.logon(T4CConnection.java:858)
> [info]   at oracle.jdbc.driver.PhysicalConnection.connect(PhysicalConnection.java:793)
> [info]   at oracle.jdbc.driver.T4CDriverExtension.getConnection(T4CDriverExtension.java:57)
> [info]   at oracle.jdbc.driver.OracleDriver.connect(OracleDriver.java:747)
> [info]   at oracle.jdbc.driver.OracleDriver.connect(OracleDriver.java:562)
> [info]   at java.sql.DriverManager.getConnection(DriverManager.java:664)
> [info]   at java.sql.DriverManager.getConnection(DriverManager.java:208)
> [info]   at org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.getConnection(DockerJDBCIntegrationSuite.scala:200)
> [info]   at org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.$anonfun$beforeAll$3(DockerJDBCIntegrationSuite.scala:167)
> [info]   at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
> [info]   at org.scalatest.enablers.Retrying$$anon$4.makeAValiantAttempt$1(Retrying.scala:154)
> [info]   at org.scalatest.enablers.Retrying$$anon$4.tryTryAgain$2(Retrying.scala:166)
> [info]   at org.scalatest.enablers.Retrying$$anon$4.retry(Retrying.scala:196)
> [info]   at org.scalatest.concurrent.Eventually.eventually(Eventually.scala:313)
> [info]   at org.scalatest.concurrent.Eventually.eventually$(Eventually.scala:312)
> [info]   at org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.eventually(DockerJDBCIntegrationSuite.scala:95)
> [info]   at org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.$anonfun$beforeAll$1(DockerJDBCIntegrationSuite.scala:166)
> [info]   at org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled(DockerIntegrationFunSuite.scala:49)
> [info]   at org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled$(DockerIntegrationFunSuite.scala:47)
> [info]   at org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.runIfTestsEnabled(DockerJDBCIntegrationSuite.scala:95)
> [info]   at org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.beforeAll(DockerJDBCIntegrationSuite.scala:118)
> [info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
> [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:62)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:318)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:513)
> [info]   at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413)
> [info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> [info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> [info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> [info]   at java.lang.Thread.run(Thread.java:750)
> {code}
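For context on what the trace shows: the abort happens in DockerJDBCIntegrationSuite.beforeAll, where ScalaTest's eventually keeps retrying a JDBC getConnection against the freshly started container; here the Oracle listener never became reachable, so the retry loop timed out after 426 attempts. Below is a minimal sketch of that wait-for-container pattern, not Spark's actual code; the JDBC URL and the 10-minute/1-second patience values are illustrative assumptions, not the suite's real configuration.

{code:scala}
// Sketch only: poll an Oracle container with ScalaTest's eventually until
// DriverManager.getConnection succeeds, mirroring the pattern visible in the
// stack trace (eventually -> getConnection in beforeAll).
import java.sql.{Connection, DriverManager}

import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._

object WaitForOracleContainer {
  def main(args: Array[String]): Unit = {
    // Hypothetical URL; the real suite derives it from the container it started.
    // Assumes the Oracle JDBC driver is on the classpath.
    val jdbcUrl = "jdbc:oracle:thin:system/oracle@//localhost:1521/xe"

    // Retry until a connection succeeds or the timeout is exhausted. If the
    // listener never comes up (as in this failure), eventually throws
    // TestFailedDueToTimeoutException and the suite aborts.
    val conn: Connection = eventually(timeout(10.minutes), interval(1.second)) {
      DriverManager.getConnection(jdbcUrl)
    }
    conn.close()
  }
}
{code}

Polling like this lets the suite proceed as soon as the container is ready instead of sleeping a fixed amount, but it also means a container that never starts surfaces only as the timeout seen above rather than as an immediate startup error.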