This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 9208c42b3a11 [SPARK-46525][DOCKER][TESTS] Fix docker-integration-tests 
on Apple Silicon
9208c42b3a11 is described below

commit 9208c42b3a110099d1cc0249b6be364aacff0f2a
Author: Kent Yao <y...@apache.org>
AuthorDate: Wed Jan 3 00:58:11 2024 -0800

    [SPARK-46525][DOCKER][TESTS] Fix docker-integration-tests on Apple Silicon
    
    ### What changes were proposed in this pull request?
    
    `com.spotify.docker.client` is not going to support Apple Silicon, as it 
has already been archived, and the 
[jnr-unixsocket](https://mvnrepository.com/artifact/com.github.jnr/jnr-unixsocket)
 0.18 it uses is not compatible with Apple Silicon.
    
    If we run our docker IT tests on Apple Silicon, they will fail like
    
    ```java
    [info] org.apache.spark.sql.jdbc.MariaDBKrbIntegrationSuite *** ABORTED *** 
(2 seconds, 264 milliseconds)
    [info]   com.spotify.docker.client.exceptions.DockerException: 
java.util.concurrent.ExecutionException: 
com.spotify.docker.client.shaded.javax.ws.rs.ProcessingException: 
java.lang.UnsatisfiedLinkError: could not load FFI provider 
jnr.ffi.provider.jffi.Provider
    [info]   at 
com.spotify.docker.client.DefaultDockerClient.propagate(DefaultDockerClient.java:2828)
    [info]   at 
com.spotify.docker.client.DefaultDockerClient.request(DefaultDockerClient.java:2692)
    [info]   at 
com.spotify.docker.client.DefaultDockerClient.ping(DefaultDockerClient.java:574)
    [info]   at 
org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.$anonfun$beforeAll$1(DockerJDBCIntegrationSuite.scala:124)
    [info]   at 
org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled(DockerIntegrationFunSuite.scala:49)
    [info]   at 
org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled$(DockerIntegrationFunSuite.scala:47)
    [info]   at 
org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.runIfTestsEnabled(DockerJDBCIntegrationSuite.scala:95)
    [info]   at 
org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.beforeAll(DockerJDBCIntegrationSuite.scala:118)
    [info]   at 
org.apache.spark.sql.jdbc.DockerKrbJDBCIntegrationSuite.super$beforeAll(DockerKrbJDBCIntegrationSuite.scala:65)
    [info]   at 
org.apache.spark.sql.jdbc.DockerKrbJDBCIntegrationSuite.$anonfun$beforeAll$1(DockerKrbJDBCIntegrationSuite.scala:65)
    [info]   at 
org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled(DockerIntegrationFunSuite.scala:49)
    [info]   at 
org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled$(DockerIntegrationFunSuite.scala:47)
    [info]   at 
org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.runIfTestsEnabled(DockerJDBCIntegrationSuite.scala:95)
    [info]   at 
org.apache.spark.sql.jdbc.DockerKrbJDBCIntegrationSuite.beforeAll(DockerKrbJDBCIntegrationSuite.scala:44)
    [info]   at 
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
    [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
    [info]   at 
org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
    [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:69)
    [info]   at 
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321)
    [info]   at 
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517)
    [info]   at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
    [info]   at 
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
    [info]   at 
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
    [info]   at 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    [info]   at java.base/java.lang.Thread.run(Thread.java:840)
    [info]   Cause: java.util.concurrent.ExecutionException: 
com.spotify.docker.client.shaded.javax.ws.rs.ProcessingException: 
java.lang.UnsatisfiedLinkError: could not load FFI provider 
jnr.ffi.provider.jffi.Provider
    [info]   at 
jersey.repackaged.com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:299)
    [info]   at 
jersey.repackaged.com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:286)
    [info]   at 
jersey.repackaged.com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
    [info]   at 
com.spotify.docker.client.DefaultDockerClient.request(DefaultDockerClient.java:2690)
    [info]   at 
com.spotify.docker.client.DefaultDockerClient.ping(DefaultDockerClient.java:574)
    [info]   at 
org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.$anonfun$beforeAll$1(DockerJDBCIntegrationSuite.scala:124)
    [info]   at 
org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled(DockerIntegrationFunSuite.scala:49)
    [info]   at 
org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled$(DockerIntegrationFunSuite.scala:47)
    [info]   at 
org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.runIfTestsEnabled(DockerJDBCIntegrationSuite.scala:95)
    [info]   at 
org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.beforeAll(DockerJDBCIntegrationSuite.scala:118)
    [info]   at 
org.apache.spark.sql.jdbc.DockerKrbJDBCIntegrationSuite.super$beforeAll(DockerKrbJDBCIntegrationSuite.scala:65)
    [info]   at 
org.apache.spark.sql.jdbc.DockerKrbJDBCIntegrationSuite.$anonfun$beforeAll$1(DockerKrbJDBCIntegrationSuite.scala:65)
    [info]   at 
org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled(DockerIntegrationFunSuite.scala:49)
    [info]   at 
org.apache.spark.sql.jdbc.DockerIntegrationFunSuite.runIfTestsEnabled$(DockerIntegrationFunSuite.scala:47)
    [info]   at 
org.apache.spark.sql.jdbc.DockerJDBCIntegrationSuite.runIfTestsEnabled(DockerJDBCIntegrationSuite.scala:95)
    [info]   at 
org.apache.spark.sql.jdbc.DockerKrbJDBCIntegrationSuite.beforeAll(DockerKrbJDBCIntegrationSuite.scala:44)
    [info]   at 
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
    [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
    [info]   at 
org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
    [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:69)
    [info]   at 
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:321)
    [info]   at 
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:517)
    [info]   at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:414)
    [info]   at 
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
    [info]   at 
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
    [info]   at 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    [info]   at java.base/java.lang.Thread.run(Thread.java:840)
    [info]   Cause: 
com.spotify.docker.client.shaded.javax.ws.rs.ProcessingException: 
java.lang.UnsatisfiedLinkError: could not load FFI provider 
jnr.ffi.provider.jffi.Provider
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.client.ClientRuntime.processFailure(ClientRuntime.java:202)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.client.ClientRuntime.access$400(ClientRuntime.java:79)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.client.ClientRuntime$2.run(ClientRuntime.java:182)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors$1.call(Errors.java:271)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors$1.call(Errors.java:267)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors.process(Errors.java:315)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors.process(Errors.java:297)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors.process(Errors.java:267)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:340)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.client.ClientRuntime$3.run(ClientRuntime.java:210)
    [info]   at 
java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:539)
    [info]   at 
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
    [info]   at 
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
    [info]   at 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    [info]   at java.base/java.lang.Thread.run(Thread.java:840)
    [info]   Cause: java.lang.UnsatisfiedLinkError: could not load FFI provider 
jnr.ffi.provider.jffi.Provider
    [info]   at 
jnr.ffi.provider.InvalidProvider$1.loadLibrary(InvalidProvider.java:48)
    [info]   at jnr.ffi.LibraryLoader.load(LibraryLoader.java:325)
    [info]   at jnr.unixsocket.Native.<clinit>(Native.java:80)
    [info]   at 
jnr.unixsocket.UnixSocketChannel.<init>(UnixSocketChannel.java:101)
    [info]   at jnr.unixsocket.UnixSocketChannel.open(UnixSocketChannel.java:60)
    [info]   at 
com.spotify.docker.client.UnixConnectionSocketFactory.createSocket(UnixConnectionSocketFactory.java:69)
    [info]   at 
com.spotify.docker.client.UnixConnectionSocketFactory.createSocket(UnixConnectionSocketFactory.java:44)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.conn.DefaultHttpClientConnectionOperator.connect(DefaultHttpClientConnectionOperator.java:118)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.conn.PoolingHttpClientConnectionManager.connect(PoolingHttpClientConnectionManager.java:353)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.execchain.MainClientExec.establishRoute(MainClientExec.java:380)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:236)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:184)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:88)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:110)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:184)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:71)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.apache.connector.ApacheConnector.apply(ApacheConnector.java:435)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.apache.connector.ApacheConnector$1.run(ApacheConnector.java:491)
    [info]   at 
java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:539)
    [info]   at 
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
    [info]   at 
jersey.repackaged.com.google.common.util.concurrent.MoreExecutors$DirectExecutorService.execute(MoreExecutors.java:299)
    [info]   at 
java.base/java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:123)
    [info]   at 
jersey.repackaged.com.google.common.util.concurrent.AbstractListeningExecutorService.submit(AbstractListeningExecutorService.java:50)
    [info]   at 
jersey.repackaged.com.google.common.util.concurrent.AbstractListeningExecutorService.submit(AbstractListeningExecutorService.java:37)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.apache.connector.ApacheConnector.apply(ApacheConnector.java:487)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.client.ClientRuntime$2.run(ClientRuntime.java:178)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors$1.call(Errors.java:271)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors$1.call(Errors.java:267)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors.process(Errors.java:315)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors.process(Errors.java:297)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors.process(Errors.java:267)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:340)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.client.ClientRuntime$3.run(ClientRuntime.java:210)
    [info]   at 
java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:539)
    [info]   at 
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
    [info]   at 
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
    [info]   at 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    [info]   at java.base/java.lang.Thread.run(Thread.java:840)
    [info]   Cause: java.lang.ExceptionInInitializerError:
    [info]   at 
jnr.ffi.provider.jffi.NativeRuntime.getInstance(NativeRuntime.java:58)
    [info]   at jnr.ffi.provider.jffi.Provider.<init>(Provider.java:29)
    [info]   at 
java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native
 Method)
    [info]   at 
java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:77)
    [info]   at 
java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    [info]   at 
java.base/java.lang.reflect.Constructor.newInstanceWithCaller(Constructor.java:499)
    [info]   at 
java.base/java.lang.reflect.ReflectAccess.newInstance(ReflectAccess.java:128)
    [info]   at 
java.base/jdk.internal.reflect.ReflectionFactory.newInstance(ReflectionFactory.java:347)
    [info]   at java.base/java.lang.Class.newInstance(Class.java:645)
    [info]   at 
jnr.ffi.provider.FFIProvider$SystemProviderSingletonHolder.getInstance(FFIProvider.java:68)
    [info]   at 
jnr.ffi.provider.FFIProvider$SystemProviderSingletonHolder.<clinit>(FFIProvider.java:57)
    [info]   at 
jnr.ffi.provider.FFIProvider.getSystemProvider(FFIProvider.java:35)
    [info]   at jnr.ffi.LibraryLoader.create(LibraryLoader.java:73)
    [info]   at jnr.unixsocket.Native.<clinit>(Native.java:76)
    [info]   at 
jnr.unixsocket.UnixSocketChannel.<init>(UnixSocketChannel.java:101)
    [info]   at jnr.unixsocket.UnixSocketChannel.open(UnixSocketChannel.java:60)
    [info]   at 
com.spotify.docker.client.UnixConnectionSocketFactory.createSocket(UnixConnectionSocketFactory.java:69)
    [info]   at 
com.spotify.docker.client.UnixConnectionSocketFactory.createSocket(UnixConnectionSocketFactory.java:44)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.conn.DefaultHttpClientConnectionOperator.connect(DefaultHttpClientConnectionOperator.java:118)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.conn.PoolingHttpClientConnectionManager.connect(PoolingHttpClientConnectionManager.java:353)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.execchain.MainClientExec.establishRoute(MainClientExec.java:380)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:236)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:184)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:88)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:110)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:184)
    [info]   at 
com.spotify.docker.client.shaded.org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:71)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.apache.connector.ApacheConnector.apply(ApacheConnector.java:435)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.apache.connector.ApacheConnector$1.run(ApacheConnector.java:491)
    [info]   at 
java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:539)
    [info]   at 
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
    [info]   at 
jersey.repackaged.com.google.common.util.concurrent.MoreExecutors$DirectExecutorService.execute(MoreExecutors.java:299)
    [info]   at 
java.base/java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:123)
    [info]   at 
jersey.repackaged.com.google.common.util.concurrent.AbstractListeningExecutorService.submit(AbstractListeningExecutorService.java:50)
    [info]   at 
jersey.repackaged.com.google.common.util.concurrent.AbstractListeningExecutorService.submit(AbstractListeningExecutorService.java:37)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.apache.connector.ApacheConnector.apply(ApacheConnector.java:487)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.client.ClientRuntime$2.run(ClientRuntime.java:178)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors$1.call(Errors.java:271)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors$1.call(Errors.java:267)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors.process(Errors.java:315)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors.process(Errors.java:297)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.internal.Errors.process(Errors.java:267)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:340)
    [info]   at 
com.spotify.docker.client.shaded.org.glassfish.jersey.client.ClientRuntime$3.run(ClientRuntime.java:210)
    [info]   at 
java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:539)
    [info]   at 
java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
    [info]   at 
java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
    [info]   at 
java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
    [info]   at java.base/java.lang.Thread.run(Thread.java:840)
    [info]   Cause: java.lang.IllegalStateException: Can't overwrite cause with 
java.lang.UnsatisfiedLinkError: java.lang.UnsatisfiedLinkError: 
/Users/hzyaoqin/spark/target/tmp/jffi15403099445119552969.dylib: 
dlopen(/Users/hzyaoqin/spark/target/tmp/jffi15403099445119552969.dylib, 
0x0001): tried: 
'/Users/hzyaoqin/spark/target/tmp/jffi15403099445119552969.dylib' (fat file, 
but missing compatible architecture (have 'i386,x86_64', need 'arm64')), 
'/System/Volumes/Preboot/Cryptexes/OS/Users/hz [...]
    ```
    
    In this PR, we use its alternative to enable docker-related tests on Apple 
Silicon chips:
    
    ```xml
        <dependency>
          <groupId>com.github.docker-java</groupId>
          <artifactId>docker-java</artifactId>
          <scope>test</scope>
        </dependency>
    ```
    ### Why are the changes needed?
    
    For developers who use Apple Silicons, w/ this patch, they can test 
JDBC/Docker Integration test locally instead of suffering slowness from GitHub 
actions.
    
    ### Does this PR introduce _any_ user-facing change?
    
    dev only
    
    ### How was this patch tested?
    
    docker-integration-tests
    
    Note that `[Oracle|DB2]IntegrationSuite` is still unavailable even with 
this patch because the upstream image's support for arm64 remains a blocker.
    
    FYI, 
https://github.com/gvenzl/oci-oracle-free?tab=readme-ov-file#oracle-database-free-on-apple-m-chips.
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Closes #44509 from yaooqinn/SPARK-46525.
    
    Authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 connector/docker-integration-tests/pom.xml         |  24 ++--
 .../spark/sql/jdbc/DB2KrbIntegrationSuite.scala    |  15 +--
 .../sql/jdbc/DockerJDBCIntegrationSuite.scala      | 130 ++++++++++++---------
 .../sql/jdbc/MariaDBKrbIntegrationSuite.scala      |  18 +--
 .../sql/jdbc/PostgresKrbIntegrationSuite.scala     |  16 +--
 pom.xml                                            |  25 ++--
 project/SparkBuild.scala                           |   3 +-
 7 files changed, 129 insertions(+), 102 deletions(-)

diff --git a/connector/docker-integration-tests/pom.xml 
b/connector/docker-integration-tests/pom.xml
index 3f73177d7dd4..4cca3ef12ae5 100644
--- a/connector/docker-integration-tests/pom.xml
+++ b/connector/docker-integration-tests/pom.xml
@@ -35,12 +35,6 @@
   </properties>
 
   <dependencies>
-    <dependency>
-      <groupId>com.spotify</groupId>
-      <artifactId>docker-client</artifactId>
-      <scope>test</scope>
-      <classifier>shaded</classifier>
-    </dependency>
     <dependency>
       <groupId>org.apache.httpcomponents</groupId>
       <artifactId>httpclient</artifactId>
@@ -101,14 +95,6 @@
       <artifactId>hadoop-minikdc</artifactId>
       <scope>test</scope>
     </dependency>
-    <!-- Although SPARK-28737 upgraded Jersey to 2.29 for JDK11, 
'com.spotify.docker-client' still
-      uses this repackaged 'jersey-guava'. We add this back for JDK8/JDK11 
testing. -->
-    <dependency>
-      <groupId>org.glassfish.jersey.bundles.repackaged</groupId>
-      <artifactId>jersey-guava</artifactId>
-      <version>2.25.1</version>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.mariadb.jdbc</groupId>
       <artifactId>mariadb-java-client</artifactId>
@@ -139,5 +125,15 @@
       <artifactId>mysql-connector-j</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>com.github.docker-java</groupId>
+      <artifactId>docker-java</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.github.docker-java</groupId>
+      <artifactId>docker-java-transport-zerodep</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 </project>
diff --git 
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
 
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
index 9b518d61d252..66e2afbb6eff 100644
--- 
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
+++ 
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DB2KrbIntegrationSuite.scala
@@ -21,7 +21,7 @@ import java.security.PrivilegedExceptionAction
 import java.sql.Connection
 import javax.security.auth.login.Configuration
 
-import com.spotify.docker.client.messages.{ContainerConfig, HostConfig}
+import com.github.dockerjava.api.model.{AccessMode, Bind, ContainerConfig, 
HostConfig, Volume}
 import org.apache.hadoop.security.{SecurityUtil, UserGroupInformation}
 import 
org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.KERBEROS
 import org.scalatest.time.SpanSugar._
@@ -66,14 +66,15 @@ class DB2KrbIntegrationSuite extends 
DockerKrbJDBCIntegrationSuite {
     }
 
     override def beforeContainerStart(
-        hostConfigBuilder: HostConfig.Builder,
-        containerConfigBuilder: ContainerConfig.Builder): Unit = {
+        hostConfigBuilder: HostConfig,
+        containerConfigBuilder: ContainerConfig): Unit = {
       copyExecutableResource("db2_krb_setup.sh", initDbDir, replaceIp)
 
-      hostConfigBuilder.appendBinds(
-        HostConfig.Bind.from(initDbDir.getAbsolutePath)
-          .to("/var/custom").readOnly(true).build()
-      )
+      val newBind = new Bind(
+        initDbDir.getAbsolutePath,
+        new Volume("/var/custom"),
+        AccessMode.ro)
+      hostConfigBuilder.withBinds(hostConfigBuilder.getBinds :+ newBind: _*)
     }
   }
 
diff --git 
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala
 
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala
index bcad9ae874eb..fde228939dd4 100644
--- 
a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala
+++ 
b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/DockerJDBCIntegrationSuite.scala
@@ -20,14 +20,18 @@ package org.apache.spark.sql.jdbc
 import java.net.ServerSocket
 import java.sql.{Connection, DriverManager}
 import java.util.Properties
+import java.util.concurrent.TimeUnit
 
 import scala.jdk.CollectionConverters._
 import scala.util.control.NonFatal
 
-import com.spotify.docker.client._
-import com.spotify.docker.client.DockerClient.{ListContainersParam, LogsParam}
-import com.spotify.docker.client.exceptions.ImageNotFoundException
-import com.spotify.docker.client.messages.{ContainerConfig, HostConfig, 
PortBinding}
+import com.github.dockerjava.api.DockerClient
+import com.github.dockerjava.api.async.{ResultCallback, ResultCallbackTemplate}
+import com.github.dockerjava.api.command.CreateContainerResponse
+import com.github.dockerjava.api.exception.NotFoundException
+import com.github.dockerjava.api.model._
+import com.github.dockerjava.core.{DefaultDockerClientConfig, DockerClientImpl}
+import com.github.dockerjava.zerodep.ZerodepDockerHttpClient
 import org.scalatest.concurrent.Eventually
 import org.scalatest.time.SpanSugar._
 
@@ -88,8 +92,8 @@ abstract class DatabaseOnDocker {
    * Optional step before container starts
    */
   def beforeContainerStart(
-      hostConfigBuilder: HostConfig.Builder,
-      containerConfigBuilder: ContainerConfig.Builder): Unit = {}
+      hostConfigBuilder: HostConfig,
+      containerConfigBuilder: ContainerConfig): Unit = {}
 }
 
 abstract class DockerJDBCIntegrationSuite
@@ -111,56 +115,75 @@ abstract class DockerJDBCIntegrationSuite
     sock.close()
     port
   }
-  private var containerId: String = _
+  private var container: CreateContainerResponse = _
   private var pulled: Boolean = false
   protected var jdbcUrl: String = _
 
   override def beforeAll(): Unit = runIfTestsEnabled(s"Prepare for 
${this.getClass.getName}") {
     super.beforeAll()
     try {
-      docker = DefaultDockerClient.fromEnv.build()
+      val config = DefaultDockerClientConfig.createDefaultConfigBuilder.build
+      val httpClient = new ZerodepDockerHttpClient.Builder()
+        .dockerHost(config.getDockerHost)
+        .sslConfig(config.getSSLConfig)
+        .build()
+      docker = DockerClientImpl.getInstance(config, httpClient)
       // Check that Docker is actually up
       try {
-        docker.ping()
+        docker.pingCmd().exec()
       } catch {
         case NonFatal(e) =>
           log.error("Exception while connecting to Docker. Check whether 
Docker is running.")
           throw e
       }
-      // Ensure that the Docker image is installed:
       try {
-        docker.inspectImage(db.imageName)
+        // Ensure that the Docker image is installed:
+        docker.inspectImageCmd(db.imageName).exec()
       } catch {
-        case e: ImageNotFoundException =>
+        case e: NotFoundException =>
           log.warn(s"Docker image ${db.imageName} not found; pulling image 
from registry")
-          docker.pull(db.imageName)
+          docker.pullImageCmd(db.imageName)
+            .start()
+            .awaitCompletion(connectionTimeout.value.toSeconds, 
TimeUnit.SECONDS)
           pulled = true
       }
-      val hostConfigBuilder = HostConfig.builder()
-        .privileged(db.privileged)
-        .networkMode("bridge")
-        .ipcMode(if (db.usesIpc) "host" else "")
-        .portBindings(
-          Map(s"${db.jdbcPort}/tcp" -> List(PortBinding.of(dockerIp, 
externalPort)).asJava).asJava)
-      // Create the database container:
-      val containerConfigBuilder = ContainerConfig.builder()
-        .image(db.imageName)
-        .networkDisabled(false)
-        .env(db.env.map { case (k, v) => s"$k=$v" }.toSeq.asJava)
-        .exposedPorts(s"${db.jdbcPort}/tcp")
-      if (db.getEntryPoint.isDefined) {
-        containerConfigBuilder.entrypoint(db.getEntryPoint.get)
-      }
-      if (db.getStartupProcessName.isDefined) {
-        containerConfigBuilder.cmd(db.getStartupProcessName.get)
+
+      docker.pullImageCmd(db.imageName)
+        .start()
+        .awaitCompletion(connectionTimeout.value.toSeconds, TimeUnit.SECONDS)
+
+      val hostConfig = HostConfig
+        .newHostConfig()
+        .withNetworkMode("bridge")
+        .withPrivileged(db.privileged)
+        
.withPortBindings(PortBinding.parse(s"$dockerIp:$externalPort:${db.jdbcPort}"))
+
+      if (db.usesIpc) {
+        hostConfig.withIpcMode("host")
       }
-      db.beforeContainerStart(hostConfigBuilder, containerConfigBuilder)
-      containerConfigBuilder.hostConfig(hostConfigBuilder.build())
-      val config = containerConfigBuilder.build()
+
+      val containerConfig = new ContainerConfig()
+
+      db.beforeContainerStart(hostConfig, containerConfig)
+
       // Create the database container:
-      containerId = docker.createContainer(config).id
+      val createContainerCmd = docker.createContainerCmd(db.imageName)
+        .withHostConfig(hostConfig)
+        .withExposedPorts(ExposedPort.tcp(db.jdbcPort))
+        .withEnv(db.env.map { case (k, v) => s"$k=$v" }.toList.asJava)
+        .withNetworkDisabled(false)
+
+
+      db.getEntryPoint.foreach(ep => createContainerCmd.withEntrypoint(ep))
+      db.getStartupProcessName.foreach(n => createContainerCmd.withCmd(n))
+
+      container = createContainerCmd.exec()
       // Start the container and wait until the database can accept JDBC 
connections:
-      docker.startContainer(containerId)
+      docker.startContainerCmd(container.getId).exec()
+      eventually(connectionTimeout, interval(1.second)) {
+        val response = docker.inspectContainerCmd(container.getId).exec()
+        assert(response.getState.getRunning)
+      }
       jdbcUrl = db.getJdbcUrl(dockerIp, externalPort)
       var conn: Connection = null
       eventually(connectionTimeout, interval(1.second)) {
@@ -206,36 +229,35 @@ abstract class DockerJDBCIntegrationSuite
   def dataPreparation(connection: Connection): Unit
 
   private def cleanupContainer(): Unit = {
-    if (docker != null && containerId != null && !keepContainer) {
+    if (docker != null && container != null && !keepContainer) {
       try {
-        docker.killContainer(containerId)
+        docker.killContainerCmd(container.getId).exec()
       } catch {
         case NonFatal(e) =>
-          val exitContainerIds =
-            docker.listContainers(ListContainersParam.withStatusExited()).asScala.map(_.id())
-          if (exitContainerIds.contains(containerId)) {
-            logWarning(s"Container $containerId already stopped")
-          } else {
-            logWarning(s"Could not stop container $containerId", e)
-          }
+          val response = docker.inspectContainerCmd(container.getId).exec()
+          logWarning(s"Container $container already stopped")
+          val status = Option(response).map(_.getState.getStatus).getOrElse("unknown")
+          logWarning(s"Could not stop container $container at stage '$status'", e)
       } finally {
         logContainerOutput()
-        docker.removeContainer(containerId)
+        docker.removeContainerCmd(container.getId).exec()
         if (removePulledImage && pulled) {
-          docker.removeImage(db.imageName)
+          docker.removeImageCmd(db.imageName).exec()
         }
       }
     }
   }
 
   private def logContainerOutput(): Unit = {
-    val logStream = docker.logs(containerId, LogsParam.stdout(), LogsParam.stderr())
-    try {
-      logInfo("\n\n===== CONTAINER LOGS FOR container Id: " + containerId + " =====")
-      logInfo(logStream.readFully())
-      logInfo("\n\n===== END OF CONTAINER LOGS FOR container Id: " + containerId + " =====")
-    } finally {
-      logStream.close()
-    }
+    logInfo("\n\n===== CONTAINER LOGS FOR container Id: " + container + " =====")
+    docker.logContainerCmd(container.getId)
+      .withStdOut(true)
+      .withStdErr(true)
+      .withFollowStream(true)
+      .withSince(0).exec(
+      new ResultCallbackTemplate[ResultCallback[Frame], Frame] {
+        override def onNext(f: Frame): Unit = logInfo(f.toString)
+      })
+    logInfo("\n\n===== END OF CONTAINER LOGS FOR container Id: " + container + " =====")
   }
 }
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MariaDBKrbIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MariaDBKrbIntegrationSuite.scala
index 873d5ad1ee43..49c9e3dba0d7 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MariaDBKrbIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MariaDBKrbIntegrationSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.jdbc
 
 import javax.security.auth.login.Configuration
 
-import com.spotify.docker.client.messages.{ContainerConfig, HostConfig}
+import com.github.dockerjava.api.model.{AccessMode, Bind, ContainerConfig, HostConfig, Volume}
 
 import org.apache.spark.sql.execution.datasources.jdbc.connection.SecureConnectionProvider
 import org.apache.spark.tags.DockerTest
@@ -52,17 +52,17 @@ class MariaDBKrbIntegrationSuite extends DockerKrbJDBCIntegrationSuite {
       Some("/docker-entrypoint/mariadb_docker_entrypoint.sh")
 
     override def beforeContainerStart(
-        hostConfigBuilder: HostConfig.Builder,
-        containerConfigBuilder: ContainerConfig.Builder): Unit = {
+        hostConfigBuilder: HostConfig,
+        containerConfigBuilder: ContainerConfig): Unit = {
       copyExecutableResource("mariadb_docker_entrypoint.sh", entryPointDir, replaceIp)
       copyExecutableResource("mariadb_krb_setup.sh", initDbDir, replaceIp)
 
-      hostConfigBuilder.appendBinds(
-        HostConfig.Bind.from(entryPointDir.getAbsolutePath)
-          .to("/docker-entrypoint").readOnly(true).build(),
-        HostConfig.Bind.from(initDbDir.getAbsolutePath)
-          .to("/docker-entrypoint-initdb.d").readOnly(true).build()
-      )
+      val binds =
+        Seq(entryPointDir -> "/docker-entrypoint", initDbDir -> "/docker-entrypoint-initdb.d")
+          .map { case (from, to) =>
+            new Bind(from.getAbsolutePath, new Volume(to), AccessMode.ro)
+          }
+      hostConfigBuilder.withBinds(hostConfigBuilder.getBinds ++ binds: _*)
     }
   }
 
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresKrbIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresKrbIntegrationSuite.scala
index 4debe24754de..baf24b3c1357 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresKrbIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresKrbIntegrationSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.jdbc
 
 import javax.security.auth.login.Configuration
 
-import com.spotify.docker.client.messages.{ContainerConfig, HostConfig}
+import com.github.dockerjava.api.model.{AccessMode, Bind, ContainerConfig, HostConfig, Volume}
 
 import org.apache.spark.sql.execution.datasources.jdbc.connection.SecureConnectionProvider
 import org.apache.spark.tags.DockerTest
@@ -48,14 +48,14 @@ class PostgresKrbIntegrationSuite extends DockerKrbJDBCIntegrationSuite {
       s"jdbc:postgresql://$ip:$port/postgres?user=$principal&gsslib=gssapi"
 
     override def beforeContainerStart(
-        hostConfigBuilder: HostConfig.Builder,
-        containerConfigBuilder: ContainerConfig.Builder): Unit = {
+        hostConfigBuilder: HostConfig,
+        containerConfigBuilder: ContainerConfig): Unit = {
       copyExecutableResource("postgres_krb_setup.sh", initDbDir, replaceIp)
-
-      hostConfigBuilder.appendBinds(
-        HostConfig.Bind.from(initDbDir.getAbsolutePath)
-          .to("/docker-entrypoint-initdb.d").readOnly(true).build()
-      )
+      val newBind = new Bind(
+        initDbDir.getAbsolutePath,
+        new Volume("/docker-entrypoint-initdb.d"),
+        AccessMode.ro)
+      hostConfigBuilder.withBinds(hostConfigBuilder.getBinds :+ newBind: _*)
     }
   }
 
diff --git a/pom.xml b/pom.xml
index 95a70fbf91cb..4ff006f80d79 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1238,22 +1238,31 @@
         <scope>test</scope>
       </dependency>
       <dependency>
-        <groupId>com.spotify</groupId>
-        <artifactId>docker-client</artifactId>
-        <version>8.14.1</version>
+        <groupId>com.github.docker-java</groupId>
+        <artifactId>docker-java</artifactId>
+        <version>3.3.4</version>
         <scope>test</scope>
-        <classifier>shaded</classifier>
         <exclusions>
-          <exclusion>
-            <artifactId>guava</artifactId>
-            <groupId>com.google.guava</groupId>
-          </exclusion>
           <exclusion>
             <groupId>commons-logging</groupId>
             <artifactId>commons-logging</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>com.github.docker-java</groupId>
+            <artifactId>docker-java-transport-netty</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>com.github.docker-java</groupId>
+            <artifactId>docker-java-transport-jersey</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
+      <dependency>
+        <groupId>com.github.docker-java</groupId>
+        <artifactId>docker-java-transport-zerodep</artifactId>
+        <version>3.3.4</version>
+        <scope>test</scope>
+      </dependency>
       <dependency>
         <groupId>com.mysql</groupId>
         <artifactId>mysql-connector-j</artifactId>
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index ac86aeee3d28..5523932ac0ad 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -415,8 +415,7 @@ object SparkBuild extends PomBuild {
   /* Protobuf settings */
   enable(SparkProtobuf.settings)(protobuf)
 
-  // SPARK-14738 - Remove docker tests from main Spark build
-  // enable(DockerIntegrationTests.settings)(dockerIntegrationTests)
+  enable(DockerIntegrationTests.settings)(dockerIntegrationTests)
 
   if (!profiles.contains("volcano")) {
     enable(Volcano.settings)(kubernetes)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org


Reply via email to