This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 07e22811bfcb [SPARK-47089][BUILD][TESTS] Migrate `mockito 4` to 
`mockito 5`
07e22811bfcb is described below

commit 07e22811bfcb57c5ccabdfd83976f3611a793eb7
Author: panbingkun <panbing...@baidu.com>
AuthorDate: Mon Feb 19 10:55:36 2024 -0800

    [SPARK-47089][BUILD][TESTS] Migrate `mockito 4` to `mockito 5`
    
    ### What changes were proposed in this pull request?
    This PR aims to migrate from `mockito 4` to `mockito 5`.
    
    ### Why are the changes needed?
    This is a prerequisite for upgrading `scalatest mockito` to version `3.2.18`.
    `Scalatest mockito 3.2.18` no longer supports `mockito 4`.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Passed GitHub Actions (GA).
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #45158 from panbingkun/SPARK-47089.
    
    Authored-by: panbingkun <panbing...@baidu.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 mllib-local/pom.xml                                |  5 -----
 mllib/pom.xml                                      |  5 -----
 pom.xml                                            | 16 +++++-----------
 resource-managers/kubernetes/core/pom.xml          | 11 ++++++++++-
 .../spark/deploy/k8s/submit/ClientSuite.scala      | 12 ++++++------
 .../cluster/k8s/ExecutorPodsAllocatorSuite.scala   | 22 ++++++++++++----------
 sql/core/pom.xml                                   |  5 -----
 7 files changed, 33 insertions(+), 43 deletions(-)

diff --git a/mllib-local/pom.xml b/mllib-local/pom.xml
index 408aec1ff276..ecfe45f046f2 100644
--- a/mllib-local/pom.xml
+++ b/mllib-local/pom.xml
@@ -52,11 +52,6 @@
       <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-inline</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-tags_${scala.binary.version}</artifactId>
diff --git a/mllib/pom.xml b/mllib/pom.xml
index 88400e7ba6ac..49a53e9a994f 100644
--- a/mllib/pom.xml
+++ b/mllib/pom.xml
@@ -113,11 +113,6 @@
       <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-inline</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-streaming_${scala.binary.version}</artifactId>
diff --git a/pom.xml b/pom.xml
index 410cefb0e8d9..427a68f29e14 100644
--- a/pom.xml
+++ b/pom.xml
@@ -412,7 +412,7 @@
     </dependency>
     <dependency>
       <groupId>org.scalatestplus</groupId>
-      <artifactId>mockito-4-11_${scala.binary.version}</artifactId>
+      <artifactId>mockito-5-8_${scala.binary.version}</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -1156,7 +1156,7 @@
       </dependency>
       <dependency>
         <groupId>org.scalatestplus</groupId>
-        <artifactId>mockito-4-11_${scala.binary.version}</artifactId>
+        <artifactId>mockito-5-8_${scala.binary.version}</artifactId>
         <version>3.2.17.0</version>
         <scope>test</scope>
       </dependency>
@@ -1175,25 +1175,19 @@
       <dependency>
         <groupId>org.mockito</groupId>
         <artifactId>mockito-core</artifactId>
-        <version>4.11.0</version>
-        <scope>test</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.mockito</groupId>
-        <artifactId>mockito-inline</artifactId>
-        <version>4.11.0</version>
+        <version>5.8.0</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>net.bytebuddy</groupId>
         <artifactId>byte-buddy</artifactId>
-        <version>1.14.5</version>
+        <version>1.14.10</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>net.bytebuddy</groupId>
         <artifactId>byte-buddy-agent</artifactId>
-        <version>1.14.5</version>
+        <version>1.14.10</version>
         <scope>test</scope>
       </dependency>
       <dependency>
diff --git a/resource-managers/kubernetes/core/pom.xml 
b/resource-managers/kubernetes/core/pom.xml
index afd42f6a82a0..fa0fd454ccc4 100644
--- a/resource-managers/kubernetes/core/pom.xml
+++ b/resource-managers/kubernetes/core/pom.xml
@@ -120,7 +120,16 @@
       <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
-
+    <dependency>
+      <groupId>net.bytebuddy</groupId>
+      <artifactId>byte-buddy</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>net.bytebuddy</groupId>
+      <artifactId>byte-buddy-agent</artifactId>
+      <scope>test</scope>
+    </dependency>
 
     <dependency>
       <groupId>org.jmock</groupId>
diff --git 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala
 
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala
index de5244be95e2..c98654d6c02a 100644
--- 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala
+++ 
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/ClientSuite.scala
@@ -167,7 +167,7 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter 
{
 
   private var kconf: KubernetesDriverConf = _
   private var createdPodArgumentCaptor: ArgumentCaptor[Pod] = _
-  private var createdResourcesArgumentCaptor: ArgumentCaptor[HasMetadata] = _
+  private var createdResourcesArgumentCaptor: 
ArgumentCaptor[Array[HasMetadata]] = _
 
   before {
     MockitoAnnotations.openMocks(this).close()
@@ -179,7 +179,7 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter 
{
     when(podsWithNamespace.withName(POD_NAME)).thenReturn(namedPods)
 
     createdPodArgumentCaptor = ArgumentCaptor.forClass(classOf[Pod])
-    createdResourcesArgumentCaptor = 
ArgumentCaptor.forClass(classOf[HasMetadata])
+    createdResourcesArgumentCaptor = 
ArgumentCaptor.forClass(classOf[Array[HasMetadata]])
     when(podsWithNamespace.resource(fullExpectedPod())).thenReturn(namedPods)
     when(resourceList.forceConflicts()).thenReturn(resourceList)
     when(namedPods.serverSideApply()).thenReturn(podWithOwnerReference())
@@ -189,7 +189,7 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter 
{
     when(loggingPodStatusWatcher.watchOrStop(sId)).thenReturn(true)
     doReturn(resourceList)
       .when(kubernetesClient)
-      .resourceList(createdResourcesArgumentCaptor.capture())
+      .resourceList(createdResourcesArgumentCaptor.capture(): _*)
   }
 
   test("The client should configure the pod using the builder.") {
@@ -210,7 +210,7 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter 
{
       kubernetesClient,
       loggingPodStatusWatcher)
     submissionClient.run()
-    val otherCreatedResources = createdResourcesArgumentCaptor.getAllValues
+    val otherCreatedResources = 
createdResourcesArgumentCaptor.getAllValues.asScala.flatten
     assert(otherCreatedResources.size === 2)
     val secrets = 
otherCreatedResources.toArray.filter(_.isInstanceOf[Secret]).toSeq
     assert(secrets === ADDITIONAL_RESOURCES_WITH_OWNER_REFERENCES)
@@ -246,7 +246,7 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter 
{
       kubernetesClient,
       loggingPodStatusWatcher)
     submissionClient.run()
-    val otherCreatedResources = createdResourcesArgumentCaptor.getAllValues
+    val otherCreatedResources = 
createdResourcesArgumentCaptor.getAllValues.asScala.flatten
 
     // 2 for pre-resource creation/update, 1 for resource creation, 1 for 
config map
     assert(otherCreatedResources.size === 4)
@@ -326,7 +326,7 @@ class ClientSuite extends SparkFunSuite with BeforeAndAfter 
{
       kubernetesClient,
       loggingPodStatusWatcher)
     submissionClient.run()
-    val otherCreatedResources = createdResourcesArgumentCaptor.getAllValues
+    val otherCreatedResources = 
createdResourcesArgumentCaptor.getAllValues.asScala.flatten
 
     val configMaps = otherCreatedResources.toArray
       .filter(_.isInstanceOf[ConfigMap]).map(_.asInstanceOf[ConfigMap])
diff --git 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala
 
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala
index eb9246a2a5f2..f1ef96b942fe 100644
--- 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala
+++ 
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala
@@ -124,10 +124,12 @@ class ExecutorPodsAllocatorSuite extends SparkFunSuite 
with BeforeAndAfter {
     
when(podsWithNamespace.withName(driverPodName)).thenReturn(driverPodOperations)
     when(podsWithNamespace.resource(any())).thenReturn(podResource)
     when(podsWithNamespace.withLabel(anyString(), 
anyString())).thenReturn(labeledPods)
-    when(podsWithNamespace.withLabelIn(anyString(), 
any())).thenReturn(labeledPods)
+    when(podsWithNamespace.withLabelIn(
+      anyString(), any(classOf[Array[String]]): _*)).thenReturn(labeledPods)
     when(podsWithNamespace.withField(anyString(), 
anyString())).thenReturn(labeledPods)
     when(labeledPods.withLabel(anyString(), 
anyString())).thenReturn(labeledPods)
-    when(labeledPods.withLabelIn(anyString(), any())).thenReturn(labeledPods)
+    when(labeledPods.withLabelIn(
+      anyString(), any(classOf[Array[String]]): _*)).thenReturn(labeledPods)
     when(labeledPods.withField(anyString(), 
anyString())).thenReturn(labeledPods)
     when(driverPodOperations.get).thenReturn(driverPod)
     when(driverPodOperations.waitUntilReady(any(), 
any())).thenReturn(driverPod)
@@ -212,7 +214,7 @@ class ExecutorPodsAllocatorSuite extends SparkFunSuite with 
BeforeAndAfter {
       .withLabel(SPARK_ROLE_LABEL, SPARK_POD_EXECUTOR_ROLE))
       .thenReturn(podOperations)
     when(podOperations
-      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any()))
+      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any(classOf[Array[String]]): 
_*))
       .thenReturn(podOperations)
 
     val startTime = Instant.now.toEpochMilli
@@ -362,7 +364,7 @@ class ExecutorPodsAllocatorSuite extends SparkFunSuite with 
BeforeAndAfter {
       .withLabel(SPARK_ROLE_LABEL, SPARK_POD_EXECUTOR_ROLE))
       .thenReturn(labeledPods)
     when(labeledPods
-      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any()))
+      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any(classOf[Array[String]]): 
_*))
       .thenReturn(labeledPods)
 
     val startTime = Instant.now.toEpochMilli
@@ -432,7 +434,7 @@ class ExecutorPodsAllocatorSuite extends SparkFunSuite with 
BeforeAndAfter {
       .withLabel(SPARK_ROLE_LABEL, SPARK_POD_EXECUTOR_ROLE))
       .thenReturn(labeledPods)
     when(labeledPods
-      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any()))
+      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any(classOf[Array[String]]): 
_*))
       .thenReturn(labeledPods)
 
     val startTime = Instant.now.toEpochMilli
@@ -477,7 +479,7 @@ class ExecutorPodsAllocatorSuite extends SparkFunSuite with 
BeforeAndAfter {
       .withLabel(SPARK_ROLE_LABEL, SPARK_POD_EXECUTOR_ROLE))
       .thenReturn(labeledPods)
     when(labeledPods
-      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any()))
+      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any(classOf[Array[String]]): 
_*))
       .thenReturn(labeledPods)
 
     val startTime = Instant.now.toEpochMilli
@@ -560,7 +562,7 @@ class ExecutorPodsAllocatorSuite extends SparkFunSuite with 
BeforeAndAfter {
       .withLabel(SPARK_ROLE_LABEL, SPARK_POD_EXECUTOR_ROLE))
       .thenReturn(labeledPods)
     when(labeledPods
-      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any()))
+      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any(classOf[Array[String]]): 
_*))
       .thenReturn(labeledPods)
 
     val startTime = Instant.now.toEpochMilli
@@ -642,7 +644,7 @@ class ExecutorPodsAllocatorSuite extends SparkFunSuite with 
BeforeAndAfter {
       .withLabel(SPARK_ROLE_LABEL, SPARK_POD_EXECUTOR_ROLE))
       .thenReturn(labeledPods)
     when(labeledPods
-      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any()))
+      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any(classOf[Array[String]]): 
_*))
       .thenReturn(labeledPods)
 
     val startTime = Instant.now.toEpochMilli
@@ -794,7 +796,7 @@ class ExecutorPodsAllocatorSuite extends SparkFunSuite with 
BeforeAndAfter {
       .withLabel(SPARK_ROLE_LABEL, SPARK_POD_EXECUTOR_ROLE))
       .thenReturn(labeledPods)
     when(labeledPods
-      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any()))
+      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any(classOf[Array[String]]): 
_*))
       .thenReturn(labeledPods)
 
     val startTime = Instant.now.toEpochMilli
@@ -890,7 +892,7 @@ class ExecutorPodsAllocatorSuite extends SparkFunSuite with 
BeforeAndAfter {
       .withLabel(SPARK_ROLE_LABEL, SPARK_POD_EXECUTOR_ROLE))
       .thenReturn(labeledPods)
     when(labeledPods
-      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any()))
+      .withLabelIn(meq(SPARK_EXECUTOR_ID_LABEL), any(classOf[Array[String]]): 
_*))
       .thenReturn(labeledPods)
 
     val startTime = Instant.now.toEpochMilli
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index 1ef41f9b43da..8b1b51352a20 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -213,11 +213,6 @@
       <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-inline</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.seleniumhq.selenium</groupId>
       <artifactId>selenium-java</artifactId>


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to