Github user liyinan926 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19468#discussion_r152373927
  
    --- Diff: 
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesClusterSchedulerBackendSuite.scala
 ---
    @@ -0,0 +1,439 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.spark.scheduler.cluster.k8s
    +
    +import java.util.concurrent.{ExecutorService, ScheduledExecutorService, 
TimeUnit}
    +
    +import io.fabric8.kubernetes.api.model.{DoneablePod, Pod, PodBuilder, 
PodList}
    +import io.fabric8.kubernetes.client.{KubernetesClient, Watch, Watcher}
    +import io.fabric8.kubernetes.client.Watcher.Action
    +import io.fabric8.kubernetes.client.dsl.{FilterWatchListDeletable, 
MixedOperation, NonNamespaceOperation, PodResource}
    +import org.mockito.{AdditionalAnswers, ArgumentCaptor, Mock, 
MockitoAnnotations}
    +import org.mockito.Matchers.{any, eq => mockitoEq}
    +import org.mockito.Mockito.{doNothing, never, times, verify, when}
    +import org.scalatest.BeforeAndAfter
    +import org.scalatest.mock.MockitoSugar._
    +import scala.collection.JavaConverters._
    +import scala.concurrent.Future
    +
    +import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
    +import org.apache.spark.deploy.k8s.config._
    +import org.apache.spark.deploy.k8s.constants._
    +import org.apache.spark.rpc._
    +import org.apache.spark.scheduler.{ExecutorExited, LiveListenerBus, 
SlaveLost, TaskSchedulerImpl}
    +import 
org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages.{RegisterExecutor,
 RemoveExecutor}
    +import org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend
    +import org.apache.spark.util.ThreadUtils
    +
    +class KubernetesClusterSchedulerBackendSuite
    +    extends SparkFunSuite with BeforeAndAfter {
    +
    +  private val APP_ID = "test-spark-app"
    +  private val DRIVER_POD_NAME = "spark-driver-pod"
    +  private val NAMESPACE = "test-namespace"
    +  private val SPARK_DRIVER_HOST = "localhost"
    +  private val SPARK_DRIVER_PORT = 7077
    +  private val POD_ALLOCATION_INTERVAL = 60L
    +  private val DRIVER_URL = RpcEndpointAddress(
    +      SPARK_DRIVER_HOST, SPARK_DRIVER_PORT, 
CoarseGrainedSchedulerBackend.ENDPOINT_NAME).toString
    +  private val FIRST_EXECUTOR_POD = new PodBuilder()
    +    .withNewMetadata()
    +      .withName("pod1")
    +      .endMetadata()
    +    .withNewSpec()
    +      .withNodeName("node1")
    +      .endSpec()
    +    .withNewStatus()
    +      .withHostIP("192.168.99.100")
    +      .endStatus()
    +    .build()
    +  private val SECOND_EXECUTOR_POD = new PodBuilder()
    +    .withNewMetadata()
    +      .withName("pod2")
    +      .endMetadata()
    +    .withNewSpec()
    +      .withNodeName("node2")
    +      .endSpec()
    +    .withNewStatus()
    +      .withHostIP("192.168.99.101")
    +      .endStatus()
    +    .build()
    +
    +  private type PODS = MixedOperation[Pod, PodList, DoneablePod, 
PodResource[Pod, DoneablePod]]
    +  private type LABELED_PODS = FilterWatchListDeletable[
    +      Pod, PodList, java.lang.Boolean, Watch, Watcher[Pod]]
    +  private type IN_NAMESPACE_PODS = NonNamespaceOperation[
    +      Pod, PodList, DoneablePod, PodResource[Pod, DoneablePod]]
    --- End diff --
    
    Done.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to the sender.