GitHub user ssuchter commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20697#discussion_r191487129
  
    --- Diff: resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala ---
    @@ -0,0 +1,116 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.spark.deploy.k8s.integrationtest
    +
    +import java.nio.file.{Path, Paths}
    +import java.util.UUID
    +
    +import scala.collection.JavaConverters._
    +import scala.collection.mutable
    +
    +import io.fabric8.kubernetes.client.DefaultKubernetesClient
    +import org.scalatest.concurrent.Eventually
    +
    +import org.apache.spark.internal.Logging
    +
    +private[spark] class KubernetesTestComponents(defaultClient: DefaultKubernetesClient) {
    +
    +  val namespaceOption = Option(System.getProperty("spark.kubernetes.test.namespace"))
    +  val hasUserSpecifiedNamespace = namespaceOption.isDefined
    +  val namespace = namespaceOption.getOrElse(UUID.randomUUID().toString.replaceAll("-", ""))
    +  private val serviceAccountName =
    +    Option(System.getProperty("spark.kubernetes.test.serviceAccountName"))
    +      .getOrElse("default")
    +  val kubernetesClient = defaultClient.inNamespace(namespace)
    +  val clientConfig = kubernetesClient.getConfiguration
    +
    +  def createNamespace(): Unit = {
    +    defaultClient.namespaces.createNew()
    +      .withNewMetadata()
    +      .withName(namespace)
    +      .endMetadata()
    +      .done()
    +  }
    +
    +  def deleteNamespace(): Unit = {
    +    defaultClient.namespaces.withName(namespace).delete()
    +    Eventually.eventually(KubernetesSuite.TIMEOUT, KubernetesSuite.INTERVAL) {
    +      val namespaceList = defaultClient
    +        .namespaces()
    +        .list()
    +        .getItems
    +        .asScala
    +      require(!namespaceList.exists(_.getMetadata.getName == namespace))
    +    }
    +  }
    +
    +  def newSparkAppConf(): SparkAppConf = {
    +    new SparkAppConf()
    +      .set("spark.master", s"k8s://${kubernetesClient.getMasterUrl}")
    +      .set("spark.kubernetes.namespace", namespace)
    +      .set("spark.executor.memory", "500m")
    +      .set("spark.executor.cores", "1")
    +      .set("spark.executors.instances", "1")
    +      .set("spark.app.name", "spark-test-app")
    +      .set("spark.ui.enabled", "true")
    +      .set("spark.testing", "false")
    +      .set("spark.kubernetes.submission.waitAppCompletion", "false")
    +      .set("spark.kubernetes.authenticate.driver.serviceAccountName", 
serviceAccountName)
    +  }
    +}
    +
    +private[spark] class SparkAppConf {
    +
    +  private val map = mutable.Map[String, String]()
    +
    +  def set(key: String, value: String): SparkAppConf = {
    +    map.put(key, value)
    +    this
    +  }
    +
    +  def get(key: String): String = map.getOrElse(key, "")
    +
    +  def setJars(jars: Seq[String]): Unit = set("spark.jars", jars.mkString(","))
    +
    +  override def toString: String = map.toString
    +
    +  def toStringArray: Iterable[String] = map.toList.flatMap(t => List("--conf", s"${t._1}=${t._2}"))
    +}
    +
    +private[spark] case class SparkAppArguments(
    +    mainAppResource: String,
    +    mainClass: String,
    +    appArgs: Array[String])
    +
    +private[spark] object SparkAppLauncher extends Logging {
    --- End diff ---
    
    This one is so much smaller than SparkLauncher (it is < 10 lines of executable code) that I think we should not try to switch in this CL.
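    
    For context, here is a minimal sketch of what that switch could look like, reusing the `SparkAppConf` and `SparkAppArguments` helpers from the diff above. The object name, the cluster deploy mode, and the 3-minute wait are illustrative assumptions, not something this PR proposes:
    
    ```scala
    import java.util.concurrent.TimeUnit
    
    import org.apache.spark.launcher.SparkLauncher
    
    // Hypothetical alternative to this PR's SparkAppLauncher: drive spark-submit
    // through org.apache.spark.launcher.SparkLauncher instead of a forked process.
    private[spark] object SparkLauncherBasedAppLauncher {
    
      def launchSparkApp(appArguments: SparkAppArguments, appConf: SparkAppConf): Unit = {
        val launcher = new SparkLauncher()
          .setMaster(appConf.get("spark.master"))
          .setDeployMode("cluster")
          .setAppResource(appArguments.mainAppResource)
          .setMainClass(appArguments.mainClass)
          .addAppArgs(appArguments.appArgs: _*)
    
        // Replay every "--conf key=value" pair produced by SparkAppConf.toStringArray.
        appConf.toStringArray.toSeq.grouped(2).foreach { case Seq(_, kv) =>
          val Array(key, value) = kv.split("=", 2)
          launcher.setConf(key, value)
        }
    
        // spark.kubernetes.submission.waitAppCompletion=false makes spark-submit
        // return quickly, so waiting on the child process is cheap here.
        val process = launcher.launch()
        process.waitFor(3, TimeUnit.MINUTES)
      }
    }
    ```
    
    The in-process launcher would avoid shelling out, but given how little code the current helper is, deferring that change to a follow-up seems reasonable.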

