Github user tnachen commented on a diff in the pull request: https://github.com/apache/spark/pull/20451#discussion_r200020286 --- Diff: resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesClusterManager.scala --- @@ -18,43 +18,68 @@ package org.apache.spark.scheduler.cluster.k8s import java.io.File -import io.fabric8.kubernetes.client.Config +import io.fabric8.kubernetes.client.{Config, KubernetesClient} -import org.apache.spark.{SparkContext, SparkException} +import org.apache.spark.{SparkContext, SparkConf} import org.apache.spark.deploy.k8s.{KubernetesUtils, SparkKubernetesClientFactory} import org.apache.spark.deploy.k8s.Config._ import org.apache.spark.deploy.k8s.Constants._ import org.apache.spark.internal.Logging import org.apache.spark.scheduler.{ExternalClusterManager, SchedulerBackend, TaskScheduler, TaskSchedulerImpl} import org.apache.spark.util.ThreadUtils -private[spark] class KubernetesClusterManager extends ExternalClusterManager with Logging { +trait ManagerSpecificHandlers { + def createKubernetesClient(sparkConf: SparkConf): KubernetesClient + } - override def canCreate(masterURL: String): Boolean = masterURL.startsWith("k8s") +private[spark] class KubernetesClusterManager extends ExternalClusterManager + with ManagerSpecificHandlers with Logging { - override def createTaskScheduler(sc: SparkContext, masterURL: String): TaskScheduler = { - if (masterURL.startsWith("k8s") && - sc.deployMode == "client" && - !sc.conf.get(KUBERNETES_DRIVER_SUBMIT_CHECK).getOrElse(false)) { - throw new SparkException("Client mode is currently not supported for Kubernetes.") + class InClusterHandlers extends ManagerSpecificHandlers { + override def createKubernetesClient(sparkConf: SparkConf): KubernetesClient = + SparkKubernetesClientFactory.createKubernetesClient( + KUBERNETES_MASTER_INTERNAL_URL, + Some(sparkConf.get(KUBERNETES_NAMESPACE)), + APISERVER_AUTH_DRIVER_MOUNTED_CONF_PREFIX, --- End diff -- Why do we need a 
separate conf prefix as well?
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org