Repository: spark
Updated Branches:
  refs/heads/master 7f6d10a73 -> fb3636b48


[SPARK-22807][SCHEDULER] Remove config that says docker and replace with 
container

## What changes were proposed in this pull request?
Changes discussed in 
https://github.com/apache/spark/pull/19946#discussion_r157063535
docker -> container, since with CRI, we are not limited to running only Docker 
images.

## How was this patch tested?
Manual testing

Author: foxish <ramanath...@google.com>

Closes #19995 from foxish/make-docker-container.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/fb3636b4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/fb3636b4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/fb3636b4

Branch: refs/heads/master
Commit: fb3636b482be3d0940345b1528c1d5090bbc25e6
Parents: 7f6d10a
Author: foxish <ramanath...@google.com>
Authored: Mon Dec 18 11:29:32 2017 -0800
Committer: Marcelo Vanzin <van...@cloudera.com>
Committed: Mon Dec 18 11:29:32 2017 -0800

----------------------------------------------------------------------
 .../org/apache/spark/deploy/SparkSubmitSuite.scala |  4 ++--
 .../scala/org/apache/spark/deploy/k8s/Config.scala | 17 ++++++++---------
 .../DriverConfigurationStepsOrchestrator.scala     |  4 ++--
 .../submit/steps/BaseDriverConfigurationStep.scala | 12 ++++++------
 .../scheduler/cluster/k8s/ExecutorPodFactory.scala | 12 ++++++------
 ...DriverConfigurationStepsOrchestratorSuite.scala |  6 +++---
 .../steps/BaseDriverConfigurationStepSuite.scala   |  8 ++++----
 .../cluster/k8s/ExecutorPodFactorySuite.scala      |  2 +-
 8 files changed, 32 insertions(+), 33 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/fb3636b4/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala 
b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 2eb8a1f..27dd435 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -396,7 +396,7 @@ class SparkSubmitSuite
       "--class", "org.SomeClass",
       "--driver-memory", "4g",
       "--conf", "spark.kubernetes.namespace=spark",
-      "--conf", "spark.kubernetes.driver.docker.image=bar",
+      "--conf", "spark.kubernetes.driver.container.image=bar",
       "/home/thejar.jar",
       "arg1")
     val appArgs = new SparkSubmitArguments(clArgs)
@@ -412,7 +412,7 @@ class SparkSubmitSuite
     conf.get("spark.executor.memory") should be ("5g")
     conf.get("spark.driver.memory") should be ("4g")
     conf.get("spark.kubernetes.namespace") should be ("spark")
-    conf.get("spark.kubernetes.driver.docker.image") should be ("bar")
+    conf.get("spark.kubernetes.driver.container.image") should be ("bar")
   }
 
   test("handles confs with flag equivalents") {

http://git-wip-us.apache.org/repos/asf/spark/blob/fb3636b4/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
----------------------------------------------------------------------
diff --git 
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
 
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
index f35fb38..04aadb4 100644
--- 
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
+++ 
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
@@ -30,21 +30,20 @@ private[spark] object Config extends Logging {
       .stringConf
       .createWithDefault("default")
 
-  val DRIVER_DOCKER_IMAGE =
-    ConfigBuilder("spark.kubernetes.driver.docker.image")
-      .doc("Docker image to use for the driver. Specify this using the 
standard Docker tag format.")
+  val DRIVER_CONTAINER_IMAGE =
+    ConfigBuilder("spark.kubernetes.driver.container.image")
+      .doc("Container image to use for the driver.")
       .stringConf
       .createOptional
 
-  val EXECUTOR_DOCKER_IMAGE =
-    ConfigBuilder("spark.kubernetes.executor.docker.image")
-      .doc("Docker image to use for the executors. Specify this using the 
standard Docker tag " +
-        "format.")
+  val EXECUTOR_CONTAINER_IMAGE =
+    ConfigBuilder("spark.kubernetes.executor.container.image")
+      .doc("Container image to use for the executors.")
       .stringConf
       .createOptional
 
-  val DOCKER_IMAGE_PULL_POLICY =
-    ConfigBuilder("spark.kubernetes.docker.image.pullPolicy")
+  val CONTAINER_IMAGE_PULL_POLICY =
+    ConfigBuilder("spark.kubernetes.container.image.pullPolicy")
       .doc("Kubernetes image pull policy. Valid values are Always, Never, and 
IfNotPresent.")
       .stringConf
       .checkValues(Set("Always", "Never", "IfNotPresent"))

http://git-wip-us.apache.org/repos/asf/spark/blob/fb3636b4/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestrator.scala
----------------------------------------------------------------------
diff --git 
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestrator.scala
 
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestrator.scala
index c563fc5..1411e6f 100644
--- 
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestrator.scala
+++ 
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestrator.scala
@@ -49,7 +49,7 @@ private[spark] class DriverConfigurationStepsOrchestrator(
     s"$appName-$uuid".toLowerCase.replaceAll("\\.", "-")
   }
 
-  private val dockerImagePullPolicy = 
submissionSparkConf.get(DOCKER_IMAGE_PULL_POLICY)
+  private val imagePullPolicy = 
submissionSparkConf.get(CONTAINER_IMAGE_PULL_POLICY)
   private val jarsDownloadPath = 
submissionSparkConf.get(JARS_DOWNLOAD_LOCATION)
   private val filesDownloadPath = 
submissionSparkConf.get(FILES_DOWNLOAD_LOCATION)
 
@@ -72,7 +72,7 @@ private[spark] class DriverConfigurationStepsOrchestrator(
       kubernetesAppId,
       kubernetesResourceNamePrefix,
       allDriverLabels,
-      dockerImagePullPolicy,
+      imagePullPolicy,
       appName,
       mainClass,
       appArgs,

http://git-wip-us.apache.org/repos/asf/spark/blob/fb3636b4/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStep.scala
----------------------------------------------------------------------
diff --git 
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStep.scala
 
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStep.scala
index ba2a11b..c335fcc 100644
--- 
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStep.scala
+++ 
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStep.scala
@@ -34,7 +34,7 @@ private[spark] class BaseDriverConfigurationStep(
     kubernetesAppId: String,
     kubernetesResourceNamePrefix: String,
     driverLabels: Map[String, String],
-    dockerImagePullPolicy: String,
+    imagePullPolicy: String,
     appName: String,
     mainClass: String,
     appArgs: Array[String],
@@ -46,9 +46,9 @@ private[spark] class BaseDriverConfigurationStep(
   private val driverExtraClasspath = submissionSparkConf.get(
     DRIVER_CLASS_PATH)
 
-  private val driverDockerImage = submissionSparkConf
-    .get(DRIVER_DOCKER_IMAGE)
-    .getOrElse(throw new SparkException("Must specify the driver Docker 
image"))
+  private val driverContainerImage = submissionSparkConf
+    .get(DRIVER_CONTAINER_IMAGE)
+    .getOrElse(throw new SparkException("Must specify the driver container 
image"))
 
   // CPU settings
   private val driverCpuCores = 
submissionSparkConf.getOption("spark.driver.cores").getOrElse("1")
@@ -110,8 +110,8 @@ private[spark] class BaseDriverConfigurationStep(
 
     val driverContainer = new ContainerBuilder(driverSpec.driverContainer)
       .withName(DRIVER_CONTAINER_NAME)
-      .withImage(driverDockerImage)
-      .withImagePullPolicy(dockerImagePullPolicy)
+      .withImage(driverContainerImage)
+      .withImagePullPolicy(imagePullPolicy)
       .addAllToEnv(driverCustomEnvs.asJava)
       .addToEnv(driverExtraClasspathEnv.toSeq: _*)
       .addNewEnv()

http://git-wip-us.apache.org/repos/asf/spark/blob/fb3636b4/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactory.scala
----------------------------------------------------------------------
diff --git 
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactory.scala
 
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactory.scala
index 9d8f3b9..7022615 100644
--- 
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactory.scala
+++ 
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactory.scala
@@ -72,10 +72,10 @@ private[spark] class ExecutorPodFactoryImpl(sparkConf: 
SparkConf)
       sparkConf,
       KUBERNETES_NODE_SELECTOR_PREFIX)
 
-  private val executorDockerImage = sparkConf
-    .get(EXECUTOR_DOCKER_IMAGE)
-    .getOrElse(throw new SparkException("Must specify the executor Docker 
image"))
-  private val dockerImagePullPolicy = sparkConf.get(DOCKER_IMAGE_PULL_POLICY)
+  private val executorContainerImage = sparkConf
+    .get(EXECUTOR_CONTAINER_IMAGE)
+    .getOrElse(throw new SparkException("Must specify the executor container 
image"))
+  private val imagePullPolicy = sparkConf.get(CONTAINER_IMAGE_PULL_POLICY)
   private val blockManagerPort = sparkConf
     .getInt("spark.blockmanager.port", DEFAULT_BLOCKMANAGER_PORT)
 
@@ -166,8 +166,8 @@ private[spark] class ExecutorPodFactoryImpl(sparkConf: 
SparkConf)
 
     val executorContainer = new ContainerBuilder()
       .withName("executor")
-      .withImage(executorDockerImage)
-      .withImagePullPolicy(dockerImagePullPolicy)
+      .withImage(executorContainerImage)
+      .withImagePullPolicy(imagePullPolicy)
       .withNewResources()
         .addToRequests("memory", executorMemoryQuantity)
         .addToLimits("memory", executorMemoryLimitQuantity)

http://git-wip-us.apache.org/repos/asf/spark/blob/fb3636b4/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestratorSuite.scala
----------------------------------------------------------------------
diff --git 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestratorSuite.scala
 
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestratorSuite.scala
index c7291d4..98f9f27 100644
--- 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestratorSuite.scala
+++ 
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestratorSuite.scala
@@ -17,7 +17,7 @@
 package org.apache.spark.deploy.k8s.submit
 
 import org.apache.spark.{SparkConf, SparkFunSuite}
-import org.apache.spark.deploy.k8s.Config.DRIVER_DOCKER_IMAGE
+import org.apache.spark.deploy.k8s.Config.DRIVER_CONTAINER_IMAGE
 import org.apache.spark.deploy.k8s.submit.steps._
 
 class DriverConfigurationStepsOrchestratorSuite extends SparkFunSuite {
@@ -32,7 +32,7 @@ class DriverConfigurationStepsOrchestratorSuite extends 
SparkFunSuite {
 
   test("Base submission steps with a main app resource.") {
     val sparkConf = new SparkConf(false)
-      .set(DRIVER_DOCKER_IMAGE, DRIVER_IMAGE)
+      .set(DRIVER_CONTAINER_IMAGE, DRIVER_IMAGE)
     val mainAppResource = 
JavaMainAppResource("local:///var/apps/jars/main.jar")
     val orchestrator = new DriverConfigurationStepsOrchestrator(
       NAMESPACE,
@@ -54,7 +54,7 @@ class DriverConfigurationStepsOrchestratorSuite extends 
SparkFunSuite {
 
   test("Base submission steps without a main app resource.") {
     val sparkConf = new SparkConf(false)
-      .set(DRIVER_DOCKER_IMAGE, DRIVER_IMAGE)
+      .set(DRIVER_CONTAINER_IMAGE, DRIVER_IMAGE)
     val orchestrator = new DriverConfigurationStepsOrchestrator(
       NAMESPACE,
       APP_ID,

http://git-wip-us.apache.org/repos/asf/spark/blob/fb3636b4/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStepSuite.scala
----------------------------------------------------------------------
diff --git 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStepSuite.scala
 
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStepSuite.scala
index 83c5f98..f7c1b31 100644
--- 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStepSuite.scala
+++ 
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/steps/BaseDriverConfigurationStepSuite.scala
@@ -30,7 +30,7 @@ class BaseDriverConfigurationStepSuite extends SparkFunSuite {
   private val APP_ID = "spark-app-id"
   private val RESOURCE_NAME_PREFIX = "spark"
   private val DRIVER_LABELS = Map("labelkey" -> "labelvalue")
-  private val DOCKER_IMAGE_PULL_POLICY = "IfNotPresent"
+  private val CONTAINER_IMAGE_PULL_POLICY = "IfNotPresent"
   private val APP_NAME = "spark-test"
   private val MAIN_CLASS = "org.apache.spark.examples.SparkPi"
   private val APP_ARGS = Array("arg1", "arg2", "arg 3")
@@ -47,7 +47,7 @@ class BaseDriverConfigurationStepSuite extends SparkFunSuite {
       .set(KUBERNETES_DRIVER_LIMIT_CORES, "4")
       .set(org.apache.spark.internal.config.DRIVER_MEMORY.key, "256M")
       .set(org.apache.spark.internal.config.DRIVER_MEMORY_OVERHEAD, 200L)
-      .set(DRIVER_DOCKER_IMAGE, "spark-driver:latest")
+      .set(DRIVER_CONTAINER_IMAGE, "spark-driver:latest")
       .set(s"$KUBERNETES_DRIVER_ANNOTATION_PREFIX$CUSTOM_ANNOTATION_KEY", 
CUSTOM_ANNOTATION_VALUE)
       .set(s"$KUBERNETES_DRIVER_ENV_KEY$DRIVER_CUSTOM_ENV_KEY1", 
"customDriverEnv1")
       .set(s"$KUBERNETES_DRIVER_ENV_KEY$DRIVER_CUSTOM_ENV_KEY2", 
"customDriverEnv2")
@@ -56,7 +56,7 @@ class BaseDriverConfigurationStepSuite extends SparkFunSuite {
       APP_ID,
       RESOURCE_NAME_PREFIX,
       DRIVER_LABELS,
-      DOCKER_IMAGE_PULL_POLICY,
+      CONTAINER_IMAGE_PULL_POLICY,
       APP_NAME,
       MAIN_CLASS,
       APP_ARGS,
@@ -71,7 +71,7 @@ class BaseDriverConfigurationStepSuite extends SparkFunSuite {
 
     assert(preparedDriverSpec.driverContainer.getName === 
DRIVER_CONTAINER_NAME)
     assert(preparedDriverSpec.driverContainer.getImage === 
"spark-driver:latest")
-    assert(preparedDriverSpec.driverContainer.getImagePullPolicy === 
DOCKER_IMAGE_PULL_POLICY)
+    assert(preparedDriverSpec.driverContainer.getImagePullPolicy === 
CONTAINER_IMAGE_PULL_POLICY)
 
     assert(preparedDriverSpec.driverContainer.getEnv.size === 7)
     val envs = preparedDriverSpec.driverContainer

http://git-wip-us.apache.org/repos/asf/spark/blob/fb3636b4/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactorySuite.scala
----------------------------------------------------------------------
diff --git 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactorySuite.scala
 
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactorySuite.scala
index 1c7717c..3a55d7c 100644
--- 
a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactorySuite.scala
+++ 
b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactorySuite.scala
@@ -50,7 +50,7 @@ class ExecutorPodFactorySuite extends SparkFunSuite with 
BeforeAndAfter with Bef
     baseConf = new SparkConf()
       .set(KUBERNETES_DRIVER_POD_NAME, driverPodName)
       .set(KUBERNETES_EXECUTOR_POD_NAME_PREFIX, executorPrefix)
-      .set(EXECUTOR_DOCKER_IMAGE, executorImage)
+      .set(EXECUTOR_CONTAINER_IMAGE, executorImage)
   }
 
   test("basic executor pod has reasonable defaults") {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to