[
https://issues.apache.org/jira/browse/SPARK-25220?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16727009#comment-16727009
]
ASF GitHub Bot commented on SPARK-25220:
vanzin closed pull request #22212: [SPARK-25220] Separate kubernetes node
selector config between driver and executors.
URL: https://github.com/apache/spark/pull/22212
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:
As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):
diff --git a/docs/running-on-kubernetes.md b/docs/running-on-kubernetes.md
index 8f84ca044e163..fd9606ebbd1cf 100644
--- a/docs/running-on-kubernetes.md
+++ b/docs/running-on-kubernetes.md
@@ -663,11 +663,21 @@ specific to Spark on Kubernetes.
- spark.kubernetes.node.selector.[labelKey]
+ spark.kubernetes.driver.selector.[labelKey]
(none)
-Adds to the node selector of the driver pod and executor pods, with key
labelKey and the value as the
-configuration's value. For example, setting
spark.kubernetes.node.selector.identifier to
myIdentifier
+Adds to the node selector of the driver pod, with key
labelKey and the value as the
+configuration's value. For example, setting
spark.kubernetes.driver.selector.identifier to
myIdentifier
+will result in the driver pod having a node selector with
key identifier and value
+ myIdentifier. Multiple node selector keys can be added by
setting multiple configurations with this prefix.
+
+
+
+ spark.kubernetes.executor.selector.[labelKey]
+ (none)
+
+Adds to the node selector of the executor pods, with key
labelKey and the value as the
+configuration's value. For example, setting
spark.kubernetes.executor.selector.identifier to
myIdentifier
will result in the executor pods having a node selector with
key identifier and value
myIdentifier. Multiple node selector keys can be added by
setting multiple configurations with this prefix.
diff --git
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
index 1b582fe53624a..5677cd6392c1f 100644
---
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
+++
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
@@ -228,14 +228,14 @@ private[spark] object Config extends Logging {
val KUBERNETES_AUTH_SUBMISSION_CONF_PREFIX =
"spark.kubernetes.authenticate.submission"
- val KUBERNETES_NODE_SELECTOR_PREFIX = "spark.kubernetes.node.selector."
-
+ val KUBERNETES_DRIVER_NODE_SELECTOR_PREFIX =
"spark.kubernetes.driver.selector."
val KUBERNETES_DRIVER_LABEL_PREFIX = "spark.kubernetes.driver.label."
val KUBERNETES_DRIVER_ANNOTATION_PREFIX =
"spark.kubernetes.driver.annotation."
val KUBERNETES_DRIVER_SECRETS_PREFIX = "spark.kubernetes.driver.secrets."
val KUBERNETES_DRIVER_SECRET_KEY_REF_PREFIX =
"spark.kubernetes.driver.secretKeyRef."
val KUBERNETES_DRIVER_VOLUMES_PREFIX = "spark.kubernetes.driver.volumes."
+ val KUBERNETES_EXECUTOR_NODE_SELECTOR_PREFIX =
"spark.kubernetes.executor.selector."
val KUBERNETES_EXECUTOR_LABEL_PREFIX = "spark.kubernetes.executor.label."
val KUBERNETES_EXECUTOR_ANNOTATION_PREFIX =
"spark.kubernetes.executor.annotation."
val KUBERNETES_EXECUTOR_SECRETS_PREFIX = "spark.kubernetes.executor.secrets."
diff --git
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesConf.scala
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesConf.scala
index 3aa35d419073f..3fe21498406a7 100644
---
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesConf.scala
+++
b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesConf.scala
@@ -94,8 +94,11 @@ private[spark] case class KubernetesConf[T <:
KubernetesRoleSpecificConf](
}
}
- def nodeSelector(): Map[String, String] =
-KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf,
KUBERNETES_NODE_SELECTOR_PREFIX)
+ def driverSelector(): Map[String, String] =
+KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf,
KUBERNETES_DRIVER_NODE_SELECTOR_PREFIX)
+
+ def executorSelector(): Map[String, String] =
+KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf,
KUBERNETES_EXECUTOR_NODE_SELECTOR_PREFIX)
def get[T](config: ConfigEntry[T]): T = sparkConf.get(config)
diff --git
a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStep.scala