This is an automated email from the ASF dual-hosted git repository. dongjoon pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 068d53b [SPARK-37735][K8S] Add appId interface to KubernetesConf 068d53b is described below commit 068d53bd5d89c96bf0cdb05d3ec7f2f023cf3875 Author: Yikun Jiang <yikunk...@gmail.com> AuthorDate: Thu Dec 30 13:48:58 2021 -0800 [SPARK-37735][K8S] Add appId interface to KubernetesConf ### What changes were proposed in this pull request? Add `appId` interface to KubernetesConf ### Why are the changes needed? The `appId` can currently only be accessed in `KubernetesDriverConf` and `KubernetesExecutorConf`, but can't be accessed in `KubernetesConf`. Some user feature steps use `KubernetesConf` as a constructor parameter in order to share the feature step between driver and executor. One such case is a customized feature step (such as Volcano or YuniKorn) that uses appId as a job identifier. So we should add appId to KubernetesConf to help such feature steps access appId. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? UT Closes #35015 from Yikun/SPARK-36057. 
Authored-by: Yikun Jiang <yikunk...@gmail.com> Signed-off-by: Dongjoon Hyun <dongj...@apache.org> --- .../main/scala/org/apache/spark/deploy/k8s/KubernetesConf.scala | 1 + .../scala/org/apache/spark/deploy/k8s/KubernetesConfSuite.scala | 8 ++++++++ 2 files changed, 9 insertions(+) diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesConf.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesConf.scala index 8130402..46086fa 100644 --- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesConf.scala +++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesConf.scala @@ -43,6 +43,7 @@ private[spark] abstract class KubernetesConf(val sparkConf: SparkConf) { def secretNamesToMountPaths: Map[String, String] def volumes: Seq[KubernetesVolumeSpec] def schedulerName: String + def appId: String def appName: String = get("spark.app.name", "spark") diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/KubernetesConfSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/KubernetesConfSuite.scala index 119bcb0..1b3aaa5 100644 --- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/KubernetesConfSuite.scala +++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/KubernetesConfSuite.scala @@ -218,6 +218,14 @@ class KubernetesConfSuite extends SparkFunSuite { assert(driverConf.schedulerName === "driverScheduler") } + test("SPARK-37735: access appId in KubernetesConf") { + val sparkConf = new SparkConf(false) + val driverConf = KubernetesTestConf.createDriverConf(sparkConf) + val execConf = KubernetesTestConf.createExecutorConf(sparkConf) + assert(driverConf.asInstanceOf[KubernetesConf].appId === KubernetesTestConf.APP_ID) + assert(execConf.asInstanceOf[KubernetesConf].appId === 
KubernetesTestConf.APP_ID) + } + test("SPARK-36566: get app name label") { assert(KubernetesConf.getAppNameLabel(" Job+Spark-Pi 2021") === "job-spark-pi-2021") assert(KubernetesConf.getAppNameLabel("a" * 63) === "a" * 63) --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org