This is an automated email from the ASF dual-hosted git repository. srowen pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 41a2a7daeee [SPARK-44650][CORE] `spark.executor.defaultJavaOptions` Check illegal java options 41a2a7daeee is described below commit 41a2a7daeee0a25d39f30364a694becf54ab37e7 Author: sychen <syc...@ctrip.com> AuthorDate: Sun Aug 6 08:24:40 2023 -0500 [SPARK-44650][CORE] `spark.executor.defaultJavaOptions` Check illegal java options ### What changes were proposed in this pull request? ### Why are the changes needed? Command ```bash ./bin/spark-shell --conf spark.executor.extraJavaOptions='-Dspark.foo=bar' ``` Error ``` spark.executor.extraJavaOptions is not allowed to set Spark options (was '-Dspark.foo=bar'). Set them directly on a SparkConf or in a properties file when using ./bin/spark-submit. ``` Command ```bash ./bin/spark-shell --conf spark.executor.defaultJavaOptions='-Dspark.foo=bar' ``` Starts up normally. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? local test & add UT ``` ./bin/spark-shell --conf spark.executor.defaultJavaOptions='-Dspark.foo=bar' ``` ``` spark.executor.defaultJavaOptions is not allowed to set Spark options (was '-Dspark.foo=bar'). Set them directly on a SparkConf or in a properties file when using ./bin/spark-submit. ``` Closes #42313 from cxzl25/SPARK-44650. 
Authored-by: sychen <syc...@ctrip.com> Signed-off-by: Sean Owen <sro...@gmail.com> --- .../main/scala/org/apache/spark/SparkConf.scala | 25 +++++++++++----------- .../scala/org/apache/spark/SparkConfSuite.scala | 14 ++++++++++++ 2 files changed, 27 insertions(+), 12 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala index 813a14acd19..8c054d24b10 100644 --- a/core/src/main/scala/org/apache/spark/SparkConf.scala +++ b/core/src/main/scala/org/apache/spark/SparkConf.scala @@ -503,8 +503,6 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria logWarning(msg) } - val executorOptsKey = EXECUTOR_JAVA_OPTIONS.key - // Used by Yarn in 1.1 and before sys.props.get("spark.driver.libraryPath").foreach { value => val warning = @@ -518,16 +516,19 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging with Seria } // Validate spark.executor.extraJavaOptions - getOption(executorOptsKey).foreach { javaOpts => - if (javaOpts.contains("-Dspark")) { - val msg = s"$executorOptsKey is not allowed to set Spark options (was '$javaOpts'). " + - "Set them directly on a SparkConf or in a properties file when using ./bin/spark-submit." - throw new Exception(msg) - } - if (javaOpts.contains("-Xmx")) { - val msg = s"$executorOptsKey is not allowed to specify max heap memory settings " + - s"(was '$javaOpts'). Use spark.executor.memory instead." - throw new Exception(msg) + Seq(EXECUTOR_JAVA_OPTIONS.key, "spark.executor.defaultJavaOptions").foreach { executorOptsKey => + getOption(executorOptsKey).foreach { javaOpts => + if (javaOpts.contains("-Dspark")) { + val msg = s"$executorOptsKey is not allowed to set Spark options (was '$javaOpts'). " + + "Set them directly on a SparkConf or in a properties file " + + "when using ./bin/spark-submit." 
+ throw new Exception(msg) + } + if (javaOpts.contains("-Xmx")) { + val msg = s"$executorOptsKey is not allowed to specify max heap memory settings " + + s"(was '$javaOpts'). Use spark.executor.memory instead." + throw new Exception(msg) + } } } diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala index 74fd7816221..75e22e1418b 100644 --- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala +++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala @@ -498,6 +498,20 @@ class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSyst } } } + + test("SPARK-44650: spark.executor.defaultJavaOptions Check illegal java options") { + val conf = new SparkConf() + conf.validateSettings() + conf.set(EXECUTOR_JAVA_OPTIONS.key, "-Dspark.foo=bar") + intercept[Exception] { + conf.validateSettings() + } + conf.remove(EXECUTOR_JAVA_OPTIONS.key) + conf.set("spark.executor.defaultJavaOptions", "-Dspark.foo=bar") + intercept[Exception] { + conf.validateSettings() + } + } } class Class1 {} --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org