This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 360adc2250b [SPARK-45197][CORE] Make `StandaloneRestServer` add `JavaModuleOptions` to drivers
360adc2250b is described below

commit 360adc2250bccdb0fbe559dcd1fc4b6b4c7c1d7a
Author: Dongjoon Hyun <dh...@apple.com>
AuthorDate: Mon Sep 18 04:10:22 2023 -0700

    [SPARK-45197][CORE] Make `StandaloneRestServer` add `JavaModuleOptions` to drivers
    
    ### What changes were proposed in this pull request?
    
    This PR aims to make `StandaloneRestServer` add `JavaModuleOptions` to drivers by default.
    
    ### Why are the changes needed?
    
    Since Apache Spark 3.3.0 (SPARK-36796, #34153), `SparkContext` adds `JavaModuleOptions` by default.
    
    `StandaloneRestServer` should also add `JavaModuleOptions` when it receives submissions via the REST API. Otherwise, the driver fails as shown below unless users set the options manually.
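
    For context, the default module options expand to a list of JVM flags. A representative subset is shown here (illustrative only; the exact list depends on the Spark and JDK versions):

    ```
    -XX:+IgnoreUnrecognizedVMOptions
    --add-opens=java.base/java.lang=ALL-UNNAMED
    --add-opens=java.base/java.nio=ALL-UNNAMED
    --add-opens=java.base/sun.nio.ch=ALL-UNNAMED
    ```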
    
    **SUBMISSION**
    ```bash
    $ SPARK_MASTER_OPTS="-Dspark.master.rest.enabled=true" sbin/start-master.sh
    $ curl -s -k -XPOST http://yourserver:6066/v1/submissions/create \
        --header "Content-Type:application/json;charset=UTF-8" \
        --data '{
          "appResource": "",
          "sparkProperties": {
            "spark.master": "local[2]",
            "spark.app.name": "Test 1",
            "spark.submit.deployMode": "cluster",
            "spark.jars": 
"/Users/dongjoon/APACHE/spark-release/spark-3.5.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.5.0.jar"
          },
          "clientSparkVersion": "",
          "mainClass": "org.apache.spark.examples.SparkPi",
          "environmentVariables": {},
          "action": "CreateSubmissionRequest",
          "appArgs": []
        }'
    ```
    
    **DRIVER `stderr` LOG**
    ```
    Exception in thread "main" java.lang.reflect.InvocationTargetException
    ...
    at org.apache.spark.deploy.worker.DriverWrapper.main(DriverWrapper.scala)
    Caused by: java.lang.IllegalAccessError: class org.apache.spark.storage.StorageUtils$ (in unnamed module 0x6d7a93c9) cannot access class sun.nio.ch.DirectBuffer (in module java.base) because module java.base does not export sun.nio.ch to unnamed module 0x6d7a93c9
    ```
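
    Until a release includes this fix, a submission can work around the failure by passing the module options explicitly, for example via `spark.driver.extraJavaOptions`. A minimal sketch based on the submission above (the jar path is a placeholder, and only the `sun.nio.ch` flag needed by this particular error is shown; passing the full default flag set is generally safer):

    ```bash
    $ curl -s -k -XPOST http://yourserver:6066/v1/submissions/create \
        --header "Content-Type:application/json;charset=UTF-8" \
        --data '{
          "appResource": "",
          "sparkProperties": {
            "spark.master": "local[2]",
            "spark.app.name": "Test 1",
            "spark.submit.deployMode": "cluster",
            "spark.driver.extraJavaOptions": "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
            "spark.jars": "/path/to/spark-examples_2.12-3.5.0.jar"
          },
          "clientSparkVersion": "",
          "mainClass": "org.apache.spark.examples.SparkPi",
          "environmentVariables": {},
          "action": "CreateSubmissionRequest",
          "appArgs": []
        }'
    ```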
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the CIs with the newly added test case.
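
    For instance, the suite containing the new test can be run locally with something like the following (assuming an sbt-based dev environment):

    ```bash
    # Run only the standalone REST submit suite, which includes the SPARK-45197 test.
    build/sbt "core/testOnly org.apache.spark.deploy.rest.StandaloneRestSubmitSuite"
    ```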
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #42975 from dongjoon-hyun/SPARK-45197.
    
    Authored-by: Dongjoon Hyun <dh...@apple.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../apache/spark/deploy/rest/StandaloneRestServer.scala    | 14 +++++++++-----
 .../spark/deploy/rest/StandaloneRestSubmitSuite.scala      | 12 ++++++++++++
 .../java/org/apache/spark/launcher/JavaModuleOptions.java  |  8 ++++++++
 3 files changed, 29 insertions(+), 5 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala b/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala
index c060ef9da8c..a298e4f6dbf 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/StandaloneRestServer.scala
@@ -24,7 +24,7 @@ import org.apache.spark.{SPARK_VERSION => sparkVersion, SparkConf}
 import org.apache.spark.deploy.{Command, DeployMessages, DriverDescription}
 import org.apache.spark.deploy.ClientArguments._
 import org.apache.spark.internal.config
-import org.apache.spark.launcher.SparkLauncher
+import org.apache.spark.launcher.{JavaModuleOptions, SparkLauncher}
 import org.apache.spark.resource.ResourceUtils
 import org.apache.spark.rpc.RpcEndpointRef
 import org.apache.spark.util.Utils
@@ -124,7 +124,10 @@ private[rest] class StandaloneSubmitRequestServlet(
   * fields used by python applications since python is not supported in standalone
   * cluster mode yet.
    */
-  private def buildDriverDescription(request: CreateSubmissionRequest): DriverDescription = {
+  private[rest] def buildDriverDescription(
+      request: CreateSubmissionRequest,
+      masterUrl: String,
+      masterRestPort: Int): DriverDescription = {
     // Required fields, including the main class because python is not yet supported
     val appResource = Option(request.appResource).getOrElse {
       throw new SubmitRestMissingFieldException("Application jar is missing.")
@@ -149,7 +152,6 @@ private[rest] class StandaloneSubmitRequestServlet(
     // the driver.
     val masters = sparkProperties.get("spark.master")
     val (_, masterPort) = Utils.extractHostPortFromSparkUrl(masterUrl)
-    val masterRestPort = this.conf.get(config.MASTER_REST_SERVER_PORT)
     val updatedMasters = masters.map(
       _.replace(s":$masterRestPort", s":$masterPort")).getOrElse(masterUrl)
     val appArgs = request.appArgs
@@ -167,7 +169,8 @@ private[rest] class StandaloneSubmitRequestServlet(
       .getOrElse(Seq.empty)
     val extraJavaOpts = driverExtraJavaOptions.map(Utils.splitCommandString).getOrElse(Seq.empty)
     val sparkJavaOpts = Utils.sparkJavaOpts(conf)
-    val javaOpts = sparkJavaOpts ++ defaultJavaOpts ++ extraJavaOpts
+    val javaModuleOptions = JavaModuleOptions.defaultModuleOptionArray().toSeq
+    val javaOpts = javaModuleOptions ++ sparkJavaOpts ++ defaultJavaOpts ++ extraJavaOpts
     val command = new Command(
       "org.apache.spark.deploy.worker.DriverWrapper",
       Seq("{{WORKER_URL}}", "{{USER_JAR}}", mainClass) ++ appArgs, // args to 
the DriverWrapper
@@ -194,7 +197,8 @@ private[rest] class StandaloneSubmitRequestServlet(
       responseServlet: HttpServletResponse): SubmitRestProtocolResponse = {
     requestMessage match {
       case submitRequest: CreateSubmissionRequest =>
-        val driverDescription = buildDriverDescription(submitRequest)
+        val driverDescription = buildDriverDescription(
+          submitRequest, masterUrl, conf.get(config.MASTER_REST_SERVER_PORT))
         val response = masterEndpoint.askSync[DeployMessages.SubmitDriverResponse](
           DeployMessages.RequestSubmitDriver(driverDescription))
         val submitResponse = new CreateSubmissionResponse
diff --git a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
index 696765297e7..0ad14de4ab5 100644
--- a/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/rest/StandaloneRestSubmitSuite.scala
@@ -413,6 +413,18 @@ class StandaloneRestSubmitSuite extends SparkFunSuite {
     assert(filteredVariables == Map("SPARK_VAR" -> "1", "MESOS_VAR" -> "1"))
   }
 
+  test("SPARK-45197: Make StandaloneRestServer add JavaModuleOptions to 
drivers") {
+    val request = new CreateSubmissionRequest
+    request.appResource = ""
+    request.mainClass = ""
+    request.appArgs = Array.empty[String]
+    request.sparkProperties = Map.empty[String, String]
+    request.environmentVariables = Map.empty[String, String]
+    val servlet = new StandaloneSubmitRequestServlet(null, null, null)
+    val desc = servlet.buildDriverDescription(request, "spark://master:7077", 6066)
+    assert(desc.command.javaOpts.exists(_.startsWith("--add-opens")))
+  }
+
   /* --------------------- *
    |     Helper methods    |
    * --------------------- */
diff --git a/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java b/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java
index 013dde2766f..77895a09020 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java
@@ -50,4 +50,12 @@ public class JavaModuleOptions {
     public static String defaultModuleOptions() {
       return String.join(" ", DEFAULT_MODULE_OPTIONS);
     }
+
+    /**
+     * Returns the default Java option array related to `--add-opens` and
+     * `-XX:+IgnoreUnrecognizedVMOptions` used by Spark.
+     */
+    public static String[] defaultModuleOptionArray() {
+      return DEFAULT_MODULE_OPTIONS;
+    }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
