Github user squito commented on a diff in the pull request:

    https://github.com/apache/spark/pull/4216#discussion_r23710115
  
    --- Diff: core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala ---
    @@ -71,21 +81,64 @@ object SparkSubmit {
         if (appArgs.verbose) {
           printStream.println(appArgs)
         }
    -    val (childArgs, classpath, sysProps, mainClass) = 
createLaunchEnv(appArgs)
    -    launch(childArgs, classpath, sysProps, mainClass, appArgs.verbose)
    +    appArgs.action match {
    +      case SparkSubmitAction.SUBMIT => submit(appArgs)
    +      case SparkSubmitAction.KILL => kill(appArgs)
    +      case SparkSubmitAction.REQUEST_STATUS => requestStatus(appArgs)
    +    }
    +  }
    +
    +  /**
    +   * Kill an existing driver using the stable REST protocol. Standalone 
cluster mode only.
    +   */
    +  private def kill(args: SparkSubmitArguments): Unit = {
    +    new StandaloneRestClient().killDriver(args.master, args.driverToKill)
    +  }
    +
    +  /**
    +   * Request the status of an existing driver using the stable REST 
protocol.
    +   * Standalone cluster mode only.
    +   */
    +  private def requestStatus(args: SparkSubmitArguments): Unit = {
    +    new StandaloneRestClient().requestDriverStatus(args.master, 
args.driverToRequestStatusFor)
       }
     
       /**
    -   * @return a tuple containing
    -   *           (1) the arguments for the child process,
    -   *           (2) a list of classpath entries for the child,
    -   *           (3) a list of system properties and env vars, and
    -   *           (4) the main class for the child
    +   * Submit the application using the provided parameters.
    +   *
    +   * This runs in two steps. First, we prepare the launch environment by 
setting up
    +   * the appropriate classpath, system properties, and application 
arguments for
    +   * running the child main class based on the cluster manager and the 
deploy mode.
    +   * Second, we use this launch environment to invoke the main method of 
the child
    +   * main class.
    +   *
    +   * As of Spark 1.3, a stable REST-based application submission gateway 
is introduced.
    +   * If this is enabled, then we will run standalone cluster mode by 
passing the submit
    +   * parameters directly to a REST client, which will submit the 
application using the
    +   * REST protocol instead.
        */
    -  private[spark] def createLaunchEnv(args: SparkSubmitArguments)
    -      : (ArrayBuffer[String], ArrayBuffer[String], Map[String, String], 
String) = {
    +  private[spark] def submit(args: SparkSubmitArguments): Unit = {
    +    val (childArgs, childClasspath, sysProps, childMainClass) = 
prepareSubmitEnvironment(args)
    +    if (args.isStandaloneCluster && args.isRestEnabled) {
    +      printStream.println("Running standalone cluster mode using the 
stable REST protocol.")
    +      new StandaloneRestClient().submitDriver(args)
    +    } else {
    +      runMain(childArgs, childClasspath, sysProps, childMainClass)
    +    }
    +  }
     
    -    // Values to return
    +  /**
    +   * Prepare the environment for submitting an application.
    +   * This returns a 4-tuple:
    +   *   (1) the arguments for the child process,
    +   *   (2) a list of classpath entries for the child,
    +   *   (3) a list of system properties and env vars, and
    +   *   (4) the main class for the child
    +   * Exposed for testing.
    --- End diff --
    
    and it also modifies the `args` that are passed in, right?
    That is a little confusing as a side effect — why not just return a new,
    modified copy of the args instead?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to