Github user felixcheung commented on a diff in the pull request:

    https://github.com/apache/spark/pull/9390#discussion_r43847118
  
    --- Diff: core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala ---
    @@ -362,22 +363,41 @@ object SparkSubmit {
           }
         }
     
    -    // In YARN mode for an R app, add the SparkR package archive to archives
    -    // that can be distributed with the job
    +    // In YARN mode for an R app, add the SparkR package archive and the R package
    +    // archive containing all of the built R libraries to archives so that they can
    +    // be distributed with the job
         if (args.isR && clusterManager == YARN) {
    -      val rPackagePath = RUtils.localSparkRPackagePath
    -      if (rPackagePath.isEmpty) {
    +      val sparkRPackagePath = RUtils.localSparkRPackagePath
    +      if (sparkRPackagePath.isEmpty) {
             printErrorAndExit("SPARK_HOME does not exist for R application in YARN mode.")
           }
    -      val rPackageFile =
    -        RPackageUtils.zipRLibraries(new File(rPackagePath.get), SPARKR_PACKAGE_ARCHIVE)
    -      if (!rPackageFile.exists()) {
    +      val sparkRPackageFile = new File(sparkRPackagePath.get, SPARKR_PACKAGE_ARCHIVE)
    +      if (!sparkRPackageFile.exists()) {
             printErrorAndExit(s"$SPARKR_PACKAGE_ARCHIVE does not exist for R application in YARN mode.")
           }
    -      val localURI = Utils.resolveURI(rPackageFile.getAbsolutePath)
    +      val sparkRPackageURI = Utils.resolveURI(sparkRPackageFile.getAbsolutePath).toString
     
    +      // Distribute the SparkR package.
           // Assigns a symbol link name "sparkr" to the shipped package.
    -      args.archives = mergeFileLists(args.archives, localURI.toString + "#sparkr")
    +      args.archives = mergeFileLists(args.archives, sparkRPackageURI + "#sparkr")
    +
    +      // Distribute the R package archive containing all the built R packages.
    +      if (!RUtils.rPackages.isEmpty) {
    +        val rPackageFile =
    +          RPackageUtils.zipRLibraries(new File(RUtils.rPackages.get), R_PACKAGE_ARCHIVE)
    +        if (!rPackageFile.exists()) {
    +          printErrorAndExit("Failed to zip all the built R packages.")
    +        }
    +
    +        val rPackageURI = Utils.resolveURI(rPackageFile.getAbsolutePath).toString
    +        // Assigns a symbol link name "rpkg" to the shipped package.
    +        args.archives = mergeFileLists(args.archives, rPackageURI + "#rpkg")
    +      }
    +    }
    +
    +    // TODO: Support distributing R packages with standalone cluster
    +    if (args.isR && clusterManager == STANDALONE && !RUtils.rPackages.isEmpty) {
    --- End diff ---
    
    What about MESOS?
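
    For reference, the guard quoted above only rejects locally built R packages for STANDALONE, so a Mesos deployment would currently slip past it. Below is a minimal sketch of how the same check could also cover MESOS. It is illustrative only: the object name, the stubbed flag values (which mirror SparkSubmit's cluster-manager bit flags), and the exception standing in for printErrorAndExit are assumptions for the sketch, not code from this PR.

        // Sketch only: self-contained stand-ins for SparkSubmit internals so the
        // guard compiles on its own. Flag values mirror SparkSubmit's bit flags.
        object RPackageGuardSketch {
          private val YARN = 1
          private val STANDALONE = 2
          private val MESOS = 4

          // SparkSubmit prints the message and exits; an exception keeps the sketch testable.
          private def printErrorAndExit(msg: String): Unit =
            throw new IllegalArgumentException(msg)

          /** Rejects locally built R packages on cluster managers that cannot ship them yet. */
          def checkRPackageSupport(isR: Boolean, clusterManager: Int, hasRPackages: Boolean): Unit = {
            // The diff only guards STANDALONE; widening the condition covers MESOS as well
            // until R package distribution is implemented there.
            val unsupported = clusterManager == STANDALONE || clusterManager == MESOS
            if (isR && unsupported && hasRPackages) {
              printErrorAndExit(
                "Distributing R packages with standalone or Mesos clusters is not supported.")
            }
          }
        }

    Equivalently, the one-line check in the diff could simply be duplicated for clusterManager == MESOS with its own TODO, keeping a separate error message per cluster manager.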

