Github user andrewor14 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/2350#discussion_r17641136
  
    --- Diff: 
yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala ---
    @@ -598,46 +675,44 @@ object ClientBase extends Logging {
        * @param fileName  Alternate name for the file (optional).
        * @param env       Map holding the environment variables.
        */
    -  private def addFileToClasspath(path: String, fileName: String,
    -      env: HashMap[String, String]) : Unit = {
    +  private def addFileToClasspath(
    +      path: String,
    +      fileName: String,
    +      env: HashMap[String, String]): Unit = {
         if (path != null) {
           scala.util.control.Exception.ignoring(classOf[URISyntaxException]) {
    -        val localPath = getLocalPath(path)
    -        if (localPath != null) {
    -          addClasspathEntry(localPath, env)
    +        val uri = new URI(path)
    +        if (uri.getScheme == LOCAL_SCHEME) {
    +          addClasspathEntry(uri.getPath, env)
               return
             }
           }
         }
         if (fileName != null) {
    -      addClasspathEntry(Environment.PWD.$() + Path.SEPARATOR + fileName, 
env);
    +      addClasspathEntry(Environment.PWD.$() + Path.SEPARATOR + fileName, 
env)
         }
       }
     
       /**
    -   * Returns the local path if the URI is a "local:" URI, or null 
otherwise.
    +   * Add the given path to the classpath entry of the given environment 
map.
    +   * If the classpath is already set, this appends the new path to the 
existing classpath.
        */
    -  private def getLocalPath(resource: String): String = {
    -    val uri = new URI(resource)
    -    if (LOCAL_SCHEME.equals(uri.getScheme())) {
    -      return uri.getPath()
    -    }
    -    null
    -  }
    -
    -  private def addClasspathEntry(path: String, env: HashMap[String, 
String]) =
    -    YarnSparkHadoopUtil.addToEnvironment(env, Environment.CLASSPATH.name, 
path,
    -            File.pathSeparator)
    +  private def addClasspathEntry(path: String, env: HashMap[String, 
String]): Unit =
    +    YarnSparkHadoopUtil.addPathToEnvironment(env, 
Environment.CLASSPATH.name, path)
     
       /**
        * Get the list of namenodes the user may access.
        */
    -  private[yarn] def getNameNodesToAccess(sparkConf: SparkConf): Set[Path] 
= {
    -    sparkConf.get("spark.yarn.access.namenodes", 
"").split(",").map(_.trim()).filter(!_.isEmpty)
    -      .map(new Path(_)).toSet
    +  def getNameNodesToAccess(sparkConf: SparkConf): Set[Path] = {
    --- End diff --
    
    Yeah, this would inherit the privacy of the class. It's simpler if we don't 
define `private[spark]` here again when it doesn't do anything extra.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to