Repository: spark
Updated Branches:
  refs/heads/master 93338807a -> bcb0258ae


[SPARK-16080][YARN] Set correct link name for conf archive in executors.

This makes sure the files end up on the executor's classpath where they are
expected to be. The unit test is also updated to verify that the files are
actually present there.

Author: Marcelo Vanzin <van...@cloudera.com>

Closes #13792 from vanzin/SPARK-16080.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bcb0258a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bcb0258a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bcb0258a

Branch: refs/heads/master
Commit: bcb0258ae62f23f71a067c1304232f272d7374aa
Parents: 9333880
Author: Marcelo Vanzin <van...@cloudera.com>
Authored: Tue Jun 21 12:48:06 2016 -0500
Committer: Tom Graves <tgra...@yahoo-inc.com>
Committed: Tue Jun 21 12:48:06 2016 -0500

----------------------------------------------------------------------
 .../apache/spark/deploy/yarn/ApplicationMaster.scala  | 14 ++++++++++----
 .../apache/spark/deploy/yarn/YarnClusterSuite.scala   |  8 ++++++++
 2 files changed, 18 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/bcb0258a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
----------------------------------------------------------------------
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
index 4df90d7..847d1de 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala
@@ -160,11 +160,17 @@ private[spark] class ApplicationMaster(
     }
 
     // Distribute the conf archive to executors.
-    sparkConf.get(CACHED_CONF_ARCHIVE).foreach { uri =>
-      val fs = FileSystem.get(new URI(uri), yarnConf)
+    sparkConf.get(CACHED_CONF_ARCHIVE).foreach { path =>
+      val uri = new URI(path)
+      val fs = FileSystem.get(uri, yarnConf)
       val status = fs.getFileStatus(new Path(uri))
-      setupDistributedCache(uri, LocalResourceType.ARCHIVE, status.getModificationTime().toString,
-        status.getLen.toString, LocalResourceVisibility.PRIVATE.name())
+      // SPARK-16080: Make sure to use the correct name for the destination when distributing the
+      // conf archive to executors.
+      val destUri = new URI(uri.getScheme(), uri.getRawSchemeSpecificPart(),
+        Client.LOCALIZED_CONF_DIR)
+      setupDistributedCache(destUri.toString(), LocalResourceType.ARCHIVE,
+        status.getModificationTime().toString, status.getLen.toString,
+        LocalResourceVisibility.PRIVATE.name())
     }
 
     // Clean up the configuration so it doesn't show up in the Web UI (since it's really noisy).
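
The key mechanism behind this change: Spark's YARN integration takes the link
name for a distributed-cache entry from the URI's fragment, so rebuilding the
cached archive's URI with Client.LOCALIZED_CONF_DIR as the fragment controls
the directory name under which the archive is localized in each container, and
that name is what ends up on the executor classpath. A minimal standalone
sketch of the URI trick; the HDFS path and the "__spark_conf__" link name are
illustrative placeholders rather than values taken from the patch:

    import java.net.URI

    object LinkNameSketch {
      // Rebuild a cache URI so that its fragment carries the desired link name.
      def withLinkName(path: String, linkName: String): URI = {
        val uri = new URI(path)
        // new URI(scheme, schemeSpecificPart, fragment) puts linkName after '#'.
        new URI(uri.getScheme(), uri.getRawSchemeSpecificPart(), linkName)
      }

      def main(args: Array[String]): Unit = {
        val dest = withLinkName(
          "hdfs://nn:8020/user/test/.sparkStaging/app_1/__spark_conf__.zip",
          "__spark_conf__")
        // Prints: hdfs://nn:8020/user/test/.sparkStaging/app_1/__spark_conf__.zip#__spark_conf__
        println(dest)
      }
    }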

http://git-wip-us.apache.org/repos/asf/spark/blob/bcb0258a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
----------------------------------------------------------------------
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index c465604..4ce33e0 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -292,6 +292,14 @@ private object YarnClusterDriver extends Logging with Matchers {
       sc.stop()
     }
 
+    // Verify that the config archive is correctly placed in the classpath of all containers.
+    val confFile = "/" + Client.SPARK_CONF_FILE
+    assert(getClass().getResource(confFile) != null)
+    val configFromExecutors = sc.parallelize(1 to 4, 4)
+      .map { _ => Option(getClass().getResource(confFile)).map(_.toString).orNull }
+      .collect()
+    assert(configFromExecutors.find(_ == null) === None)
+
     // verify log urls are present
     val listeners = sc.listenerBus.findListenersByClass[SaveExecutorInfo]
     assert(listeners.size === 1)
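
The added assertions fan a classpath lookup out to the executors and collect
the results on the driver, so a misplaced conf archive fails the test rather
than only the driver-side check. A hedged, standalone variant of that pattern
(the SparkContext, resource name, and task count below are placeholders; the
test itself uses getClass().getResource with Client.SPARK_CONF_FILE):

    import org.apache.spark.SparkContext

    object ClasspathCheckSketch {
      // True only if every task could resolve the resource on its executor.
      def resourceVisibleEverywhere(sc: SparkContext, resource: String, tasks: Int = 4): Boolean = {
        sc.parallelize(1 to tasks, tasks)
          .map { _ =>
            // Use the executor's context classloader so the closure does not need
            // to capture an enclosing instance. Unlike Class.getResource, the
            // ClassLoader variant expects no leading "/" on the resource name.
            Thread.currentThread().getContextClassLoader().getResource(resource) != null
          }
          .collect()
          .forall(identity)
      }
    }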

