Repository: spark
Updated Branches:
  refs/heads/branch-1.4 b2684557f -> c0fbd6781


[SPARK-8410] [SPARK-8475] remove previous ivy resolution when using spark-submit

This PR also re-orders the repositories used when resolving packages: 
user-provided repositories are now consulted first.

cc andrewor14

Author: Burak Yavuz <brk...@gmail.com>

Closes #7089 from brkyvz/delete-prev-ivy-resolution and squashes the following commits:

a21f95a [Burak Yavuz] remove previous ivy resolution when using spark-submit

(cherry picked from commit d7f796da45d9a7c76ee4c29a9e0661ef76d8028a)
Signed-off-by: Andrew Or <and...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c0fbd678
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c0fbd678
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c0fbd678

Branch: refs/heads/branch-1.4
Commit: c0fbd6781eb47e930af0f1009779e36da85a6b65
Parents: b268455
Author: Burak Yavuz <brk...@gmail.com>
Authored: Mon Jun 29 17:27:02 2015 -0700
Committer: Andrew Or <and...@databricks.com>
Committed: Mon Jun 29 17:27:08 2015 -0700

----------------------------------------------------------------------
 .../org/apache/spark/deploy/SparkSubmit.scala   | 37 ++++++++++++--------
 .../spark/deploy/SparkSubmitUtilsSuite.scala    |  6 ++--
 2 files changed, 26 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c0fbd678/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index fe73459..43631ee 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -800,6 +800,20 @@ private[spark] object SparkSubmitUtils {
     val cr = new ChainResolver
     cr.setName("list")
 
+    val repositoryList = remoteRepos.getOrElse("")
+    // add any user-provided remote repositories first, ahead of the default resolvers
+    if (repositoryList.trim.nonEmpty) {
+      repositoryList.split(",").zipWithIndex.foreach { case (repo, i) =>
+        val brr: IBiblioResolver = new IBiblioResolver
+        brr.setM2compatible(true)
+        brr.setUsepoms(true)
+        brr.setRoot(repo)
+        brr.setName(s"repo-${i + 1}")
+        cr.add(brr)
+        printStream.println(s"$repo added as a remote repository with the name: ${brr.getName}")
+      }
+    }
+
     val localM2 = new IBiblioResolver
     localM2.setM2compatible(true)
     localM2.setRoot(m2Path.toURI.toString)
@@ -830,20 +844,6 @@ private[spark] object SparkSubmitUtils {
     sp.setRoot("http://dl.bintray.com/spark-packages/maven")
     sp.setName("spark-packages")
     cr.add(sp)
-
-    val repositoryList = remoteRepos.getOrElse("")
-    // add any other remote repositories other than maven central
-    if (repositoryList.trim.nonEmpty) {
-      repositoryList.split(",").zipWithIndex.foreach { case (repo, i) =>
-        val brr: IBiblioResolver = new IBiblioResolver
-        brr.setM2compatible(true)
-        brr.setUsepoms(true)
-        brr.setRoot(repo)
-        brr.setName(s"repo-${i + 1}")
-        cr.add(brr)
-        printStream.println(s"$repo added as a remote repository with the name: ${brr.getName}")
-      }
-    }
     cr
   }
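
To make the new ordering concrete, here is a minimal sketch of the chain this
hunk builds; the single example URL and the elided built-in resolvers are
assumptions for illustration, not the exact code above. The point is that
user-supplied repositories enter the chain first, so Ivy consults them before
any default resolver.

import org.apache.ivy.plugins.resolver.{ChainResolver, IBiblioResolver}

// The chain resolves in insertion order: user repos first, defaults after.
val cr = new ChainResolver
cr.setName("list")

// Hypothetical user-supplied repositories (e.g. from --repositories).
val userRepos = Seq("http://repo.example.com/maven")
userRepos.zipWithIndex.foreach { case (repo, i) =>
  val brr = new IBiblioResolver
  brr.setM2compatible(true)
  brr.setUsepoms(true)
  brr.setRoot(repo)
  brr.setName(s"repo-${i + 1}")
  cr.add(brr)
}
// The built-in resolvers (local m2, local ivy, Maven Central, spark-packages)
// are appended after this point, so they only act as fallbacks.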
 
@@ -973,6 +973,15 @@ private[spark] object SparkSubmitUtils {
 
         // A Module descriptor must be specified. Entries are dummy strings
         val md = getModuleDescriptor
+        // Clear the Ivy resolution report left over from previous launches. The file is
+        // usually at ~/.ivy2/org.apache.spark-spark-submit-parent-default.xml. Between runs,
+        // a stale report confuses Ivy when the artifacts can no longer be found at the
+        // repository declared in that file.
+        val mdId = md.getModuleRevisionId
+        val previousResolution = new File(ivySettings.getDefaultCache,
+          s"${mdId.getOrganisation}-${mdId.getName}-$ivyConfName.xml")
+        if (previousResolution.exists()) previousResolution.delete()
+
         md.setDefaultConf(ivyConfName)
 
         // Add exclusion rules for Spark and Scala Library
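
As a standalone illustration of the cleanup above, the sketch below recomputes
the report path and deletes it. The fresh IvySettings and the configuration
name "default" are assumptions for the example, not necessarily the exact
values spark-submit passes in.

import java.io.File
import org.apache.ivy.core.settings.IvySettings

// Ivy caches one resolution report per module and configuration. A stale
// report from an earlier run points Ivy at repositories that may no longer
// host the artifacts, so remove it before resolving again.
val ivySettings = new IvySettings()   // default cache lives under ~/.ivy2
val ivyConfName = "default"           // assumed configuration name
val previousResolution = new File(ivySettings.getDefaultCache,
  s"org.apache.spark-spark-submit-parent-$ivyConfName.xml")
if (previousResolution.exists()) previousResolution.delete()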

http://git-wip-us.apache.org/repos/asf/spark/blob/c0fbd678/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
index 8003b71..5559470 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -77,9 +77,9 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
     assert(resolver2.getResolvers.size() === 7)
     val expected = repos.split(",").map(r => s"$r/")
     resolver2.getResolvers.toArray.zipWithIndex.foreach { case (resolver: AbstractResolver, i) =>
-      if (i > 3) {
-        assert(resolver.getName === s"repo-${i - 3}")
-        assert(resolver.asInstanceOf[IBiblioResolver].getRoot === expected(i - 4))
+      if (i < 3) {
+        assert(resolver.getName === s"repo-${i + 1}")
+        assert(resolver.asInstanceOf[IBiblioResolver].getRoot === expected(i))
       }
     }
   }
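
The updated assertions pin down the new invariant: with three user
repositories and four built-in resolvers the chain holds seven entries, and
the user repositories occupy indices 0 through 2. A minimal sketch of that
property as a reusable check follows; the helper name and parameters are
illustrative, not part of the suite.

import org.apache.ivy.plugins.resolver.IBiblioResolver

// True iff the leading resolvers are the user repositories, in order,
// named repo-1, repo-2, ... with the expected roots.
def userReposComeFirst(resolvers: Seq[AnyRef], expectedRoots: Seq[String]): Boolean =
  expectedRoots.zipWithIndex.forall { case (root, i) =>
    resolvers(i) match {
      case r: IBiblioResolver => r.getName == s"repo-${i + 1}" && r.getRoot == root
      case _ => false
    }
  }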

