Revert "[SPARK-7224] added mock repository generator for --packages tests"
This reverts commit 7dacc08ab36188991a001df23880167433844767.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c6d9a429
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c6d9a429
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c6d9a429

Branch: refs/heads/master
Commit: c6d9a429421561508e8adbb4892954381bc33a90
Parents: 58d6584
Author: Patrick Wendell <patr...@databricks.com>
Authored: Fri May 1 13:01:43 2015 -0700
Committer: Patrick Wendell <patr...@databricks.com>
Committed: Fri May 1 13:01:43 2015 -0700

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/TestUtils.scala |  27 +-
 .../org/apache/spark/deploy/SparkSubmit.scala   | 129 +++++----
 .../org/apache/spark/deploy/IvyTestUtils.scala  | 262 -------------------
 .../apache/spark/deploy/SparkSubmitSuite.scala  |  25 +-
 .../spark/deploy/SparkSubmitUtilsSuite.scala    |  57 ++--
 5 files changed, 97 insertions(+), 403 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c6d9a429/core/src/main/scala/org/apache/spark/TestUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/TestUtils.scala b/core/src/main/scala/org/apache/spark/TestUtils.scala
index fe6320b..398ca41 100644
--- a/core/src/main/scala/org/apache/spark/TestUtils.scala
+++ b/core/src/main/scala/org/apache/spark/TestUtils.scala
@@ -105,18 +105,23 @@ private[spark] object TestUtils {
     URI.create(s"string:///${name.replace(".", "/")}${SOURCE.extension}")
   }

-  private[spark] class JavaSourceFromString(val name: String, val code: String)
+  private class JavaSourceFromString(val name: String, val code: String)
     extends SimpleJavaFileObject(createURI(name), SOURCE) {
     override def getCharContent(ignoreEncodingErrors: Boolean): String = code
   }

-  /** Creates a compiled class with the source file. Class file will be placed in destDir. */
+  /** Creates a compiled class with the given name. Class file will be placed in destDir. */
   def createCompiledClass(
       className: String,
       destDir: File,
-      sourceFile: JavaSourceFromString,
-      classpathUrls: Seq[URL]): File = {
+      toStringValue: String = "",
+      baseClass: String = null,
+      classpathUrls: Seq[URL] = Seq()): File = {
     val compiler = ToolProvider.getSystemJavaCompiler
+    val extendsText = Option(baseClass).map { c => s" extends ${c}" }.getOrElse("")
+    val sourceFile = new JavaSourceFromString(className,
+      "public class " + className + extendsText + " implements java.io.Serializable {" +
+      "  @Override public String toString() { return \"" + toStringValue + "\"; }}")

     // Calling this outputs a class file in pwd. It's easier to just rename the file than
     // build a custom FileManager that controls the output location.
@@ -139,18 +144,4 @@ private[spark] object TestUtils {
     assert(out.exists(), "Destination file not moved: " + out.getAbsolutePath())
     out
   }
-
-  /** Creates a compiled class with the given name. Class file will be placed in destDir. */
-  def createCompiledClass(
-      className: String,
-      destDir: File,
-      toStringValue: String = "",
-      baseClass: String = null,
-      classpathUrls: Seq[URL] = Seq()): File = {
-    val extendsText = Option(baseClass).map { c => s" extends ${c}" }.getOrElse("")
-    val sourceFile = new JavaSourceFromString(className,
-      "public class " + className + extendsText + " implements java.io.Serializable {" +
-      "  @Override public String toString() { return \"" + toStringValue + "\"; }}")
-    createCompiledClass(className, destDir, sourceFile, classpathUrls)
-  }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/c6d9a429/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 0d149e7..b8ae4af 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -20,7 +20,6 @@ package org.apache.spark.deploy
 import java.io.{File, PrintStream}
 import java.lang.reflect.{InvocationTargetException, Modifier, UndeclaredThrowableException}
 import java.net.URL
-import java.nio.file.{Path => JavaPath}
 import java.security.PrivilegedExceptionAction

 import scala.collection.mutable.{ArrayBuffer, HashMap, Map}
@@ -709,9 +708,7 @@ private[deploy] object SparkSubmitUtils {
    * @param artifactId the artifactId of the coordinate
    * @param version the version of the coordinate
    */
-  private[deploy] case class MavenCoordinate(groupId: String, artifactId: String, version: String) {
-    override def toString: String = s"$groupId:$artifactId:$version"
-  }
+  private[deploy] case class MavenCoordinate(groupId: String, artifactId: String, version: String)

   /**
    * Extracts maven coordinates from a comma-delimited string. Coordinates should be provided
@@ -734,10 +731,6 @@ private[deploy] object SparkSubmitUtils {
     }
   }

-  /** Path of the local Maven cache. */
-  private[spark] def m2Path: JavaPath = new File(System.getProperty("user.home"),
-    ".m2" + File.separator + "repository" + File.separator).toPath
-
   /**
    * Extracts maven coordinates from a comma-delimited string
    * @param remoteRepos Comma-delimited string of remote repositories
@@ -751,7 +744,8 @@ private[deploy] object SparkSubmitUtils {

     val localM2 = new IBiblioResolver
     localM2.setM2compatible(true)
-    localM2.setRoot(m2Path.toUri.toString)
+    val m2Path = ".m2" + File.separator + "repository" + File.separator
+    localM2.setRoot(new File(System.getProperty("user.home"), m2Path).toURI.toString)
     localM2.setUsepoms(true)
     localM2.setName("local-m2-cache")
     cr.add(localM2)
@@ -876,72 +870,69 @@ private[deploy] object SparkSubmitUtils {
       ""
     } else {
       val sysOut = System.out
-      try {
-        // To prevent ivy from logging to system out
-        System.setOut(printStream)
-        val artifacts = extractMavenCoordinates(coordinates)
-        // Default configuration name for ivy
-        val ivyConfName = "default"
-        // set ivy settings for location of cache
-        val ivySettings: IvySettings = new IvySettings
-        // Directories for caching downloads through ivy and storing the jars when maven coordinates
-        // are supplied to spark-submit
-        val alternateIvyCache = ivyPath.getOrElse("")
-        val packagesDirectory: File =
-          if (alternateIvyCache.trim.isEmpty) {
-            new File(ivySettings.getDefaultIvyUserDir, "jars")
-          } else {
-            ivySettings.setDefaultIvyUserDir(new File(alternateIvyCache))
-            ivySettings.setDefaultCache(new File(alternateIvyCache, "cache"))
-            new File(alternateIvyCache, "jars")
-          }
-        printStream.println(
-          s"Ivy Default Cache set to: ${ivySettings.getDefaultCache.getAbsolutePath}")
-        printStream.println(s"The jars for the packages stored in: $packagesDirectory")
-        // create a pattern matcher
-        ivySettings.addMatcher(new GlobPatternMatcher)
-        // create the dependency resolvers
-        val repoResolver = createRepoResolvers(remoteRepos, ivySettings)
-        ivySettings.addResolver(repoResolver)
-        ivySettings.setDefaultResolver(repoResolver.getName)
-
-        val ivy = Ivy.newInstance(ivySettings)
-        // Set resolve options to download transitive dependencies as well
-        val resolveOptions = new ResolveOptions
-        resolveOptions.setTransitive(true)
-        val retrieveOptions = new RetrieveOptions
-        // Turn downloading and logging off for testing
-        if (isTest) {
-          resolveOptions.setDownload(false)
-          resolveOptions.setLog(LogOptions.LOG_QUIET)
-          retrieveOptions.setLog(LogOptions.LOG_QUIET)
+      // To prevent ivy from logging to system out
+      System.setOut(printStream)
+      val artifacts = extractMavenCoordinates(coordinates)
+      // Default configuration name for ivy
+      val ivyConfName = "default"
+      // set ivy settings for location of cache
+      val ivySettings: IvySettings = new IvySettings
+      // Directories for caching downloads through ivy and storing the jars when maven coordinates
+      // are supplied to spark-submit
+      val alternateIvyCache = ivyPath.getOrElse("")
+      val packagesDirectory: File =
+        if (alternateIvyCache.trim.isEmpty) {
+          new File(ivySettings.getDefaultIvyUserDir, "jars")
        } else {
-          resolveOptions.setDownload(true)
+          ivySettings.setDefaultIvyUserDir(new File(alternateIvyCache))
+          ivySettings.setDefaultCache(new File(alternateIvyCache, "cache"))
+          new File(alternateIvyCache, "jars")
        }
+      printStream.println(
+        s"Ivy Default Cache set to: ${ivySettings.getDefaultCache.getAbsolutePath}")
+      printStream.println(s"The jars for the packages stored in: $packagesDirectory")
+      // create a pattern matcher
+      ivySettings.addMatcher(new GlobPatternMatcher)
+      // create the dependency resolvers
+      val repoResolver = createRepoResolvers(remoteRepos, ivySettings)
+      ivySettings.addResolver(repoResolver)
+      ivySettings.setDefaultResolver(repoResolver.getName)
+
+      val ivy = Ivy.newInstance(ivySettings)
+      // Set resolve options to download transitive dependencies as well
+      val resolveOptions = new ResolveOptions
+      resolveOptions.setTransitive(true)
+      val retrieveOptions = new RetrieveOptions
+      // Turn downloading and logging off for testing
+      if (isTest) {
+        resolveOptions.setDownload(false)
+        resolveOptions.setLog(LogOptions.LOG_QUIET)
+        retrieveOptions.setLog(LogOptions.LOG_QUIET)
+      } else {
+        resolveOptions.setDownload(true)
+      }

-        // A Module descriptor must be specified. Entries are dummy strings
-        val md = getModuleDescriptor
-        md.setDefaultConf(ivyConfName)
+      // A Module descriptor must be specified. Entries are dummy strings
+      val md = getModuleDescriptor
+      md.setDefaultConf(ivyConfName)

-        // Add exclusion rules for Spark and Scala Library
-        addExclusionRules(ivySettings, ivyConfName, md)
-        // add all supplied maven artifacts as dependencies
-        addDependenciesToIvy(md, artifacts, ivyConfName)
+      // Add exclusion rules for Spark and Scala Library
+      addExclusionRules(ivySettings, ivyConfName, md)
+      // add all supplied maven artifacts as dependencies
+      addDependenciesToIvy(md, artifacts, ivyConfName)

-        // resolve dependencies
-        val rr: ResolveReport = ivy.resolve(md, resolveOptions)
-        if (rr.hasError) {
-          throw new RuntimeException(rr.getAllProblemMessages.toString)
-        }
-        // retrieve all resolved dependencies
-        ivy.retrieve(rr.getModuleDescriptor.getModuleRevisionId,
-          packagesDirectory.getAbsolutePath + File.separator +
-            "[organization]_[artifact]-[revision].[ext]",
-          retrieveOptions.setConfs(Array(ivyConfName)))
-        resolveDependencyPaths(rr.getArtifacts.toArray, packagesDirectory)
-      } finally {
-        System.setOut(sysOut)
+      // resolve dependencies
+      val rr: ResolveReport = ivy.resolve(md, resolveOptions)
+      if (rr.hasError) {
+        throw new RuntimeException(rr.getAllProblemMessages.toString)
      }
+      // retrieve all resolved dependencies
+      ivy.retrieve(rr.getModuleDescriptor.getModuleRevisionId,
+        packagesDirectory.getAbsolutePath + File.separator +
+          "[organization]_[artifact]-[revision].[ext]",
+        retrieveOptions.setConfs(Array(ivyConfName)))
+      System.setOut(sysOut)
+      resolveDependencyPaths(rr.getArtifacts.toArray, packagesDirectory)
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/c6d9a429/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala b/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
deleted file mode 100644
index 529f91e..0000000
--- a/core/src/test/scala/org/apache/spark/deploy/IvyTestUtils.scala
+++ /dev/null
@@ -1,262 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.deploy
-
-import java.io.{File, FileInputStream, FileOutputStream}
-import java.nio.file.{Files, Path}
-import java.util.jar.{JarEntry, JarOutputStream}
-
-import org.apache.spark.TestUtils.{createCompiledClass, JavaSourceFromString}
-
-import com.google.common.io.ByteStreams
-
-import org.apache.commons.io.FileUtils
-
-import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
-
-private[deploy] object IvyTestUtils {
-
-  /**
-   * Create the path for the jar and pom from the maven coordinate. Extension should be `jar`
-   * or `pom`.
-   */
-  private def pathFromCoordinate(
-      artifact: MavenCoordinate,
-      prefix: Path,
-      ext: String,
-      useIvyLayout: Boolean): Path = {
-    val groupDirs = artifact.groupId.replace(".", File.separator)
-    val artifactDirs = artifact.artifactId
-    val artifactPath =
-      if (!useIvyLayout) {
-        Seq(groupDirs, artifactDirs, artifact.version).mkString(File.separator)
-      } else {
-        Seq(groupDirs, artifactDirs, artifact.version, ext + "s").mkString(File.separator)
-      }
-    new File(prefix.toFile, artifactPath).toPath
-  }
-
-  private def artifactName(artifact: MavenCoordinate, ext: String = ".jar"): String = {
-    s"${artifact.artifactId}-${artifact.version}$ext"
-  }
-
-  /** Write the contents to a file to the supplied directory. */
-  private def writeFile(dir: File, fileName: String, contents: String): File = {
-    val outputFile = new File(dir, fileName)
-    val outputStream = new FileOutputStream(outputFile)
-    outputStream.write(contents.toCharArray.map(_.toByte))
-    outputStream.close()
-    outputFile
-  }
-
-  /** Create an example Python file. */
-  private def createPythonFile(dir: File): File = {
-    val contents =
-      """def myfunc(x):
-        |   return x + 1
-      """.stripMargin
-    writeFile(dir, "mylib.py", contents)
-  }
-
-  /** Create a simple testable Class. */
-  private def createJavaClass(dir: File, className: String, packageName: String): File = {
-    val contents =
-      s"""package $packageName;
-        |
-        |import java.lang.Integer;
-        |
-        |class $className implements java.io.Serializable {
-        |
-        |  public $className() {}
-        |
-        |  public Integer myFunc(Integer x) {
-        |    return x + 1;
-        |  }
-        |}
-      """.stripMargin
-    val sourceFile =
-      new JavaSourceFromString(new File(dir, className + ".java").getAbsolutePath, contents)
-    createCompiledClass(className, dir, sourceFile, Seq.empty)
-  }
-
-  /** Helper method to write artifact information in the pom. */
-  private def pomArtifactWriter(artifact: MavenCoordinate, tabCount: Int = 1): String = {
-    var result = "\n" + " " * tabCount + s"<groupId>${artifact.groupId}</groupId>"
-    result += "\n" + " " * tabCount + s"<artifactId>${artifact.artifactId}</artifactId>"
-    result += "\n" + " " * tabCount + s"<version>${artifact.version}</version>"
-    result
-  }
-
-  /** Create a pom file for this artifact. */
-  private def createPom(
-      dir: File,
-      artifact: MavenCoordinate,
-      dependencies: Option[Seq[MavenCoordinate]]): File = {
-    var content = """
-                   |<?xml version="1.0" encoding="UTF-8"?>
-                   |<project xmlns="http://maven.apache.org/POM/4.0.0"
-                   |   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-                   |   xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
-                   |   http://maven.apache.org/xsd/maven-4.0.0.xsd">
-                   |   <modelVersion>4.0.0</modelVersion>
-                 """.stripMargin.trim
-    content += pomArtifactWriter(artifact)
-    content += dependencies.map { deps =>
-      val inside = deps.map { dep =>
-        "\t<dependency>" + pomArtifactWriter(dep, 3) + "\n\t</dependency>"
-      }.mkString("\n")
-      "\n <dependencies>\n" + inside + "\n </dependencies>"
-    }.getOrElse("")
-    content += "\n</project>"
-    writeFile(dir, artifactName(artifact, ".pom"), content.trim)
-  }
-
-  /** Create the jar for the given maven coordinate, using the supplied files. */
-  private def packJar(
-      dir: File,
-      artifact: MavenCoordinate,
-      files: Seq[(String, File)]): File = {
-    val jarFile = new File(dir, artifactName(artifact))
-    val jarFileStream = new FileOutputStream(jarFile)
-    val jarStream = new JarOutputStream(jarFileStream, new java.util.jar.Manifest())
-
-    for (file <- files) {
-      val jarEntry = new JarEntry(file._1)
-      jarStream.putNextEntry(jarEntry)
-
-      val in = new FileInputStream(file._2)
-      ByteStreams.copy(in, jarStream)
-      in.close()
-    }
-    jarStream.close()
-    jarFileStream.close()
-
-    jarFile
-  }
-
-  /**
-   * Creates a jar and pom file, mocking a Maven repository. The root path can be supplied with
-   * `tempDir`, dependencies can be created into the same repo, and python files can also be packed
-   * inside the jar.
-   *
-   * @param artifact The maven coordinate to generate the jar and pom for.
-   * @param dependencies List of dependencies this artifact might have to also create jars and poms.
-   * @param tempDir The root folder of the repository
-   * @param useIvyLayout whether to mock the Ivy layout for local repository testing
-   * @param withPython Whether to pack python files inside the jar for extensive testing.
-   * @return Root path of the repository
-   */
-  private def createLocalRepository(
-      artifact: MavenCoordinate,
-      dependencies: Option[Seq[MavenCoordinate]] = None,
-      tempDir: Option[Path] = None,
-      useIvyLayout: Boolean = false,
-      withPython: Boolean = false): Path = {
-    // Where the root of the repository exists, and what Ivy will search in
-    val tempPath = tempDir.getOrElse(Files.createTempDirectory(null))
-    // Create directory if it doesn't exist
-    Files.createDirectories(tempPath)
-    // Where to create temporary class files and such
-    val root = Files.createTempDirectory(tempPath, null).toFile
-    try {
-      val jarPath = pathFromCoordinate(artifact, tempPath, "jar", useIvyLayout)
-      Files.createDirectories(jarPath)
-      val className = "MyLib"
-
-      val javaClass = createJavaClass(root, className, artifact.groupId)
-      // A tuple of files representation in the jar, and the file
-      val javaFile = (artifact.groupId.replace(".", "/") + "/" + javaClass.getName, javaClass)
-      val allFiles =
-        if (withPython) {
-          val pythonFile = createPythonFile(root)
-          Seq(javaFile, (pythonFile.getName, pythonFile))
-        } else {
-          Seq(javaFile)
-        }
-      val jarFile = packJar(jarPath.toFile, artifact, allFiles)
-      assert(jarFile.exists(), "Problem creating Jar file")
-      val pomPath = pathFromCoordinate(artifact, tempPath, "pom", useIvyLayout)
-      Files.createDirectories(pomPath)
-      val pomFile = createPom(pomPath.toFile, artifact, dependencies)
-      assert(pomFile.exists(), "Problem creating Pom file")
-    } finally {
-      FileUtils.deleteDirectory(root)
-    }
-    tempPath
-  }
-
-  /**
-   * Creates a suite of jars and poms, with or without dependencies, mocking a maven repository.
-   * @param artifact The main maven coordinate to generate the jar and pom for.
-   * @param dependencies List of dependencies this artifact might have to also create jars and poms.
-   * @param rootDir The root folder of the repository (like `~/.m2/repositories`)
-   * @param useIvyLayout whether to mock the Ivy layout for local repository testing
-   * @param withPython Whether to pack python files inside the jar for extensive testing.
-   * @return Root path of the repository. Will be `rootDir` if supplied.
-   */
-  private[deploy] def createLocalRepositoryForTests(
-      artifact: MavenCoordinate,
-      dependencies: Option[String],
-      rootDir: Option[Path],
-      useIvyLayout: Boolean = false,
-      withPython: Boolean = false): Path = {
-    val deps = dependencies.map(SparkSubmitUtils.extractMavenCoordinates)
-    val mainRepo = createLocalRepository(artifact, deps, rootDir, useIvyLayout, withPython)
-    deps.foreach { seq => seq.foreach { dep =>
-      createLocalRepository(dep, None, Some(mainRepo), useIvyLayout, withPython = false)
-    }}
-    mainRepo
-  }
-
-  /**
-   * Creates a repository for a test, and cleans it up afterwards.
-   *
-   * @param artifact The main maven coordinate to generate the jar and pom for.
-   * @param dependencies List of dependencies this artifact might have to also create jars and poms.
-   * @param rootDir The root folder of the repository (like `~/.m2/repositories`)
-   * @param useIvyLayout whether to mock the Ivy layout for local repository testing
-   * @param withPython Whether to pack python files inside the jar for extensive testing.
-   * @return Root path of the repository. Will be `rootDir` if supplied.
-   */
-  private[deploy] def withRepository(
-      artifact: MavenCoordinate,
-      dependencies: Option[String],
-      rootDir: Option[Path],
-      useIvyLayout: Boolean = false,
-      withPython: Boolean = false)(f: String => Unit): Unit = {
-    val repo = createLocalRepositoryForTests(artifact, dependencies, rootDir, useIvyLayout,
-      withPython)
-    try {
-      f(repo.toUri.toString)
-    } finally {
-      // Clean up
-      if (repo.toString.contains(".m2") || repo.toString.contains(".ivy2")) {
-        FileUtils.deleteDirectory(new File(repo.toFile,
-          artifact.groupId.replace(".", File.separator) + File.separator + artifact.artifactId))
-        dependencies.map(SparkSubmitUtils.extractMavenCoordinates).foreach { seq =>
-          seq.foreach { dep =>
-            FileUtils.deleteDirectory(new File(repo.toFile,
-              dep.artifactId.replace(".", File.separator)))
-          }
-        }
-      } else {
-        FileUtils.deleteDirectory(repo.toFile)
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/spark/blob/c6d9a429/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 8360b94..029a115 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -30,7 +30,6 @@ import org.scalatest.time.SpanSugar._

 import org.apache.spark._
 import org.apache.spark.deploy.SparkSubmit._
-import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
 import org.apache.spark.util.{ResetSystemProperties, Utils}

 // Note: this suite mixes in ResetSystemProperties because SparkSubmit.main() sets a bunch
@@ -337,20 +336,16 @@ class SparkSubmitSuite extends FunSuite with Matchers with ResetSystemProperties

   ignore("includes jars passed in through --packages") {
     val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
-    val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
-    val dep = MavenCoordinate("my.great.dep", "mylib", "0.1")
-    IvyTestUtils.withRepository(main, Some(dep.toString), None) { repo =>
-      val args = Seq(
-        "--class", JarCreationTest.getClass.getName.stripSuffix("$"),
-        "--name", "testApp",
-        "--master", "local-cluster[2,1,512]",
-        "--packages", Seq(main, dep).mkString(","),
-        "--repositories", repo,
-        "--conf", "spark.ui.enabled=false",
-        unusedJar.toString,
-        "my.great.lib.MyLib", "my.great.dep.MyLib")
-      runSparkSubmit(args)
-    }
+    val packagesString = "com.databricks:spark-csv_2.10:0.1,com.databricks:spark-avro_2.10:0.1"
+    val args = Seq(
+      "--class", JarCreationTest.getClass.getName.stripSuffix("$"),
+      "--name", "testApp",
+      "--master", "local-cluster[2,1,512]",
+      "--packages", packagesString,
+      "--conf", "spark.ui.enabled=false",
+      unusedJar.toString,
+      "com.databricks.spark.csv.DefaultSource", "com.databricks.spark.avro.DefaultSource")
+    runSparkSubmit(args)
   }

   test("resolves command line argument paths correctly") {

http://git-wip-us.apache.org/repos/asf/spark/blob/c6d9a429/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
index cc79ee7..1b2b699 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -19,15 +19,15 @@ package org.apache.spark.deploy

 import java.io.{PrintStream, OutputStream, File}

+import org.apache.ivy.core.settings.IvySettings
+
 import scala.collection.mutable.ArrayBuffer
+
 import org.scalatest.{BeforeAndAfterAll, FunSuite}

 import org.apache.ivy.core.module.descriptor.MDArtifact
-import org.apache.ivy.core.settings.IvySettings
 import org.apache.ivy.plugins.resolver.IBiblioResolver

-import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
-
 class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {

   private val noOpOutputStream = new OutputStream {
@@ -89,7 +89,7 @@ class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
   }

   test("ivy path works correctly") {
-    val ivyPath = "dummy" + File.separator + "ivy"
+    val ivyPath = "dummy/ivy"
     val md = SparkSubmitUtils.getModuleDescriptor
     val artifacts = for (i <- 0 until 3) yield new MDArtifact(md, s"jar-$i", "jar", "jar")
     var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(ivyPath))
@@ -98,38 +98,17 @@ class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
       assert(index >= 0)
       jPaths = jPaths.substring(index + ivyPath.length)
     }
-    val main = MavenCoordinate("my.awesome.lib", "mylib", "0.1")
-    IvyTestUtils.withRepository(main, None, None) { repo =>
-      // end to end
-      val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, Option(repo),
-        Option(ivyPath), true)
-      assert(jarPath.indexOf(ivyPath) >= 0, "should use non-default ivy path")
-    }
+    // end to end
+    val jarPath = SparkSubmitUtils.resolveMavenCoordinates(
+      "com.databricks:spark-csv_2.10:0.1", None, Option(ivyPath), true)
+    assert(jarPath.indexOf(ivyPath) >= 0, "should use non-default ivy path")
   }

-  test("search for artifact at local repositories") {
-    val main = new MavenCoordinate("my.awesome.lib", "mylib", "0.1")
-    // Local M2 repository
-    IvyTestUtils.withRepository(main, None, Some(SparkSubmitUtils.m2Path)) { repo =>
-      val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, None, None, true)
-      assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
-    }
-    // Local Ivy Repository
-    val settings = new IvySettings
-    val ivyLocal = new File(settings.getDefaultIvyUserDir, "local" + File.separator)
-    IvyTestUtils.withRepository(main, None, Some(ivyLocal.toPath), true) { repo =>
-      val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, None, None, true)
-      assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
-    }
-    // Local ivy repository with modified home
-    val dummyIvyPath = "dummy" + File.separator + "ivy"
-    val dummyIvyLocal = new File(dummyIvyPath, "local" + File.separator)
-    IvyTestUtils.withRepository(main, None, Some(dummyIvyLocal.toPath), true) { repo =>
-      val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, None,
-        Some(dummyIvyPath), true)
-      assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
-      assert(jarPath.indexOf(dummyIvyPath) >= 0, "should be in new ivy path")
-    }
+  test("search for artifact at other repositories") {
+    val path = SparkSubmitUtils.resolveMavenCoordinates("com.agimatec:agimatec-validation:0.9.3",
+      Option("https://oss.sonatype.org/content/repositories/agimatec/"), None, true)
+    assert(path.indexOf("agimatec-validation") >= 0, "should find package. If it doesn't, check" +
+      "if package still exists. If it has been removed, replace the example in this test.")
   }

   test("dependency not found throws RuntimeException") {
@@ -148,11 +127,11 @@ class SparkSubmitUtilsSuite extends FunSuite with BeforeAndAfterAll {
     val path = SparkSubmitUtils.resolveMavenCoordinates(coordinates, None, None, true)
     assert(path === "", "should return empty path")

-    val main = MavenCoordinate("org.apache.spark", "spark-streaming-kafka-assembly_2.10", "1.2.0")
-    IvyTestUtils.withRepository(main, None, None) { repo =>
-      val files = SparkSubmitUtils.resolveMavenCoordinates(coordinates + "," + main.toString,
-        Some(repo), None, true)
-      assert(files.indexOf(main.artifactId) >= 0, "Did not return artifact")
+    // Should not exclude the following dependency. Will throw an error, because it doesn't exist,
+    // but the fact that it is checking means that it wasn't excluded.
+    intercept[RuntimeException] {
+      SparkSubmitUtils.resolveMavenCoordinates(coordinates +
+        ",org.apache.spark:spark-streaming-kafka-assembly_2.10:1.2.0", None, None, true)
     }
   }
 }
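
For reference, both suites above exercise SparkSubmitUtils.resolveMavenCoordinates(coordinates, remoteRepos, ivyPath, isTest), which takes a comma-delimited "groupId:artifactId:version" string, an optional comma-delimited list of extra repositories, an optional alternate Ivy cache directory, and a test flag, and returns the resolved jar paths as a single string. The following is a minimal sketch of that call pattern and is not part of the commit: the wrapper object is hypothetical, the coordinate and repository URL are simply the ones used in the "search for artifact at other repositories" test, and since SparkSubmitUtils is private[deploy], such a caller must be compiled inside the org.apache.spark.deploy package.

package org.apache.spark.deploy

// Hypothetical driver illustrating the call pattern used by the tests above.
object ResolveMavenCoordinatesSketch {
  def main(args: Array[String]): Unit = {
    val path: String = SparkSubmitUtils.resolveMavenCoordinates(
      "com.agimatec:agimatec-validation:0.9.3",                           // groupId:artifactId:version
      Option("https://oss.sonatype.org/content/repositories/agimatec/"),  // extra remote repositories
      None,                                                               // default Ivy cache under the user home
      true)                                                               // isTest: turn downloading and logging off
    // Per the retrieve call in SparkSubmit.scala above, the returned string points at jars
    // laid out as <cache>/jars/[organization]_[artifact]-[revision].[ext].
    assert(path.indexOf("agimatec-validation") >= 0, "expected the artifact on the resolved path")
  }
}

Note that with the MavenCoordinate.toString override reverted, callers now spell out the "groupId:artifactId:version" string by hand instead of relying on Seq(main, dep).mkString(",").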