spark git commit: [MINOR] [BUILD] Use custom temp directory during build.

2015-06-05 Thread srowen
Repository: spark
Updated Branches:
  refs/heads/branch-1.3 5b96b6933 -> 5185ea9b4


[MINOR] [BUILD] Use custom temp directory during build.

Even with all the efforts to clean up the temp directories created by
unit tests, Spark leaves a lot of garbage in /tmp after a test run.
This change overrides java.io.tmpdir to place those files under the
build directory instead.

After an sbt full unit test run, I was left with > 400 MB of temp
files. Since they're now under the build dir, it's much easier to
clean them up.

Also make a slight change to a unit test so that it no longer pollutes
the source directory with test data.

Author: Marcelo Vanzin 

Closes #6653 from vanzin/unit-test-tmp and squashes the following commits:

31e2dd5 [Marcelo Vanzin] Fix tests that depend on each other.
aa92944 [Marcelo Vanzin] [minor] [build] Use custom temp directory during build.

(cherry picked from commit b16b5434ff44c42e4b3a337f9af147669ba44896)
Signed-off-by: Sean Owen 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5185ea9b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5185ea9b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5185ea9b

Branch: refs/heads/branch-1.3
Commit: 5185ea9b4df3ee73807859b70ddfca8f02f1a659
Parents: 5b96b69
Author: Marcelo Vanzin 
Authored: Fri Jun 5 14:11:38 2015 +0200
Committer: Sean Owen 
Committed: Fri Jun 5 14:16:05 2015 +0200

--
 .../spark/deploy/SparkSubmitUtilsSuite.scala| 22 +++-
 pom.xml |  4 +++-
 project/SparkBuild.scala|  1 +
 3 files changed, 16 insertions(+), 11 deletions(-)
--
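For context, the mechanism amounts to one extra JVM option for the forked
test process; the pom.xml hunk presumably sets the same system property for
the Maven test plugins. A minimal sbt sketch of the idea (the testTempDir
name and exact path here are illustrative assumptions, not the literal
contents of SparkBuild.scala):

    import sbt._
    import sbt.Keys._

    // Route java.io.tmpdir into the build output so that temp files
    // created by tests land under target/tmp, where a normal
    // `sbt clean` removes them.
    lazy val testTempDir = s"${sys.props("user.dir")}/target/tmp"

    lazy val tempDirSettings = Seq(
      fork in Test := true, // javaOptions only apply to a forked test JVM
      javaOptions in Test += s"-Djava.io.tmpdir=$testTempDir"
    )

One caveat: the JVM does not create java.io.tmpdir on demand, so the build
must ensure the directory exists before tests run.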


http://git-wip-us.apache.org/repos/asf/spark/blob/5185ea9b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
--
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
index a49b4db..ceff5d7 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -28,9 +28,12 @@ import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
+import org.apache.spark.util.Utils
 
 class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
+  private var tempIvyPath: String = _
+
   private val noOpOutputStream = new OutputStream {
 def write(b: Int) = {}
   }
@@ -47,6 +50,7 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
 super.beforeAll()
 // We don't want to write logs during testing
 SparkSubmitUtils.printStream = new BufferPrintStream
+tempIvyPath = Utils.createTempDir(namePrefix = "ivy").getAbsolutePath()
   }
 
   test("incorrect maven coordinate throws error") {
@@ -90,21 +94,20 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
   }
 
   test("ivy path works correctly") {
-val ivyPath = "dummy" + File.separator +  "ivy"
 val md = SparkSubmitUtils.getModuleDescriptor
 val artifacts = for (i <- 0 until 3) yield new MDArtifact(md, s"jar-$i", "jar", "jar")
-var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(ivyPath))
+var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(tempIvyPath))
 for (i <- 0 until 3) {
-  val index = jPaths.indexOf(ivyPath)
+  val index = jPaths.indexOf(tempIvyPath)
   assert(index >= 0)
-  jPaths = jPaths.substring(index + ivyPath.length)
+  jPaths = jPaths.substring(index + tempIvyPath.length)
 }
 val main = MavenCoordinate("my.awesome.lib", "mylib", "0.1")
 IvyTestUtils.withRepository(main, None, None) { repo =>
   // end to end
   val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, Option(repo),
-Option(ivyPath), true)
-  assert(jarPath.indexOf(ivyPath) >= 0, "should use non-default ivy path")
+Option(tempIvyPath), true)
+  assert(jarPath.indexOf(tempIvyPath) >= 0, "should use non-default ivy path")
 }
   }
 
@@ -123,13 +126,12 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
   assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
 }
 // Local ivy repository with modified home
-val dummyIvyPath = "dummy" + File.separator + "ivy"
-val dummyIvyLocal = new File(dummyIvyPath, "local" + File.separator)
+val dummyIvyLocal = new File(tempIvyPath, "local" + File.separator)
 IvyTestUtils.withRepository(main, None, Some(dummyIvyLocal), true) { repo =>
   val jarPath = SparkSubmitUtils.resolve
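The suite-side change above complements the build-side one: Spark's
Utils.createTempDir defaults its root to java.io.tmpdir and registers the
directory for deletion on JVM shutdown, so once the build overrides that
property, the ivy test data lands under the build directory rather than at
a "dummy/ivy" path in the source tree. A minimal sketch of the pattern,
assuming Spark's Utils API of that era:

    import org.apache.spark.util.Utils

    // createTempDir picks a unique directory under java.io.tmpdir and
    // registers it for deletion when the JVM exits; the absolute path
    // is what the assertions match against resolved dependency paths.
    val tempIvyPath: String =
      Utils.createTempDir(namePrefix = "ivy").getAbsolutePath()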

spark git commit: [MINOR] [BUILD] Use custom temp directory during build.

2015-06-05 Thread srowen
Repository: spark
Updated Branches:
  refs/heads/branch-1.4 90cf68638 -> 9b3e4c187


[MINOR] [BUILD] Use custom temp directory during build.

Even with all the efforts to clean up the temp directories created by
unit tests, Spark leaves a lot of garbage in /tmp after a test run.
This change overrides java.io.tmpdir to place those files under the
build directory instead.

After an sbt full unit test run, I was left with > 400 MB of temp
files. Since they're now under the build dir, it's much easier to
clean them up.

Also make a slight change to a unit test so that it no longer pollutes
the source directory with test data.

Author: Marcelo Vanzin 

Closes #6653 from vanzin/unit-test-tmp and squashes the following commits:

31e2dd5 [Marcelo Vanzin] Fix tests that depend on each other.
aa92944 [Marcelo Vanzin] [minor] [build] Use custom temp directory during build.

(cherry picked from commit b16b5434ff44c42e4b3a337f9af147669ba44896)
Signed-off-by: Sean Owen 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9b3e4c18
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9b3e4c18
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9b3e4c18

Branch: refs/heads/branch-1.4
Commit: 9b3e4c187123baa22666b8e119ddd55dafecbf89
Parents: 90cf686
Author: Marcelo Vanzin 
Authored: Fri Jun 5 14:11:38 2015 +0200
Committer: Sean Owen 
Committed: Fri Jun 5 14:12:05 2015 +0200

--
 .../spark/deploy/SparkSubmitUtilsSuite.scala| 22 +++-
 pom.xml |  4 +++-
 project/SparkBuild.scala|  1 +
 3 files changed, 16 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/9b3e4c18/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
--
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
index 8fda5c8..07d261c 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -28,9 +28,12 @@ import org.apache.ivy.plugins.resolver.IBiblioResolver
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
+import org.apache.spark.util.Utils
 
 class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
+  private var tempIvyPath: String = _
+
   private val noOpOutputStream = new OutputStream {
 def write(b: Int) = {}
   }
@@ -47,6 +50,7 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
 super.beforeAll()
 // We don't want to write logs during testing
 SparkSubmitUtils.printStream = new BufferPrintStream
+tempIvyPath = Utils.createTempDir(namePrefix = "ivy").getAbsolutePath()
   }
 
   test("incorrect maven coordinate throws error") {
@@ -90,21 +94,20 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
   }
 
   test("ivy path works correctly") {
-val ivyPath = "dummy" + File.separator +  "ivy"
 val md = SparkSubmitUtils.getModuleDescriptor
 val artifacts = for (i <- 0 until 3) yield new MDArtifact(md, s"jar-$i", "jar", "jar")
-var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(ivyPath))
+var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(tempIvyPath))
 for (i <- 0 until 3) {
-  val index = jPaths.indexOf(ivyPath)
+  val index = jPaths.indexOf(tempIvyPath)
   assert(index >= 0)
-  jPaths = jPaths.substring(index + ivyPath.length)
+  jPaths = jPaths.substring(index + tempIvyPath.length)
 }
 val main = MavenCoordinate("my.awesome.lib", "mylib", "0.1")
 IvyTestUtils.withRepository(main, None, None) { repo =>
   // end to end
   val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, Option(repo),
-Option(ivyPath), true)
-  assert(jarPath.indexOf(ivyPath) >= 0, "should use non-default ivy path")
+Option(tempIvyPath), true)
+  assert(jarPath.indexOf(tempIvyPath) >= 0, "should use non-default ivy path")
 }
   }
 
@@ -123,13 +126,12 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
   assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
 }
 // Local ivy repository with modified home
-val dummyIvyPath = "dummy" + File.separator + "ivy"
-val dummyIvyLocal = new File(dummyIvyPath, "local" + File.separator)
+val dummyIvyLocal = new File(tempIvyPath, "local" + File.separator)
 IvyTestUtils.withRepository(main, None, Some(dummyIvyLocal), true) { repo =>
   val jarPath = SparkSub

spark git commit: [MINOR] [BUILD] Use custom temp directory during build.

2015-06-05 Thread srowen
Repository: spark
Updated Branches:
  refs/heads/master da20c8ca3 -> b16b5434f


[MINOR] [BUILD] Use custom temp directory during build.

Even with all the efforts to clean up the temp directories created by
unit tests, Spark leaves a lot of garbage in /tmp after a test run.
This change overrides java.io.tmpdir to place those files under the
build directory instead.

After an sbt full unit test run, I was left with > 400 MB of temp
files. Since they're now under the build dir, it's much easier to
clean them up.

Also make a slight change to a unit test so that it no longer pollutes
the source directory with test data.

Author: Marcelo Vanzin 

Closes #6653 from vanzin/unit-test-tmp and squashes the following commits:

31e2dd5 [Marcelo Vanzin] Fix tests that depend on each other.
aa92944 [Marcelo Vanzin] [minor] [build] Use custom temp directory during build.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b16b5434
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b16b5434
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b16b5434

Branch: refs/heads/master
Commit: b16b5434ff44c42e4b3a337f9af147669ba44896
Parents: da20c8c
Author: Marcelo Vanzin 
Authored: Fri Jun 5 14:11:38 2015 +0200
Committer: Sean Owen 
Committed: Fri Jun 5 14:11:38 2015 +0200

--
 .../spark/deploy/SparkSubmitUtilsSuite.scala| 22 +++-
 pom.xml |  4 +++-
 project/SparkBuild.scala|  1 +
 3 files changed, 16 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/b16b5434/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
--
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
index 8fda5c8..07d261c 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -28,9 +28,12 @@ import org.apache.ivy.plugins.resolver.IBiblioResolver
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
+import org.apache.spark.util.Utils
 
 class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
+  private var tempIvyPath: String = _
+
   private val noOpOutputStream = new OutputStream {
 def write(b: Int) = {}
   }
@@ -47,6 +50,7 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
 super.beforeAll()
 // We don't want to write logs during testing
 SparkSubmitUtils.printStream = new BufferPrintStream
+tempIvyPath = Utils.createTempDir(namePrefix = "ivy").getAbsolutePath()
   }
 
   test("incorrect maven coordinate throws error") {
@@ -90,21 +94,20 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
   }
 
   test("ivy path works correctly") {
-val ivyPath = "dummy" + File.separator +  "ivy"
 val md = SparkSubmitUtils.getModuleDescriptor
 val artifacts = for (i <- 0 until 3) yield new MDArtifact(md, s"jar-$i", "jar", "jar")
-var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(ivyPath))
+var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(tempIvyPath))
 for (i <- 0 until 3) {
-  val index = jPaths.indexOf(ivyPath)
+  val index = jPaths.indexOf(tempIvyPath)
   assert(index >= 0)
-  jPaths = jPaths.substring(index + ivyPath.length)
+  jPaths = jPaths.substring(index + tempIvyPath.length)
 }
 val main = MavenCoordinate("my.awesome.lib", "mylib", "0.1")
 IvyTestUtils.withRepository(main, None, None) { repo =>
   // end to end
   val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, Option(repo),
-Option(ivyPath), true)
-  assert(jarPath.indexOf(ivyPath) >= 0, "should use non-default ivy path")
+Option(tempIvyPath), true)
+  assert(jarPath.indexOf(tempIvyPath) >= 0, "should use non-default ivy path")
 }
   }
 
@@ -123,13 +126,12 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
   assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
 }
 // Local ivy repository with modified home
-val dummyIvyPath = "dummy" + File.separator + "ivy"
-val dummyIvyLocal = new File(dummyIvyPath, "local" + File.separator)
+val dummyIvyLocal = new File(tempIvyPath, "local" + File.separator)
 IvyTestUtils.withRepository(main, None, Some(dummyIvyLocal), true) { repo =>
   val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, None,
-Some(dummyIvyPath), true)
+Some(