Repository: spark
Updated Branches:
  refs/heads/master 010c460d6 -> 648553d48


Fix some tests.

- JavaAPISuite was trying to compare a bare path with a URI. Fix by
  extracting the path from the URI, since we know it should be a
  local path anyway (see the short illustration after this list).

- b9be1609 excluded the ASM dependency everywhere, but easymock needs
  it (because cglib needs it). So re-add the dependency, with test
  scope this time.
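
For reference, a minimal standalone sketch of the URI-to-path
normalization the test now performs (the class name and sample key
below are illustrative, not taken from the patch):

  import java.net.URI;

  public class UriPathExample {
    public static void main(String[] args) throws Exception {
      // wholeTextFiles() keys records by URI; the test's expected map
      // uses bare paths, so strip the scheme before comparing.
      String key = "file:/tmp/wholeFiles/part-00000";  // sample URI string
      String path = new URI(key).getPath();            // "/tmp/wholeFiles/part-00000"
      System.out.println(path);
    }
  }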

The second fix above actually uncovered a weird situation: the Maven
test target works, even though I can't find the class sbt complains
about anywhere on its classpath. sbt fails with:

  [error] Uncaught exception when running
  org.apache.spark.util.random.RandomSamplerSuite:
  java.lang.NoClassDefFoundError: org/objectweb/asm/Type

To avoid more weirdness caused by that, I explicitly added the asm
dependency to both the Maven and sbt builds (test scope only), and
verified that the classes don't end up in the final assembly.
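
One way to spot-check that last point (the jar path is a placeholder
for the locally built assembly):

  jar tf path/to/spark-assembly.jar | grep org/objectweb/asm

which should print nothing if the exclusion is working.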

Author: Marcelo Vanzin <van...@cloudera.com>

Closes #917 from vanzin/flaky-tests and squashes the following commits:

d022320 [Marcelo Vanzin] Fix some tests.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/648553d4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/648553d4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/648553d4

Branch: refs/heads/master
Commit: 648553d48ee1f830406750b50ec4cc322bcf47fe
Parents: 010c460
Author: Marcelo Vanzin <van...@cloudera.com>
Authored: Fri Jun 20 20:05:12 2014 -0700
Committer: Patrick Wendell <pwend...@gmail.com>
Committed: Fri Jun 20 20:05:12 2014 -0700

----------------------------------------------------------------------
 core/pom.xml                                          |  5 +++++
 core/src/test/java/org/apache/spark/JavaAPISuite.java |  5 +++--
 pom.xml                                               |  7 +++++++
 project/SparkBuild.scala                              | 10 ++++++----
 4 files changed, 21 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/648553d4/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index bd6767e..8c23842 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -245,6 +245,11 @@
       <scope>test</scope>
     </dependency>
     <dependency>
+      <groupId>asm</groupId>
+      <artifactId>asm</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>com.novocode</groupId>
       <artifactId>junit-interface</artifactId>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/spark/blob/648553d4/core/src/test/java/org/apache/spark/JavaAPISuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/spark/JavaAPISuite.java b/core/src/test/java/org/apache/spark/JavaAPISuite.java
index 1d7a7be..b2868b5 100644
--- a/core/src/test/java/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/org/apache/spark/JavaAPISuite.java
@@ -18,6 +18,7 @@
 package org.apache.spark;
 
 import java.io.*;
+import java.net.URI;
 import java.util.*;
 
 import scala.Tuple2;
@@ -768,7 +769,7 @@ public class JavaAPISuite implements Serializable {
   }
 
   @Test
-  public void wholeTextFiles() throws IOException {
+  public void wholeTextFiles() throws Exception {
     byte[] content1 = "spark is easy to use.\n".getBytes("utf-8");
     byte[] content2 = "spark is also easy to use.\n".getBytes("utf-8");
 
@@ -784,7 +785,7 @@ public class JavaAPISuite implements Serializable {
     List<Tuple2<String, String>> result = readRDD.collect();
 
     for (Tuple2<String, String> res : result) {
-      Assert.assertEquals(res._2(), container.get(res._1()));
+      Assert.assertEquals(res._2(), container.get(new URI(res._1()).getPath()));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/648553d4/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 0d46bb4..05f76d5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -468,6 +468,13 @@
         <version>3.1</version>
         <scope>test</scope>
       </dependency>
+      <!-- Needed by cglib which is needed by easymock. -->
+      <dependency>
+        <groupId>asm</groupId>
+        <artifactId>asm</artifactId>
+        <version>3.3.1</version>
+        <scope>test</scope>
+      </dependency>
       <dependency>
         <groupId>org.mockito</groupId>
         <artifactId>mockito-all</artifactId>

http://git-wip-us.apache.org/repos/asf/spark/blob/648553d4/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 7bb39dc..55a2aa0 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -293,7 +293,9 @@ object SparkBuild extends Build {
         "com.novocode"      % "junit-interface"        % "0.10"   % "test",
         "org.easymock"      % "easymockclassextension" % "3.1"    % "test",
         "org.mockito"       % "mockito-all"            % "1.9.0"  % "test",
-        "junit"             % "junit"                  % "4.10"   % "test"
+        "junit"             % "junit"                  % "4.10"   % "test",
+        // Needed by cglib which is needed by easymock.
+        "asm"               % "asm"                    % "3.3.1"  % "test"
     ),
 
     testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
@@ -461,7 +463,7 @@ object SparkBuild extends Build {
 
   def toolsSettings = sharedSettings ++ Seq(
     name := "spark-tools",
-    libraryDependencies <+= scalaVersion(v => "org.scala-lang"  % "scala-compiler" % v ),
+    libraryDependencies <+= scalaVersion(v => "org.scala-lang"  % "scala-compiler" % v),
     libraryDependencies <+= scalaVersion(v => "org.scala-lang"  % "scala-reflect"  % v )
   ) ++ assemblySettings ++ extraAssemblySettings
 
@@ -630,9 +632,9 @@ object SparkBuild extends Build {
     scalaVersion := "2.10.4",
     retrieveManaged := true,
     retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
-    libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq", 
+    libraryDependencies := Seq("spark-streaming-mqtt", "spark-streaming-zeromq",
       "spark-streaming-flume", "spark-streaming-kafka", "spark-streaming-twitter",
-      "spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx", 
+      "spark-streaming", "spark-mllib", "spark-bagel", "spark-graphx",
       "spark-core").map(sparkPreviousArtifact(_).get intransitive())
   )
 
