Repository: spark
Updated Branches:
  refs/heads/master e4e46b20f -> e71c07557


[SPARK-11672][ML] flaky spark.ml read/write tests

We set `sqlContext = null` in `afterAll`. However, this doesn't clear 
`SQLContext.activeContext`, so `SQLContext.getOrCreate` in a later suite may 
return a context backed by the `SparkContext` of a previous test suite, which 
causes the error. This PR calls `clearActive` in both `beforeAll` and 
`afterAll` so that no suite picks up a stale context left over from another.
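
To make the fix concrete, here is a minimal sketch of the pattern the patched 
`MLlibTestSparkContext` follows. The trait name `SharedSQLTestContext` is 
hypothetical, and `SQLContext.clearActive()` may only be callable from code 
under an `org.apache.spark` package in this Spark version:

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.scalatest.{BeforeAndAfterAll, Suite}

// Hypothetical test trait mirroring the patched MLlibTestSparkContext.
trait SharedSQLTestContext extends BeforeAndAfterAll { self: Suite =>
  @transient var sc: SparkContext = _
  @transient var sqlContext: SQLContext = _

  override def beforeAll(): Unit = {
    super.beforeAll()
    val conf = new SparkConf().setMaster("local[2]").setAppName("test")
    sc = new SparkContext(conf)
    SQLContext.clearActive() // drop any context leaked by an earlier suite
    sqlContext = new SQLContext(sc)
  }

  override def afterAll(): Unit = {
    // Nulling the field alone is not enough: SQLContext.activeContext would
    // still reference the old instance, and SQLContext.getOrCreate would
    // return it even after its SparkContext has been stopped.
    sqlContext = null
    SQLContext.clearActive()
    if (sc != null) {
      sc.stop()
    }
    sc = null
    super.afterAll()
  }
}
```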

cc: yhuai

Author: Xiangrui Meng <m...@databricks.com>

Closes #9677 from mengxr/SPARK-11672.2.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e71c0755
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e71c0755
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e71c0755

Branch: refs/heads/master
Commit: e71c07557c39e2f74bd20d2ab3a2fca88aa5dfbb
Parents: e4e46b2
Author: Xiangrui Meng <m...@databricks.com>
Authored: Thu Nov 12 20:01:13 2015 -0800
Committer: Xiangrui Meng <m...@databricks.com>
Committed: Thu Nov 12 20:01:13 2015 -0800

----------------------------------------------------------------------
 .../java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java | 4 ++--
 .../apache/spark/ml/classification/LogisticRegressionSuite.scala | 2 +-
 .../test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala  | 2 +-
 .../scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala    | 2 +-
 .../org/apache/spark/mllib/util/MLlibTestSparkContext.scala      | 2 ++
 5 files changed, 7 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/e71c0755/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
----------------------------------------------------------------------
diff --git a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
index 4f7aeac..c395380 100644
--- a/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
+++ b/mllib/src/test/java/org/apache/spark/ml/util/JavaDefaultReadWriteSuite.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.Ignore;
+import org.junit.Test;
 
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
@@ -50,7 +50,7 @@ public class JavaDefaultReadWriteSuite {
     Utils.deleteRecursively(tempDir);
   }
 
-  @Ignore // SPARK-11672
+  @Test
   public void testDefaultReadWrite() throws IOException {
     String uid = "my_params";
     MyParams instance = new MyParams(uid);

http://git-wip-us.apache.org/repos/asf/spark/blob/e71c0755/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
index e4c2f1b..51b06b7 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/classification/LogisticRegressionSuite.scala
@@ -872,7 +872,7 @@ class LogisticRegressionSuite
     assert(model1a0.intercept ~== model1b.intercept absTol 1E-3)
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
     // Set some Params to make sure set Params are serialized.
     val lr = new LogisticRegression()
       .setElasticNetParam(0.1)

http://git-wip-us.apache.org/repos/asf/spark/blob/e71c0755/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
index a66fe03..9dfa143 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
@@ -68,7 +68,7 @@ class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with Defau
     }
   }
 
-  ignore("read/write") { // SPARK-11672
+  test("read/write") {
     val binarizer = new Binarizer()
       .setInputCol("feature")
       .setOutputCol("binarized_feature")

http://git-wip-us.apache.org/repos/asf/spark/blob/e71c0755/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
index 44e09c3..cac4bd9 100644
--- a/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
+++ b/mllib/src/test/scala/org/apache/spark/ml/util/DefaultReadWriteTest.scala
@@ -105,7 +105,7 @@ object MyParams extends Readable[MyParams] {
 class DefaultReadWriteSuite extends SparkFunSuite with MLlibTestSparkContext
   with DefaultReadWriteTest {
 
-  ignore("default read/write") { // SPARK-11672
+  test("default read/write") {
     val myParams = new MyParams("my_params")
     testDefaultReadWrite(myParams)
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/e71c0755/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
index 5d1796e..998ee48 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/MLlibTestSparkContext.scala
@@ -32,11 +32,13 @@ trait MLlibTestSparkContext extends BeforeAndAfterAll { self: Suite =>
       .setMaster("local[2]")
       .setAppName("MLlibUnitTest")
     sc = new SparkContext(conf)
+    SQLContext.clearActive()
     sqlContext = new SQLContext(sc)
   }
 
   override def afterAll() {
     sqlContext = null
+    SQLContext.clearActive()
     if (sc != null) {
       sc.stop()
     }

