Repository: spark
Updated Branches:
  refs/heads/master 8aff36e91 -> c09e51398


[SPARK-11442] Reduce numSlices for local metrics test of SparkListenerSuite

In the thread
http://search-hadoop.com/m/q3RTtcQiFSlTxeP/test+failed+due+to+OOME&subj=test+failed+due+to+OOME,
it was discussed that the memory consumption of SparkListenerSuite should be
brought down.

This is an attempt in that direction: it reduces numSlices (and the input
size) for the local metrics test, as illustrated below.
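
For illustration only (a minimal sketch, not part of the commit; it assumes a
local master and uses only the standard RDD API), this is the effect of the
change: fewer slices mean fewer partitions, tasks, and shuffle blocks, and
hence a smaller memory footprint for the test:

    import org.apache.spark.{SparkConf, SparkContext}

    // Sketch: numSlices bounds the number of partitions, and therefore the
    // number of tasks and shuffle blocks the test creates.
    val sc = new SparkContext(
      new SparkConf().setMaster("local[*]").setAppName("numSlices-sketch"))
    val numSlices = 16                            // the patch lowers this from 64
    val d = sc.parallelize(0 to 1000, numSlices)  // input also shrinks: 1e4 -> 1e3
    assert(d.partitions.length == numSlices)      // one partition per slice
    d.count()                                     // runs 16 tasks instead of 64
    sc.stop()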

Author: tedyu <yuzhih...@gmail.com>

Closes #9384 from tedyu/master.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c09e5139
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c09e5139
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c09e5139

Branch: refs/heads/master
Commit: c09e5139874fb3626e005c8240cca5308b902ef3
Parents: 8aff36e
Author: tedyu <yuzhih...@gmail.com>
Authored: Wed Nov 4 10:51:40 2015 +0000
Committer: Sean Owen <so...@cloudera.com>
Committed: Wed Nov 4 10:51:40 2015 +0000

----------------------------------------------------------------------
 .../org/apache/spark/scheduler/SparkListenerSuite.scala     | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c09e5139/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
index a9652d7..53102b9 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
@@ -212,14 +212,15 @@ class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Match
       i
     }
 
-    val d = sc.parallelize(0 to 1e4.toInt, 64).map(w)
+    val numSlices = 16
+    val d = sc.parallelize(0 to 1e3.toInt, numSlices).map(w)
     d.count()
     sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
     listener.stageInfos.size should be (1)
 
     val d2 = d.map { i => w(i) -> i * 2 }.setName("shuffle input 1")
     val d3 = d.map { i => w(i) -> (0 to (i % 5)) }.setName("shuffle input 2")
-    val d4 = d2.cogroup(d3, 64).map { case (k, (v1, v2)) =>
+    val d4 = d2.cogroup(d3, numSlices).map { case (k, (v1, v2)) =>
       w(k) -> (v1.size, v2.size)
     }
     d4.setName("A Cogroup")
@@ -258,8 +259,8 @@ class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Match
         if (stageInfo.rddInfos.exists(_.name == d4.name)) {
           taskMetrics.shuffleReadMetrics should be ('defined)
           val sm = taskMetrics.shuffleReadMetrics.get
-          sm.totalBlocksFetched should be (128)
-          sm.localBlocksFetched should be (128)
+          sm.totalBlocksFetched should be (2*numSlices)
+          sm.localBlocksFetched should be (2*numSlices)
           sm.remoteBlocksFetched should be (0)
           sm.remoteBytesRead should be (0L)
         }
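
The new 2*numSlices expectation follows from the shuffle layout: the cogroup
reads from two shuffle dependencies (d2 and d3), each written by numSlices map
tasks, so each reduce task fetches numSlices + numSlices blocks, all local in
local mode. A sketch of the arithmetic (assuming one block fetched per map
partition per dependency, which matches the old 2 * 64 = 128 expectation):

    // Worked check of the assertion in the diff above.
    val numSlices = 16
    val shuffleDeps = 2                       // d2 and d3 both feed the cogroup
    val expectedBlocks = shuffleDeps * numSlices
    assert(expectedBlocks == 32)              // previously 2 * 64 = 128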

