Repository: spark
Updated Branches:
  refs/heads/branch-1.4 e3dd2802f -> b3a590061


[SPARK-7927] whitespace fixes for GraphX.

This cleans up whitespace inconsistencies so that we can enable a whitespace
enforcement rule in the style checker and save code review time.
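
For reference, a minimal sketch of the conventions this change brings the code in
line with. The object and method names below are made up for illustration, not
taken from the diff:

    object WhitespaceStyle {
      // Single space after commas, in type parameters and argument lists alike.
      def pair[A, B](a: A, b: B): (A, B) = (a, b)

      def demo(): Unit = {
        val xs = List(1, 2, 3)
        // Space after `if`, and spaces around `=` in named arguments.
        if (xs.nonEmpty) {
          println(xs.mkString(sep = ", "))
        }
      }
    }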

Author: Reynold Xin <r...@databricks.com>

Closes #6474 from rxin/whitespace-graphx and squashes the following commits:

4d3cd26 [Reynold Xin] Fixed tests.
869dde4 [Reynold Xin] [SPARK-7927] whitespace fixes for GraphX.

(cherry picked from commit b069ad23d9b6cbfb3a8bf245547add4816669075)
Signed-off-by: Reynold Xin <r...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b3a59006
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b3a59006
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b3a59006

Branch: refs/heads/branch-1.4
Commit: b3a590061da09674cb0ff868c808985ea846145e
Parents: e3dd280
Author: Reynold Xin <r...@databricks.com>
Authored: Thu May 28 20:17:16 2015 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Thu May 28 20:17:28 2015 -0700

----------------------------------------------------------------------
 .../org/apache/spark/graphx/EdgeDirection.scala      |  4 ++--
 .../scala/org/apache/spark/graphx/EdgeTriplet.scala  |  2 +-
 .../main/scala/org/apache/spark/graphx/Graph.scala   |  2 +-
 .../scala/org/apache/spark/graphx/GraphOps.scala     | 10 +++++-----
 .../main/scala/org/apache/spark/graphx/Pregel.scala  |  8 ++++----
 .../org/apache/spark/graphx/impl/EdgePartition.scala |  4 ++--
 .../scala/org/apache/spark/graphx/lib/PageRank.scala |  8 ++++----
 .../org/apache/spark/graphx/lib/SVDPlusPlus.scala    |  2 +-
 .../org/apache/spark/graphx/lib/TriangleCount.scala  |  4 ++--
 .../apache/spark/graphx/util/GraphGenerators.scala   |  9 +++++----
 .../org/apache/spark/graphx/GraphOpsSuite.scala      |  6 +++---
 .../scala/org/apache/spark/graphx/GraphSuite.scala   |  6 +++---
 .../spark/graphx/lib/ConnectedComponentsSuite.scala  | 15 +++++++++------
 .../org/apache/spark/graphx/lib/PageRankSuite.scala  | 14 +++++++-------
 .../apache/spark/graphx/lib/TriangleCountSuite.scala |  2 +-
 15 files changed, 50 insertions(+), 46 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/main/scala/org/apache/spark/graphx/EdgeDirection.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/EdgeDirection.scala b/graphx/src/main/scala/org/apache/spark/graphx/EdgeDirection.scala
index 058c8c8..ce1054e 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/EdgeDirection.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/EdgeDirection.scala
@@ -26,8 +26,8 @@ class EdgeDirection private (private val name: String) extends Serializable {
    * out becomes in and both and either remain the same.
    */
   def reverse: EdgeDirection = this match {
-    case EdgeDirection.In   => EdgeDirection.Out
-    case EdgeDirection.Out  => EdgeDirection.In
+    case EdgeDirection.In => EdgeDirection.Out
+    case EdgeDirection.Out => EdgeDirection.In
     case EdgeDirection.Either => EdgeDirection.Either
     case EdgeDirection.Both => EdgeDirection.Both
   }
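
As a quick illustration of the reversal semantics shown above (a hypothetical
check, not part of the commit):

    import org.apache.spark.graphx.EdgeDirection

    assert(EdgeDirection.In.reverse == EdgeDirection.Out)
    assert(EdgeDirection.Either.reverse == EdgeDirection.Either)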

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/main/scala/org/apache/spark/graphx/EdgeTriplet.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/EdgeTriplet.scala b/graphx/src/main/scala/org/apache/spark/graphx/EdgeTriplet.scala
index c8790ca..65f8242 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/EdgeTriplet.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/EdgeTriplet.scala
@@ -37,7 +37,7 @@ class EdgeTriplet[VD, ED] extends Edge[ED] {
   /**
    * Set the edge properties of this triplet.
    */
-  protected[spark] def set(other: Edge[ED]): EdgeTriplet[VD,ED] = {
+  protected[spark] def set(other: Edge[ED]): EdgeTriplet[VD, ED] = {
     srcId = other.srcId
     dstId = other.dstId
     attr = other.attr

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/main/scala/org/apache/spark/graphx/Graph.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/Graph.scala b/graphx/src/main/scala/org/apache/spark/graphx/Graph.scala
index 36dc7b0..db73a8a 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/Graph.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/Graph.scala
@@ -316,7 +316,7 @@ abstract class Graph[VD: ClassTag, ED: ClassTag] protected () extends Serializab
    * satisfy the predicates
    */
   def subgraph(
-      epred: EdgeTriplet[VD,ED] => Boolean = (x => true),
+      epred: EdgeTriplet[VD, ED] => Boolean = (x => true),
       vpred: (VertexId, VD) => Boolean = ((v, d) => true))
     : Graph[VD, ED]
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala b/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala
index 7edd627..9451ff1 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/GraphOps.scala
@@ -124,18 +124,18 @@ class GraphOps[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED]) extends Seriali
   def collectNeighbors(edgeDirection: EdgeDirection): VertexRDD[Array[(VertexId, VD)]] = {
     val nbrs = edgeDirection match {
       case EdgeDirection.Either =>
-        graph.aggregateMessages[Array[(VertexId,VD)]](
+        graph.aggregateMessages[Array[(VertexId, VD)]](
           ctx => {
             ctx.sendToSrc(Array((ctx.dstId, ctx.dstAttr)))
             ctx.sendToDst(Array((ctx.srcId, ctx.srcAttr)))
           },
           (a, b) => a ++ b, TripletFields.All)
       case EdgeDirection.In =>
-        graph.aggregateMessages[Array[(VertexId,VD)]](
+        graph.aggregateMessages[Array[(VertexId, VD)]](
           ctx => ctx.sendToDst(Array((ctx.srcId, ctx.srcAttr))),
           (a, b) => a ++ b, TripletFields.Src)
       case EdgeDirection.Out =>
-        graph.aggregateMessages[Array[(VertexId,VD)]](
+        graph.aggregateMessages[Array[(VertexId, VD)]](
           ctx => ctx.sendToSrc(Array((ctx.dstId, ctx.dstAttr))),
           (a, b) => a ++ b, TripletFields.Dst)
       case EdgeDirection.Both =>
@@ -253,7 +253,7 @@ class GraphOps[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED]) extends Seriali
   def filter[VD2: ClassTag, ED2: ClassTag](
       preprocess: Graph[VD, ED] => Graph[VD2, ED2],
       epred: (EdgeTriplet[VD2, ED2]) => Boolean = (x: EdgeTriplet[VD2, ED2]) => true,
-      vpred: (VertexId, VD2) => Boolean = (v:VertexId, d:VD2) => true): Graph[VD, ED] = {
+      vpred: (VertexId, VD2) => Boolean = (v: VertexId, d: VD2) => true): Graph[VD, ED] = {
     graph.mask(preprocess(graph).subgraph(epred, vpred))
   }
 
@@ -356,7 +356,7 @@ class GraphOps[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED]) extends Seriali
       maxIterations: Int = Int.MaxValue,
       activeDirection: EdgeDirection = EdgeDirection.Either)(
       vprog: (VertexId, VD, A) => VD,
-      sendMsg: EdgeTriplet[VD, ED] => Iterator[(VertexId,A)],
+      sendMsg: EdgeTriplet[VD, ED] => Iterator[(VertexId, A)],
       mergeMsg: (A, A) => A)
     : Graph[VD, ED] = {
     Pregel(graph, initialMsg, maxIterations, activeDirection)(vprog, sendMsg, mergeMsg)

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/main/scala/org/apache/spark/graphx/Pregel.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/Pregel.scala b/graphx/src/main/scala/org/apache/spark/graphx/Pregel.scala
index 01b013f..cfcf724 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/Pregel.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/Pregel.scala
@@ -147,10 +147,10 @@ object Pregel extends Logging {
       logInfo("Pregel finished iteration " + i)
 
       // Unpersist the RDDs hidden by newly-materialized RDDs
-      oldMessages.unpersist(blocking=false)
-      newVerts.unpersist(blocking=false)
-      prevG.unpersistVertices(blocking=false)
-      prevG.edges.unpersist(blocking=false)
+      oldMessages.unpersist(blocking = false)
+      newVerts.unpersist(blocking = false)
+      prevG.unpersistVertices(blocking = false)
+      prevG.edges.unpersist(blocking = false)
       // count the iteration
       i += 1
     }
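
The unpersist calls above follow a common pattern in iterative jobs: materialize
the new iteration's RDDs first, then lazily release the previous iteration's
cache. A generic sketch of that pattern (the helper name and setup are
hypothetical, not from this commit):

    import org.apache.spark.rdd.RDD

    def rotateCache[T](prev: RDD[T], next: RDD[T]): RDD[T] = {
      next.cache().count()               // force the new RDD to materialize
      prev.unpersist(blocking = false)   // then drop the old one without waiting
      next
    }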

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala b/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala
index c561570..ab021a2 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/impl/EdgePartition.scala
@@ -156,8 +156,8 @@ class EdgePartition[
     val size = data.size
     var i = 0
     while (i < size) {
-      edge.srcId  = srcIds(i)
-      edge.dstId  = dstIds(i)
+      edge.srcId = srcIds(i)
+      edge.dstId = dstIds(i)
       edge.attr = data(i)
       newData(i) = f(edge)
       i += 1

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala
index bc974b2..8c0a461 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/lib/PageRank.scala
@@ -116,7 +116,7 @@ object PageRank extends Logging {
 
     val personalized = srcId isDefined
     val src: VertexId = srcId.getOrElse(-1L)
-    def delta(u: VertexId, v: VertexId):Double = { if (u == v) 1.0 else 0.0 }
+    def delta(u: VertexId, v: VertexId): Double = { if (u == v) 1.0 else 0.0 }
 
     var iteration = 0
     var prevRankGraph: Graph[Double, Double] = null
@@ -133,13 +133,13 @@ object PageRank extends Logging {
       // edge partitions.
       prevRankGraph = rankGraph
       val rPrb = if (personalized) {
-        (src: VertexId ,id: VertexId) => resetProb * delta(src,id)
+        (src: VertexId, id: VertexId) => resetProb * delta(src, id)
       } else {
         (src: VertexId, id: VertexId) => resetProb
       }
 
       rankGraph = rankGraph.joinVertices(rankUpdates) {
-        (id, oldRank, msgSum) => rPrb(src,id) + (1.0 - resetProb) * msgSum
+        (id, oldRank, msgSum) => rPrb(src, id) + (1.0 - resetProb) * msgSum
       }.cache()
 
       rankGraph.edges.foreachPartition(x => {}) // also materializes rankGraph.vertices
@@ -243,7 +243,7 @@ object PageRank extends Logging {
 
     // Execute a dynamic version of Pregel.
     val vp = if (personalized) {
-      (id: VertexId, attr: (Double, Double),msgSum: Double) =>
+      (id: VertexId, attr: (Double, Double), msgSum: Double) =>
         personalizedVertexProgram(id, attr, msgSum)
     } else {
       (id: VertexId, attr: (Double, Double), msgSum: Double) =>

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/main/scala/org/apache/spark/graphx/lib/SVDPlusPlus.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/SVDPlusPlus.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/SVDPlusPlus.scala
index 3b0e162..9cb24ed 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/lib/SVDPlusPlus.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/lib/SVDPlusPlus.scala
@@ -210,7 +210,7 @@ object SVDPlusPlus {
   /**
    * Forces materialization of a Graph by count()ing its RDDs.
    */
-  private def materialize(g: Graph[_,_]): Unit = {
+  private def materialize(g: Graph[_, _]): Unit = {
     g.vertices.count()
     g.edges.count()
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/main/scala/org/apache/spark/graphx/lib/TriangleCount.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/lib/TriangleCount.scala b/graphx/src/main/scala/org/apache/spark/graphx/lib/TriangleCount.scala
index daf1620..a5d5980 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/lib/TriangleCount.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/lib/TriangleCount.scala
@@ -38,7 +38,7 @@ import org.apache.spark.graphx._
  */
 object TriangleCount {
 
-  def run[VD: ClassTag, ED: ClassTag](graph: Graph[VD,ED]): Graph[Int, ED] = {
+  def run[VD: ClassTag, ED: ClassTag](graph: Graph[VD, ED]): Graph[Int, ED] = {
     // Remove redundant edges
     val g = graph.groupEdges((a, b) => a).cache()
 
@@ -49,7 +49,7 @@ object TriangleCount {
         var i = 0
         while (i < nbrs.size) {
           // prevent self cycle
-          if(nbrs(i) != vid) {
+          if (nbrs(i) != vid) {
             set.add(nbrs(i))
           }
           i += 1

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/main/scala/org/apache/spark/graphx/util/GraphGenerators.scala
----------------------------------------------------------------------
diff --git a/graphx/src/main/scala/org/apache/spark/graphx/util/GraphGenerators.scala b/graphx/src/main/scala/org/apache/spark/graphx/util/GraphGenerators.scala
index 2d6a825..9591c4e 100644
--- a/graphx/src/main/scala/org/apache/spark/graphx/util/GraphGenerators.scala
+++ b/graphx/src/main/scala/org/apache/spark/graphx/util/GraphGenerators.scala
@@ -243,14 +243,15 @@ object GraphGenerators {
    * @return A graph containing vertices with the row and column ids
    * as their attributes and edge values as 1.0.
    */
-  def gridGraph(sc: SparkContext, rows: Int, cols: Int): Graph[(Int,Int), Double] = {
+  def gridGraph(sc: SparkContext, rows: Int, cols: Int): Graph[(Int, Int), Double] = {
     // Convert row column address into vertex ids (row major order)
     def sub2ind(r: Int, c: Int): VertexId = r * cols + c
 
-    val vertices: RDD[(VertexId, (Int,Int))] =
-      sc.parallelize(0 until rows).flatMap( r => (0 until cols).map( c => (sub2ind(r,c), (r,c)) ) )
+    val vertices: RDD[(VertexId, (Int, Int))] = sc.parallelize(0 until rows).flatMap { r =>
+      (0 until cols).map( c => (sub2ind(r, c), (r, c)) )
+    }
     val edges: RDD[Edge[Double]] =
-      vertices.flatMap{ case (vid, (r,c)) =>
+      vertices.flatMap{ case (vid, (r, c)) =>
         (if (r + 1 < rows) { Seq( (sub2ind(r, c), sub2ind(r + 1, c))) } else { Seq.empty }) ++
         (if (c + 1 < cols) { Seq( (sub2ind(r, c), sub2ind(r, c + 1))) } else { Seq.empty })
       }.map{ case (src, dst) => Edge(src, dst, 1.0) }
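
For orientation, a minimal usage sketch of the generator touched above (the
SparkContext setup is hypothetical, not part of this commit):

    import org.apache.spark.SparkContext
    import org.apache.spark.graphx.util.GraphGenerators

    val sc = new SparkContext("local", "grid-example")
    // A 3 x 4 grid: vertices carry their (row, col) pair, edges carry 1.0.
    val grid = GraphGenerators.gridGraph(sc, rows = 3, cols = 4)
    assert(grid.vertices.count() == 12)  // rows * cols
    assert(grid.edges.count() == 17)     // rows*(cols-1) + (rows-1)*cols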

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala
----------------------------------------------------------------------
diff --git a/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala
index 9bc8007..68fe837 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala
@@ -59,7 +59,7 @@ class GraphOpsSuite extends FunSuite with LocalSparkContext {
   test ("filter") {
     withSpark { sc =>
       val n = 5
-      val vertices = sc.parallelize((0 to n).map(x => (x:VertexId, x)))
+      val vertices = sc.parallelize((0 to n).map(x => (x: VertexId, x)))
       val edges = sc.parallelize((1 to n).map(x => Edge(0, x, x)))
       val graph: Graph[Int, Int] = Graph(vertices, edges).cache()
       val filteredGraph = graph.filter(
@@ -67,11 +67,11 @@ class GraphOpsSuite extends FunSuite with LocalSparkContext {
           val degrees: VertexRDD[Int] = graph.outDegrees
           graph.outerJoinVertices(degrees) {(vid, data, deg) => deg.getOrElse(0)}
         },
-        vpred = (vid: VertexId, deg:Int) => deg > 0
+        vpred = (vid: VertexId, deg: Int) => deg > 0
       ).cache()
 
       val v = filteredGraph.vertices.collect().toSet
-      assert(v === Set((0,0)))
+      assert(v === Set((0, 0)))
 
       // the map is necessary because of object-reuse in the edge iterator
       val e = filteredGraph.edges.map(e => Edge(e.srcId, e.dstId, e.attr)).collect().toSet

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala
----------------------------------------------------------------------
diff --git a/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala
index a570e4e..2b1d8e4 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala
@@ -248,7 +248,7 @@ class GraphSuite extends FunSuite with LocalSparkContext {
   test("mask") {
     withSpark { sc =>
       val n = 5
-      val vertices = sc.parallelize((0 to n).map(x => (x:VertexId, x)))
+      val vertices = sc.parallelize((0 to n).map(x => (x: VertexId, x)))
       val edges = sc.parallelize((1 to n).map(x => Edge(0, x, x)))
       val graph: Graph[Int, Int] = Graph(vertices, edges).cache()
 
@@ -260,11 +260,11 @@ class GraphSuite extends FunSuite with LocalSparkContext {
       val projectedGraph = graph.mask(subgraph)
 
       val v = projectedGraph.vertices.collect().toSet
-      assert(v === Set((0,0), (1,1), (2,2), (4,4), (5,5)))
+      assert(v === Set((0, 0), (1, 1), (2, 2), (4, 4), (5, 5)))
 
       // the map is necessary because of object-reuse in the edge iterator
       val e = projectedGraph.edges.map(e => Edge(e.srcId, e.dstId, e.attr)).collect().toSet
-      assert(e === Set(Edge(0,1,1), Edge(0,2,2), Edge(0,5,5)))
+      assert(e === Set(Edge(0, 1, 1), Edge(0, 2, 2), Edge(0, 5, 5)))
 
     }
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/test/scala/org/apache/spark/graphx/lib/ConnectedComponentsSuite.scala
----------------------------------------------------------------------
diff --git a/graphx/src/test/scala/org/apache/spark/graphx/lib/ConnectedComponentsSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/lib/ConnectedComponentsSuite.scala
index 4cc30a9..accccfc 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/lib/ConnectedComponentsSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/lib/ConnectedComponentsSuite.scala
@@ -52,13 +52,16 @@ class ConnectedComponentsSuite extends FunSuite with LocalSparkContext {
     withSpark { sc =>
       val chain1 = (0 until 9).map(x => (x, x + 1))
       val chain2 = (10 until 20).map(x => (x, x + 1))
-      val rawEdges = sc.parallelize(chain1 ++ chain2, 3).map { case (s,d) => (s.toLong, d.toLong) }
+      val rawEdges = sc.parallelize(chain1 ++ chain2, 3).map { case (s, d) => (s.toLong, d.toLong) }
       val twoChains = Graph.fromEdgeTuples(rawEdges, 1.0)
       val ccGraph = twoChains.connectedComponents()
       val vertices = ccGraph.vertices.collect()
       for ( (id, cc) <- vertices ) {
-        if(id < 10) { assert(cc === 0) }
-        else { assert(cc === 10) }
+        if (id < 10) {
+          assert(cc === 0)
+        } else {
+          assert(cc === 10)
+        }
       }
       val ccMap = vertices.toMap
       for (id <- 0 until 20) {
@@ -75,7 +78,7 @@ class ConnectedComponentsSuite extends FunSuite with LocalSparkContext {
     withSpark { sc =>
       val chain1 = (0 until 9).map(x => (x, x + 1))
       val chain2 = (10 until 20).map(x => (x, x + 1))
-      val rawEdges = sc.parallelize(chain1 ++ chain2, 3).map { case (s,d) => (s.toLong, d.toLong) }
+      val rawEdges = sc.parallelize(chain1 ++ chain2, 3).map { case (s, d) => (s.toLong, d.toLong) }
       val twoChains = Graph.fromEdgeTuples(rawEdges, true).reverse
       val ccGraph = twoChains.connectedComponents()
       val vertices = ccGraph.vertices.collect()
@@ -106,9 +109,9 @@ class ConnectedComponentsSuite extends FunSuite with LocalSparkContext {
                        (4L, ("peter", "student"))))
       // Create an RDD for edges
       val relationships: RDD[Edge[String]] =
-        sc.parallelize(Array(Edge(3L, 7L, "collab"),    Edge(5L, 3L, "advisor"),
+        sc.parallelize(Array(Edge(3L, 7L, "collab"), Edge(5L, 3L, "advisor"),
                        Edge(2L, 5L, "colleague"), Edge(5L, 7L, "pi"),
-                       Edge(4L, 0L, "student"),   Edge(5L, 0L, "colleague")))
+                       Edge(4L, 0L, "student"), Edge(5L, 0L, "colleague")))
       // Edges are:
       //   2 ---> 5 ---> 3
       //          | \

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/test/scala/org/apache/spark/graphx/lib/PageRankSuite.scala
----------------------------------------------------------------------
diff --git a/graphx/src/test/scala/org/apache/spark/graphx/lib/PageRankSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/lib/PageRankSuite.scala
index 3f3c9df..39c6ace 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/lib/PageRankSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/lib/PageRankSuite.scala
@@ -31,14 +31,14 @@ object GridPageRank {
     def sub2ind(r: Int, c: Int): Int = r * nCols + c
     // Make the grid graph
     for (r <- 0 until nRows; c <- 0 until nCols) {
-      val ind = sub2ind(r,c)
+      val ind = sub2ind(r, c)
       if (r + 1 < nRows) {
         outDegree(ind) += 1
-        inNbrs(sub2ind(r + 1,c)) += ind
+        inNbrs(sub2ind(r + 1, c)) += ind
       }
       if (c + 1 < nCols) {
         outDegree(ind) += 1
-        inNbrs(sub2ind(r,c + 1)) += ind
+        inNbrs(sub2ind(r, c + 1)) += ind
       }
     }
     // compute the pagerank
@@ -99,8 +99,8 @@ class PageRankSuite extends FunSuite with LocalSparkContext {
       val resetProb = 0.15
       val errorTol = 1.0e-5
 
-      val staticRanks1 = starGraph.staticPersonalizedPageRank(0,numIter = 1, resetProb).vertices
-      val staticRanks2 = starGraph.staticPersonalizedPageRank(0,numIter = 2, resetProb)
+      val staticRanks1 = starGraph.staticPersonalizedPageRank(0, numIter = 1, resetProb).vertices
+      val staticRanks2 = starGraph.staticPersonalizedPageRank(0, numIter = 2, resetProb)
         .vertices.cache()
 
       // Static PageRank should only take 2 iterations to converge
@@ -117,7 +117,7 @@ class PageRankSuite extends FunSuite with LocalSparkContext {
       }
       assert(staticErrors.sum === 0)
 
-      val dynamicRanks = starGraph.personalizedPageRank(0,0, resetProb).vertices.cache()
+      val dynamicRanks = starGraph.personalizedPageRank(0, 0, resetProb).vertices.cache()
       assert(compareRanks(staticRanks2, dynamicRanks) < errorTol)
     }
   } // end of test Star PageRank
@@ -162,7 +162,7 @@ class PageRankSuite extends FunSuite with LocalSparkContext {
   test("Chain PersonalizedPageRank") {
     withSpark { sc =>
       val chain1 = (0 until 9).map(x => (x, x + 1) )
-      val rawEdges = sc.parallelize(chain1, 1).map { case (s,d) => (s.toLong, d.toLong) }
+      val rawEdges = sc.parallelize(chain1, 1).map { case (s, d) => (s.toLong, d.toLong) }
       val chain = Graph.fromEdgeTuples(rawEdges, 1.0).cache()
       val resetProb = 0.15
       val tol = 0.0001

http://git-wip-us.apache.org/repos/asf/spark/blob/b3a59006/graphx/src/test/scala/org/apache/spark/graphx/lib/TriangleCountSuite.scala
----------------------------------------------------------------------
diff --git a/graphx/src/test/scala/org/apache/spark/graphx/lib/TriangleCountSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/lib/TriangleCountSuite.scala
index 293c7f3..79bf4e6 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/lib/TriangleCountSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/lib/TriangleCountSuite.scala
@@ -58,7 +58,7 @@ class TriangleCountSuite extends FunSuite with LocalSparkContext {
       val triangles =
         Array(0L -> 1L, 1L -> 2L, 2L -> 0L) ++
         Array(0L -> -1L, -1L -> -2L, -2L -> 0L)
-      val revTriangles = triangles.map { case (a,b) => (b,a) }
+      val revTriangles = triangles.map { case (a, b) => (b, a) }
       val rawEdges = sc.parallelize(triangles ++ revTriangles, 2)
       val graph = Graph.fromEdgeTuples(rawEdges, true).cache()
       val triangleCount = graph.triangleCount()

