Repository: spark
Updated Branches:
  refs/heads/master 6181937f3 -> 94f62a497


[SPARK-7940] Enforce whitespace checking for DO, TRY, CATCH, FINALLY, MATCH, LARROW, RARROW in style checker.

…

Author: Reynold Xin <r...@databricks.com>

Closes #6491 from rxin/more-whitespace and squashes the following commits:

f6e63dc [Reynold Xin] [SPARK-7940] Enforce whitespace checking for DO, TRY, CATCH, FINALLY, MATCH, LARROW, RARROW in style checker.
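
For context, a minimal sketch (not part of the commit; WhitespaceStyleExample and doWork are hypothetical placeholders) of the spacing patterns the expanded checks flag and accept:

    object WhitespaceStyleExample {
      private def doWork(): Unit = ()

      // Flagged by the expanded checks: no single space around the token.
      //   try{ doWork() } catch{ case e: Exception => println(e) }
      //   for (i <-0 until 10) println(i)
      //   0 match{ case _ => () }

      // Accepted: a single space before and after the token.
      def demo(): Unit = {
        try { doWork() } catch { case e: Exception => println(e) }
        for (i <- 0 until 10) println(i)
        0 match { case _ => () }
      }
    }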


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/94f62a49
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/94f62a49
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/94f62a49

Branch: refs/heads/master
Commit: 94f62a4979e4bc5f7bf4f5852d76977e097209e6
Parents: 6181937
Author: Reynold Xin <r...@databricks.com>
Authored: Fri May 29 13:38:37 2015 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Fri May 29 13:38:37 2015 -0700

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/network/nio/BlockMessage.scala  | 2 +-
 .../main/scala/org/apache/spark/network/nio/Connection.scala    | 5 ++---
 .../scala/org/apache/spark/network/nio/ConnectionManager.scala  | 5 ++---
 .../scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala   | 2 +-
 .../src/main/scala/org/apache/spark/mllib/tree/model/Node.scala | 2 +-
 .../spark/mllib/classification/LogisticRegressionSuite.scala    | 4 ++--
 scalastyle-config.xml                                           | 4 ++--
 .../src/main/scala/org/apache/spark/sql/types/UTF8String.scala  | 2 +-
 sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala | 2 +-
 9 files changed, 13 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/94f62a49/core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala b/core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala
index 1a92a79..67a3761 100644
--- a/core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala
+++ b/core/src/main/scala/org/apache/spark/network/nio/BlockMessage.scala
@@ -155,7 +155,7 @@ private[nio] class BlockMessage() {
 
   override def toString: String = {
     "BlockMessage [type = " + typ + ", id = " + id + ", level = " + level +
-    ", data = " + (if (data != null) data.remaining.toString  else "null") + 
"]"
+    ", data = " + (if (data != null) data.remaining.toString else "null") + "]"
   }
 }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/94f62a49/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/nio/Connection.scala b/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
index 6b898bd..1499da0 100644
--- a/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
+++ b/core/src/main/scala/org/apache/spark/network/nio/Connection.scala
@@ -326,15 +326,14 @@ class SendingConnection(val address: InetSocketAddress, selector_ : Selector,
 
   // MUST be called within the selector loop
   def connect() {
-    try{
+    try {
       channel.register(selector, SelectionKey.OP_CONNECT)
       channel.connect(address)
       logInfo("Initiating connection to [" + address + "]")
     } catch {
-      case e: Exception => {
+      case e: Exception =>
         logError("Error connecting to " + address, e)
         callOnExceptionCallbacks(e)
-      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/94f62a49/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
index 497871e..c0bca2c 100644
--- a/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
+++ b/core/src/main/scala/org/apache/spark/network/nio/ConnectionManager.scala
@@ -635,12 +635,11 @@ private[nio] class ConnectionManager(
         val message = securityMsgResp.toBufferMessage
         if (message == null) throw new IOException("Error creating security message")
         sendSecurityMessage(waitingConn.getRemoteConnectionManagerId(), message)
-      } catch  {
-        case e: Exception => {
+      } catch {
+        case e: Exception =>
           logError("Error handling sasl client authentication", e)
           waitingConn.close()
           throw new IOException("Error evaluating sasl response: ", e)
-        }
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/94f62a49/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
index 7598ff6..9e38807 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala
@@ -86,7 +86,7 @@ class PartitionerAwareUnionRDD[T: ClassTag](
     }
     val location = if (locations.isEmpty) {
       None
-    } else  {
+    } else {
       // Find the location that maximum number of parent partitions prefer
       Some(locations.groupBy(x => x).maxBy(_._2.length)._1)
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/94f62a49/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
index ee710fc..a6d1398 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/model/Node.scala
@@ -83,7 +83,7 @@ class Node (
   def predict(features: Vector) : Double = {
     if (isLeaf) {
       predict.predict
-    } else{
+    } else {
       if (split.get.featureType == Continuous) {
         if (features(split.get.feature) <= split.get.threshold) {
           leftNode.get.predict(features)

http://git-wip-us.apache.org/repos/asf/spark/blob/94f62a49/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
index 966811a..b1014ab 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/classification/LogisticRegressionSuite.scala
@@ -119,7 +119,7 @@ object LogisticRegressionSuite {
       }
       // Preventing the overflow when we compute the probability
       val maxMargin = margins.max
-      if (maxMargin > 0) for (i <-0 until nClasses) margins(i) -= maxMargin
+      if (maxMargin > 0) for (i <- 0 until nClasses) margins(i) -= maxMargin
 
       // Computing the probabilities for each class from the margins.
       val norm = {
@@ -130,7 +130,7 @@ object LogisticRegressionSuite {
         }
         temp
       }
-      for (i <-0 until nClasses) probs(i) /= norm
+      for (i <- 0 until nClasses) probs(i) /= norm
 
       // Compute the cumulative probability so we can generate a random number and assign a label.
       for (i <- 1 until nClasses) probs(i) += probs(i - 1)

http://git-wip-us.apache.org/repos/asf/spark/blob/94f62a49/scalastyle-config.xml
----------------------------------------------------------------------
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index 68d980b..68c8ce3 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -144,12 +144,12 @@
  <check level="error" 
class="org.scalastyle.scalariform.SpaceAfterCommentStartChecker" 
enabled="true"></check>
  <check level="error" 
class="org.scalastyle.scalariform.EnsureSingleSpaceBeforeTokenChecker" 
enabled="true">
    <parameters>
-     <parameter name="tokens">ARROW, EQUALS</parameter>
+     <parameter name="tokens">ARROW, EQUALS, ELSE, TRY, CATCH, FINALLY, 
LARROW, RARROW</parameter>
    </parameters>
  </check>
   <check level="error" 
class="org.scalastyle.scalariform.EnsureSingleSpaceAfterTokenChecker" 
enabled="true">
     <parameters>
-     <parameter name="tokens">ARROW, EQUALS, COMMA, COLON, IF, WHILE, 
FOR</parameter>
+     <parameter name="tokens">ARROW, EQUALS, COMMA, COLON, IF, ELSE, DO, 
WHILE, FOR, MATCH, TRY, CATCH, FINALLY, LARROW, RARROW</parameter>
     </parameters>
   </check>
   <check level="error" 
class="org.scalastyle.scalariform.NotImplementedErrorUsage" 
enabled="true"></check>

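As a usage note, the two checkers above enforce a single space before and after the listed tokens, respectively. A minimal sketch (illustrative only, not from this diff; BeforeTokenExample is a hypothetical placeholder) of forms the expanded "before token" list now flags:

    object BeforeTokenExample {
      // Flagged: missing the single space before the token.
      //   } catch {                 written as   }catch {
      //   case e: Exception => ...  written as   case e: Exception=> ...
      //   for (i <- 0 until 2) ...  written as   for (i<- 0 until 2) ...

      // Accepted:
      def demo(): Unit = {
        try {
          println("ok")
        } catch {
          case e: Exception => println(e)
        } finally {
          println("done")
        }
      }
    }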
http://git-wip-us.apache.org/repos/asf/spark/blob/94f62a49/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UTF8String.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UTF8String.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UTF8String.scala
index bc9c37b..f5d8fcc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UTF8String.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/UTF8String.scala
@@ -203,7 +203,7 @@ object UTF8String {
   def apply(s: String): UTF8String = {
     if (s != null) {
       new UTF8String().set(s)
-    } else{
+    } else {
       null
     }
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/94f62a49/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
index 0bdb68e..2d8d950 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
@@ -262,7 +262,7 @@ private[sql] class JDBCRDD(
   }
 
   private def escapeSql(value: String): String =
-    if (value == null) null else  StringUtils.replace(value, "'", "''")
+    if (value == null) null else StringUtils.replace(value, "'", "''")
 
   /**
    * Turns a single Filter into a String representing a SQL expression.

