Resolve PR review over 100 chars

Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/aa56585d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/aa56585d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/aa56585d

Branch: refs/heads/master
Commit: aa56585d2148b3ced506d2fff89da0858300928c
Parents: f6b6f88
Author: Henry Saputra <hsapu...@apache.org>
Authored: Wed Jan 8 00:38:29 2014 -0800
Committer: Henry Saputra <hsapu...@apache.org>
Committed: Wed Jan 8 00:38:29 2014 -0800

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/aa56585d/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
index c8446fd..4fe3bc5 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
@@ -613,7 +613,8 @@ class PairRDDFunctions[K: ClassTag, V: ClassTag](self: RDD[(K, V)])
       // around by taking a mod. We expect that no task will be attempted 2 billion times.
       val attemptNumber = (context.attemptId % Int.MaxValue).toInt
       /* "reduce task" <split #> <attempt # = spark task #> */
-      val attemptId = newTaskAttemptID(jobtrackerID, stageId, isMap = false, context.partitionId, attemptNumber)
+      val attemptId = newTaskAttemptID(jobtrackerID, stageId, isMap = false, context.partitionId,
+        attemptNumber)
       val hadoopContext = newTaskAttemptContext(wrappedConf.value, attemptId)
       val format = outputFormatClass.newInstance
       val committer = format.getOutputCommitter(hadoopContext)

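For context on the comment in the hunk above: a minimal sketch (not part of the commit; the object and method names below are illustrative only) of the modulo wrap-around it describes, where a Long attempt id is folded into a non-negative Int so it fits Hadoop's Int-typed task attempt number.

object AttemptIdWrapSketch {
  // Fold a Long attempt id into an Int by taking a mod, mirroring the line
  // `val attemptNumber = (context.attemptId % Int.MaxValue).toInt` above.
  def toAttemptNumber(attemptId: Long): Int =
    (attemptId % Int.MaxValue).toInt

  def main(args: Array[String]): Unit = {
    println(toAttemptNumber(42L))                      // small ids pass through: 42
    println(toAttemptNumber(Int.MaxValue.toLong + 7))  // very large ids wrap around: 7
  }
}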