Repository: spark
Updated Branches:
  refs/heads/branch-1.5 3ceee5572 -> 392bd19d6


[SPARK-9967] [SPARK-10099] [STREAMING] Renamed conf spark.streaming.backpressure.{enable --> enabled} and fixed deprecated annotations

Small changes:
- Renamed conf spark.streaming.backpressure.{enable --> enabled}
- Changed the Java @Deprecated annotations to Scala @deprecated annotations that carry a message and the deprecating version (see the sketch below).
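
For reference, a minimal sketch of the annotation style this patch adopts (hypothetical class and method names, not taken from the diff): Scala's @deprecated accepts a message and the version in which the symbol was deprecated, and scalac surfaces both in its warnings, whereas Java's @Deprecated carries no such information.

  class LegacyApi {
    // The message and "since" version appear in compiler warnings at call sites.
    @deprecated("use collect()", "1.0.0")
    def toArray(): Seq[Int] = collect()

    def collect(): Seq[Int] = Seq(1, 2, 3)
  }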

Author: Tathagata Das <tathagata.das1...@gmail.com>

Closes #8299 from tdas/SPARK-9967.

(cherry picked from commit bc9a0e03235865d2ec33372f6400dec8c770778a)
Signed-off-by: Tathagata Das <tathagata.das1...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/392bd19d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/392bd19d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/392bd19d

Branch: refs/heads/branch-1.5
Commit: 392bd19d678567751cd3844d9d166a7491c5887e
Parents: 3ceee55
Author: Tathagata Das <tathagata.das1...@gmail.com>
Authored: Tue Aug 18 23:37:57 2015 -0700
Committer: Tathagata Das <tathagata.das1...@gmail.com>
Committed: Tue Aug 18 23:38:13 2015 -0700

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/api/java/JavaRDDLike.scala   | 2 +-
 .../main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala | 2 +-
 .../apache/spark/streaming/api/java/JavaDStreamLike.scala    | 4 ++--
 .../apache/spark/streaming/scheduler/RateController.scala    | 8 ++++----
 4 files changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/392bd19d/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
index 829fae1..c582488 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
@@ -354,7 +354,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
    * Return an array that contains all of the elements in this RDD.
    * @deprecated As of Spark 1.0.0, toArray() is deprecated, use {@link #collect()} instead
    */
-  @Deprecated
+  @deprecated("use collect()", "1.0.0")
   def toArray(): JList[T] = collect()
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/392bd19d/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
index 7e9dba4..dda4216 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkHadoopUtil.scala
@@ -76,7 +76,7 @@ class SparkHadoopUtil extends Logging {
     }
   }
 
-  @Deprecated
+  @deprecated("use newConfiguration with SparkConf argument", "1.2.0")
   def newConfiguration(): Configuration = newConfiguration(null)
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/392bd19d/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
index 808dcc1..214cd80 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaDStreamLike.scala
@@ -291,7 +291,7 @@ trait JavaDStreamLike[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T
    *
    * @deprecated  As of release 0.9.0, replaced by foreachRDD
    */
-  @Deprecated
+  @deprecated("Use foreachRDD", "0.9.0")
   def foreach(foreachFunc: JFunction[R, Void]) {
     foreachRDD(foreachFunc)
   }
@@ -302,7 +302,7 @@ trait JavaDStreamLike[T, This <: JavaDStreamLike[T, This, R], R <: JavaRDDLike[T
    *
    * @deprecated  As of release 0.9.0, replaced by foreachRDD
    */
-  @Deprecated
+  @deprecated("Use foreachRDD", "0.9.0")
   def foreach(foreachFunc: JFunction2[R, Time, Void]) {
     foreachRDD(foreachFunc)
   }
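
For context, a minimal usage sketch of the non-deprecated replacement (hypothetical stream and helper names): on the Scala API, foreachRDD applies a function to the RDD of every batch, which is exactly what the deprecated foreach delegated to.

  import org.apache.spark.streaming.dstream.DStream

  // Hypothetical helper: log the size of each batch using foreachRDD.
  def logBatchSizes(stream: DStream[String]): Unit = {
    stream.foreachRDD { rdd =>
      println(s"records in batch: ${rdd.count()}")
    }
  }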

http://git-wip-us.apache.org/repos/asf/spark/blob/392bd19d/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala
----------------------------------------------------------------------
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala
index 882ca06..a46c0c1 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/scheduler/RateController.scala
@@ -76,9 +76,9 @@ private[streaming] abstract class RateController(val streamUID: Int, rateEstimat
     val elements = batchCompleted.batchInfo.streamIdToInputInfo
 
     for {
-      processingEnd <- batchCompleted.batchInfo.processingEndTime;
-      workDelay <- batchCompleted.batchInfo.processingDelay;
-      waitDelay <- batchCompleted.batchInfo.schedulingDelay;
+      processingEnd <- batchCompleted.batchInfo.processingEndTime
+      workDelay <- batchCompleted.batchInfo.processingDelay
+      waitDelay <- batchCompleted.batchInfo.schedulingDelay
       elems <- elements.get(streamUID).map(_.numRecords)
     } computeAndPublish(processingEnd, elems, workDelay, waitDelay)
   }
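
The dropped semicolons above are purely stylistic: inside a brace-delimited for comprehension each generator can sit on its own line with no trailing semicolon. A minimal standalone sketch of the same pattern over Options (hypothetical values), where the body runs only if every Option is defined:

  val processingEnd: Option[Long] = Some(1439968693000L)
  val workDelay: Option[Long] = Some(120L)
  val waitDelay: Option[Long] = Some(30L)

  for {
    end      <- processingEnd
    work     <- workDelay
    waitTime <- waitDelay
  } println(s"end=$end workDelay=$work waitDelay=$waitTime")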
@@ -86,5 +86,5 @@ private[streaming] abstract class RateController(val streamUID: Int, rateEstimat
 
 object RateController {
   def isBackPressureEnabled(conf: SparkConf): Boolean =
-    conf.getBoolean("spark.streaming.backpressure.enable", false)
+    conf.getBoolean("spark.streaming.backpressure.enabled", false)
 }
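
For reference, a minimal sketch of how the renamed key is set on the user side (hypothetical app name): the old spark.streaming.backpressure.enable key is no longer read after this change, and backpressure remains off by default.

  import org.apache.spark.SparkConf

  val conf = new SparkConf()
    .setAppName("BackpressureExample")
    .set("spark.streaming.backpressure.enabled", "true")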

