Repository: spark
Updated Branches:
  refs/heads/master d8fefab4d -> bc1babd63


http://git-wip-us.apache.org/repos/asf/spark/blob/bc1babd6/mllib/src/main/scala/org/apache/spark/mllib/feature/VectorTransformer.scala
----------------------------------------------------------------------
diff --git a/mllib/src/main/scala/org/apache/spark/mllib/feature/VectorTransformer.scala b/mllib/src/main/scala/org/apache/spark/mllib/feature/VectorTransformer.scala
index 5778fd1..ca73851 100644
--- a/mllib/src/main/scala/org/apache/spark/mllib/feature/VectorTransformer.scala
+++ b/mllib/src/main/scala/org/apache/spark/mllib/feature/VectorTransformer.scala
@@ -47,7 +47,7 @@ trait VectorTransformer extends Serializable {
    */
   @Since("1.1.0")
   def transform(data: RDD[Vector]): RDD[Vector] = {
-    // Later in #1498 , all RDD objects are sent via broadcasting instead of akka.
+    // Later in #1498 , all RDD objects are sent via broadcasting instead of RPC.
     // So it should be no longer necessary to explicitly broadcast `this` object.
     data.map(x => this.transform(x))
   }

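For context on the comment change above: tasks, together with any object captured in their closures (including `this`), are shipped to executors via broadcast rather than as individual RPC messages, so broadcasting the transformer by hand buys nothing. A minimal sketch of the redundant pattern, using a hypothetical IdentityTransformer that is not part of MLlib:

import org.apache.spark.mllib.feature.VectorTransformer
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.rdd.RDD

// Hypothetical transformer, only to illustrate the comment above.
class IdentityTransformer extends VectorTransformer {
  override def transform(vector: Vector): Vector = vector

  // The pattern the comment rules out: manually broadcasting `this`.
  // Plain `data.map(x => this.transform(x))` already ships `this` to the
  // executors inside the broadcast task closure.
  def transformWithExplicitBroadcast(data: RDD[Vector]): RDD[Vector] = {
    val bcSelf = data.sparkContext.broadcast(this)
    data.map(x => bcSelf.value.transform(x))
  }
}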
http://git-wip-us.apache.org/repos/asf/spark/blob/bc1babd6/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
----------------------------------------------------------------------
diff --git a/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala b/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
index 9b2d023..95d874b 100644
--- a/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
+++ b/mllib/src/test/scala/org/apache/spark/mllib/util/LocalClusterSparkContext.scala
@@ -29,7 +29,7 @@ trait LocalClusterSparkContext extends BeforeAndAfterAll { self: Suite =>
     val conf = new SparkConf()
       .setMaster("local-cluster[2, 1, 1024]")
       .setAppName("test-cluster")
-      .set("spark.akka.frameSize", "1") // set to 1MB to detect direct 
serialization of data
+      .set("spark.rpc.message.maxSize", "1") // set to 1MB to detect direct 
serialization of data
     sc = new SparkContext(conf)
   }
 

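A note on the setting above: spark.rpc.message.maxSize is expressed in MB, so "1" caps RPC messages at 1 MB. In this fixture it acts as a tripwire: code that accidentally serializes bulk data straight into an RPC message, instead of moving it through the block manager, fails fast. A minimal self-contained sketch of the same setup:

import org.apache.spark.{SparkConf, SparkContext}

// Two workers with 1 core and 1024 MB each; RPC frame size forced down to
// 1 MB so oversized messages surface as errors in tests.
val conf = new SparkConf()
  .setMaster("local-cluster[2, 1, 1024]")
  .setAppName("test-cluster")
  .set("spark.rpc.message.maxSize", "1") // in MB
val sc = new SparkContext(conf)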
http://git-wip-us.apache.org/repos/asf/spark/blob/bc1babd6/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 43f08ef..f08642f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -570,6 +570,11 @@
         <version>4.0.29.Final</version>
       </dependency>
       <dependency>
+        <groupId>io.netty</groupId>
+        <artifactId>netty</artifactId>
+        <version>3.8.0.Final</version>
+      </dependency>
+      <dependency>
         <groupId>org.apache.derby</groupId>
         <artifactId>derby</artifactId>
         <version>${derby.version}</version>
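For context on the dependency pin above: Netty 3.x and 4.x can coexist on one classpath because they live in different packages (org.jboss.netty vs. io.netty), so this entry only fixes the 3.x version that transitive dependencies pull in, alongside the 4.0.29.Final artifact already managed above. The sbt equivalent would be roughly:

// Illustrative sbt equivalent of the Maven dependencyManagement entry.
libraryDependencies += "io.netty" % "netty" % "3.8.0.Final" // Netty 3.x, package org.jboss.netty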

http://git-wip-us.apache.org/repos/asf/spark/blob/bc1babd6/project/MimaExcludes.scala
----------------------------------------------------------------------
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index 905fb4c..c65fae4 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -162,7 +162,9 @@ object MimaExcludes {
         ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.streaming.StreamingContext.actorStream$default$4"),
         ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.streaming.StreamingContext.actorStream$default$3"),
         ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.streaming.StreamingContext.actorStream"),
-        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.streaming.api.java.JavaStreamingContext.actorStream")
+        ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.streaming.api.java.JavaStreamingContext.actorStream"),
+        ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.streaming.zeromq.ZeroMQReceiver"),
+        ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.streaming.receiver.ActorReceiver$Supervisor")
       ) ++ Seq(
         // SPARK-12847 Remove StreamingListenerBus and post all Streaming events to the same thread as Spark events
         ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.util.AsynchronousListenerBus$"),

http://git-wip-us.apache.org/repos/asf/spark/blob/bc1babd6/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala
----------------------------------------------------------------------
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala
index 081f5a1..898db85 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala
@@ -41,7 +41,6 @@ class ReceivedBlockTrackerSuite
   extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
 
   val hadoopConf = new Configuration()
-  val akkaTimeout = 10 seconds
   val streamId = 1
 
   var allReceivedBlockTrackers = new ArrayBuffer[ReceivedBlockTracker]()

http://git-wip-us.apache.org/repos/asf/spark/blob/bc1babd6/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala
----------------------------------------------------------------------
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala
index 31fa53e..21ac04d 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala
@@ -166,7 +166,7 @@ class ExecutorRunnable(
 
     // Certain configs need to be passed here because they are needed before the Executor
     // registers with the Scheduler and transfers the spark configs. Since the Executor backend
-    // uses Akka to connect to the scheduler, the akka settings are needed as well as the
+    // uses RPC to connect to the scheduler, the RPC settings are needed as well as the
     // authentication settings.
     sparkConf.getAll
       .filter { case (k, v) => SparkConf.isExecutorStartupConf(k) }
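For context, the snippet above forwards only the settings an executor needs before it registers with the scheduler over RPC; everything else is transferred after registration. A minimal sketch of the filtering step, where the prefix-based predicate is an illustrative stand-in for the real SparkConf.isExecutorStartupConf:

import org.apache.spark.SparkConf

// Stand-in for SparkConf.isExecutorStartupConf: keep only settings needed
// before the executor has registered (RPC and authentication related).
def isStartupConf(key: String): Boolean =
  key.startsWith("spark.rpc.") || key.startsWith("spark.auth")

val sparkConf = new SparkConf(loadDefaults = false)
  .set("spark.rpc.message.maxSize", "128")
  .set("spark.authenticate", "true")
  .set("spark.ui.port", "4040") // not a startup conf; filtered out below

// Mirror the pattern above: turn the selected configs into -D options for
// the executor JVM command line.
val javaOpts = sparkConf.getAll
  .filter { case (k, _) => isStartupConf(k) }
  .map { case (k, v) => s"-D$k=$v" }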

