Merge branch 'master' of github.com:apache/incubator-spark into scala-2.10-temp

Conflicts:
        core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala
        streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/95d8dbce
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/95d8dbce
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/95d8dbce

Branch: refs/heads/master
Commit: 95d8dbce91f49467050250d5cf3671aaaa648d76
Parents: 199e9cf 2fead51
Author: Prashant Sharma <prashan...@imaginea.com>
Authored: Thu Nov 21 12:34:46 2013 +0530
Committer: Prashant Sharma <prashan...@imaginea.com>
Committed: Thu Nov 21 12:34:46 2013 +0530

----------------------------------------------------------------------
 .../scala/org/apache/spark/SparkContext.scala   |   1 -
 .../spark/deploy/FaultToleranceTest.scala       |  28 ++---
 .../apache/spark/deploy/LocalSparkCluster.scala |   6 +-
 .../executor/CoarseGrainedExecutorBackend.scala |   2 +-
 .../apache/spark/executor/ExecutorSource.scala  |   2 -
 .../spark/network/netty/ShuffleCopier.scala     |   2 +-
 .../org/apache/spark/rdd/CartesianRDD.scala     |   2 +-
 .../apache/spark/rdd/PartitionPruningRDD.scala  |   8 +-
 .../scheduler/cluster/ClusterScheduler.scala    |  13 +--
 .../cluster/ClusterTaskSetManager.scala         |   9 ++
 .../cluster/CoarseGrainedSchedulerBackend.scala |   1 +
 .../cluster/SimrSchedulerBackend.scala          |  13 +--
 .../org/apache/spark/ui/exec/ExecutorsUI.scala  |  23 ++--
 .../org/apache/spark/ui/jobs/StagePage.scala    |   2 +-
 .../spark/util/collection/PrimitiveVector.scala |  46 +++++---
 .../org/apache/spark/LocalSparkContext.scala    |   2 +-
 .../apache/spark/PartitionPruningRDDSuite.scala |  45 -------
 .../org/apache/spark/PartitioningSuite.scala    |  10 +-
 .../spark/rdd/PartitionPruningRDDSuite.scala    |  86 ++++++++++++++
 .../util/collection/PrimitiveVectorSuite.scala  | 117 +++++++++++++++++++
 docs/running-on-yarn.md                         |   2 +
 .../apache/spark/examples/BroadcastTest.scala   |  10 +-
 .../org/apache/spark/examples/LocalALS.scala    |   2 +-
 .../spark/examples/MultiBroadcastTest.scala     |  15 ++-
 .../org/apache/spark/examples/SparkTC.scala     |   2 +-
 .../streaming/examples/ActorWordCount.scala     |   2 +-
 .../streaming/examples/MQTTWordCount.scala      |   4 +-
 .../org/apache/spark/streaming/Checkpoint.scala |   6 +-
 .../api/java/JavaStreamingContext.scala         |   7 +-
 .../streaming/dstream/FlumeInputDStream.scala   |   4 +-
 .../spark/streaming/InputStreamsSuite.scala     |   4 +-
 .../apache/spark/streaming/TestSuiteBase.scala  |   2 +-
 .../spark/deploy/yarn/ApplicationMaster.scala   |  53 +++++----
 .../org/apache/spark/deploy/yarn/Client.scala   |  63 +++++-----
 .../yarn/ClientDistributedCacheManager.scala    |   6 +-
 .../spark/deploy/yarn/WorkerRunnable.scala      |  13 +--
 .../deploy/yarn/YarnAllocationHandler.scala     |  16 ++-
 .../spark/deploy/yarn/YarnSparkHadoopUtil.scala |   5 +-
 .../ClientDistributedCacheManagerSuite.scala    |   2 +-
 39 files changed, 415 insertions(+), 221 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --cc core/src/main/scala/org/apache/spark/SparkContext.scala
index a12f886,42b2985..b9fe7f6
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@@ -24,11 -24,8 +24,10 @@@ import java.util.concurrent.atomic.Atom
  
  import scala.collection.Map
  import scala.collection.generic.Growable
- import scala.collection.JavaConverters._
  import scala.collection.mutable.ArrayBuffer
  import scala.collection.mutable.HashMap
 +import scala.reflect.{ ClassTag, classTag}
 +import scala.util.DynamicVariable
  
  import org.apache.hadoop.conf.Configuration
  import org.apache.hadoop.fs.Path

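The import changes above are the core of the Scala 2.10 move: ClassTag/classTag take over the role ClassManifest/classManifest played in 2.9 for carrying a type's runtime class. A minimal sketch of the replacement pattern (the helper name runtimeClassOf is illustrative only, not part of this commit):

    import scala.reflect.{ClassTag, classTag}

    // Illustrative only: a ClassTag context bound supplies the erased runtime
    // class that ClassManifest used to provide in Scala 2.9.
    def runtimeClassOf[T: ClassTag]: Class[_] = classTag[T].runtimeClass

    // e.g. runtimeClassOf[String] == classOf[String]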
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
----------------------------------------------------------------------
diff --cc core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
index 16d8f81,8332631..a98ec06
--- a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
@@@ -19,10 -19,10 +19,10 @@@ package org.apache.spark.executo
  
  import java.nio.ByteBuffer
  
 -import akka.actor.{ActorRef, Actor, Props, Terminated}
 -import akka.remote.{RemoteClientLifeCycleEvent, RemoteClientShutdown, RemoteClientDisconnected}
 +import akka.actor._
 +import akka.remote._
  
- import org.apache.spark.{Logging, SparkEnv}
+ import org.apache.spark.Logging
  import org.apache.spark.TaskState.TaskState
  import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
  import org.apache.spark.util.{Utils, AkkaUtils}

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/core/src/main/scala/org/apache/spark/scheduler/cluster/ClusterScheduler.scala
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala
----------------------------------------------------------------------
diff --cc core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala
index 465c221,20554f0..b84eb65
--- a/core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/PrimitiveVector.scala
@@@ -17,21 -17,21 +17,23 @@@
  
  package org.apache.spark.util.collection
  
 +import scala.reflect.ClassTag
 +
- /** Provides a simple, non-threadsafe, array-backed vector that can store primitives. */
+ /**
+  * An append-only, non-threadsafe, array-backed vector that is optimized for primitive types.
+  */
  private[spark]
 -class PrimitiveVector[@specialized(Long, Int, Double) V: ClassManifest](initialSize: Int = 64) {
 +class PrimitiveVector[@specialized(Long, Int, Double) V: ClassTag](initialSize: Int = 64) {
-   private var numElements = 0
-   private var array: Array[V] = _
+   private var _numElements = 0
+   private var _array: Array[V] = _
  
    // NB: This must be separate from the declaration, otherwise the specialized parent class
-   // will get its own array with the same initial size. TODO: Figure out why...
-   array = new Array[V](initialSize)
+   // will get its own array with the same initial size.
+   _array = new Array[V](initialSize)
  
    def apply(index: Int): V = {
-     require(index < numElements)
-     array(index)
+     require(index < _numElements)
+     _array(index)
    }
  
    def +=(value: V) {

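The ClassManifest -> ClassTag change matters in this file because the class allocates an Array[V] at runtime; in 2.10 a ClassTag context bound is what lets new Array[V](n) pick the correct primitive array. A rough sketch of the same pattern (class and field names are made up for illustration, not the commit's code):

    import scala.reflect.ClassTag

    // Illustrative sketch: ClassTag-backed array allocation in a specialized class.
    class GrowableVector[@specialized(Long, Int, Double) V: ClassTag](initialSize: Int = 64) {
      private var elements: Array[V] = _
      // Assigned separately from the declaration, mirroring the NB comment above.
      elements = new Array[V](initialSize)
      private var count = 0

      def +=(value: V) {
        if (count == elements.length) {
          val bigger = new Array[V](elements.length * 2)
          Array.copy(elements, 0, bigger, 0, count)
          elements = bigger
        }
        elements(count) = value
        count += 1
      }
    }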
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/examples/src/main/scala/org/apache/spark/streaming/examples/ActorWordCount.scala
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
----------------------------------------------------------------------
diff --cc streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
index ca0c905,7f9dab0..80dcf87
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala
@@@ -309,13 -304,13 +308,13 @@@ class JavaStreamingContext(val ssc: Str
     * @tparam F Input format for reading HDFS file
     */
    def fileStream[K, V, F <: NewInputFormat[K, V]](directory: String): JavaPairDStream[K, V] = {
 -    implicit val cmk: ClassManifest[K] =
 -      implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[K]]
 -    implicit val cmv: ClassManifest[V] =
 -      implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[V]]
 -    implicit val cmf: ClassManifest[F] =
 -      implicitly[ClassManifest[AnyRef]].asInstanceOf[ClassManifest[F]]
 +    implicit val cmk: ClassTag[K] =
 +      implicitly[ClassTag[AnyRef]].asInstanceOf[ClassTag[K]]
 +    implicit val cmv: ClassTag[V] =
 +      implicitly[ClassTag[AnyRef]].asInstanceOf[ClassTag[V]]
 +    implicit val cmf: ClassTag[F] =
 +      implicitly[ClassTag[AnyRef]].asInstanceOf[ClassTag[F]]
-     ssc.fileStream[K, V, F](directory);
+     ssc.fileStream[K, V, F](directory)
    }
  
    /**

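The cast-based implicits above work around type erasure on the Java side: a Java caller cannot supply a ClassTag, so the wrapper fabricates one from ClassTag[AnyRef]. A standalone sketch of the same idea (fakeClassTag is an illustrative name, not an identifier in this commit):

    import scala.reflect.ClassTag

    // Illustrative only: manufacture a ClassTag when the element type is erased,
    // as Java-facing APIs must do. The tag reports AnyRef as the runtime class,
    // which is acceptable when only reference types can reach it from Java.
    def fakeClassTag[T]: ClassTag[T] =
      implicitly[ClassTag[AnyRef]].asInstanceOf[ClassTag[T]]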
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/streaming/src/main/scala/org/apache/spark/streaming/dstream/FlumeInputDStream.scala
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/95d8dbce/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocationHandler.scala
----------------------------------------------------------------------
