Repository: spark
Updated Branches:
  refs/heads/master 64f3175bf -> 6dfe38a03


[SPARK-4397][Core] Cleanup 'import SparkContext._' in core

This PR cleans up `import SparkContext._` in core for SPARK-4397 (#3262) to 
prove that it really works well.

Author: zsxwing <zsxw...@gmail.com>

Closes #3530 from zsxwing/SPARK-4397-cleanup and squashes the following commits:

04e2273 [zsxwing] Cleanup 'import SparkContext._' in core


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6dfe38a0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6dfe38a0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6dfe38a0

Branch: refs/heads/master
Commit: 6dfe38a03a619282815b4032243a20414eea712e
Parents: 64f3175
Author: zsxwing <zsxw...@gmail.com>
Authored: Tue Dec 2 00:18:41 2014 -0800
Committer: Reynold Xin <r...@databricks.com>
Committed: Tue Dec 2 00:18:41 2014 -0800

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/api/java/JavaRDDLike.scala    | 1 -
 .../main/scala/org/apache/spark/api/python/PythonRDD.scala    | 1 -
 core/src/main/scala/org/apache/spark/package.scala            | 4 ++--
 .../src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala | 1 -
 .../main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala  | 1 -
 .../main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala | 7 +++----
 .../main/scala/org/apache/spark/rdd/PairRDDFunctions.scala    | 2 --
 core/src/main/scala/org/apache/spark/rdd/RDD.scala            | 6 +++---
 .../scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala | 1 -
 .../main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala  | 1 -
 .../apache/spark/util/random/StratifiedSamplingUtils.scala    | 1 -
 core/src/test/scala/org/apache/spark/AccumulatorSuite.scala   | 1 -
 core/src/test/scala/org/apache/spark/CheckpointSuite.scala    | 1 -
 .../src/test/scala/org/apache/spark/ContextCleanerSuite.scala | 1 -
 core/src/test/scala/org/apache/spark/DistributedSuite.scala   | 1 -
 .../scala/org/apache/spark/ExternalShuffleServiceSuite.scala  | 1 -
 core/src/test/scala/org/apache/spark/FailureSuite.scala       | 1 -
 core/src/test/scala/org/apache/spark/FileServerSuite.scala    | 1 -
 core/src/test/scala/org/apache/spark/FutureActionSuite.scala  | 1 -
 .../test/scala/org/apache/spark/ImplicitOrderingSuite.scala   | 1 -
 .../test/scala/org/apache/spark/JobCancellationSuite.scala    | 1 -
 core/src/test/scala/org/apache/spark/PartitioningSuite.scala  | 1 -
 core/src/test/scala/org/apache/spark/ShuffleSuite.scala       | 1 -
 core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala | 1 -
 .../scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala     | 1 -
 core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala | 1 -
 .../scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala    | 1 -
 core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala       | 2 --
 core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala   | 1 -
 .../scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala  | 1 -
 .../org/apache/spark/scheduler/ReplayListenerSuite.scala      | 1 -
 .../scala/org/apache/spark/scheduler/SparkListenerSuite.scala | 1 -
 .../spark/serializer/KryoSerializerDistributedSuite.scala     | 1 -
 core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala | 1 -
 .../spark/util/collection/ExternalAppendOnlyMapSuite.scala    | 1 -
 .../apache/spark/util/collection/ExternalSorterSuite.scala    | 1 -
 36 files changed, 8 insertions(+), 44 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala 
b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
index 5a8e5bb..ac42294 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala
@@ -28,7 +28,6 @@ import com.google.common.base.Optional
 import org.apache.hadoop.io.compress.CompressionCodec
 
 import org.apache.spark._
-import org.apache.spark.SparkContext._
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.api.java.JavaPairRDD._
 import org.apache.spark.api.java.JavaSparkContext.fakeClassTag

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala 
b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
index e0bc00e..bad40e6 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
@@ -34,7 +34,6 @@ import org.apache.hadoop.io.compress.CompressionCodec
 import org.apache.hadoop.mapred.{InputFormat, OutputFormat, JobConf}
 import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat, 
OutputFormat => NewOutputFormat}
 import org.apache.spark._
-import org.apache.spark.SparkContext._
 import org.apache.spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD}
 import org.apache.spark.broadcast.Broadcast
 import org.apache.spark.rdd.RDD

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/main/scala/org/apache/spark/package.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/package.scala 
b/core/src/main/scala/org/apache/spark/package.scala
index 436dbed..5ad73c3 100644
--- a/core/src/main/scala/org/apache/spark/package.scala
+++ b/core/src/main/scala/org/apache/spark/package.scala
@@ -27,8 +27,8 @@ package org.apache
  * contains operations available only on RDDs of Doubles; and
  * [[org.apache.spark.rdd.SequenceFileRDDFunctions]] contains operations 
available on RDDs that can
  * be saved as SequenceFiles. These operations are automatically available on 
any RDD of the right
- * type (e.g. RDD[(Int, Int)] through implicit conversions when you
- * `import org.apache.spark.SparkContext._`.
+ * type (e.g. RDD[(Int, Int)] through implicit conversions except 
`saveAsSequenceFile`. You need to
+ * `import org.apache.spark.SparkContext._` to make `saveAsSequenceFile` work.
  *
  * Java programmers should reference the [[org.apache.spark.api.java]] package
  * for Spark programming APIs in Java.

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala 
b/core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala
index 9f9f10b..646df28 100644
--- a/core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala
@@ -27,7 +27,6 @@ import org.apache.spark.{ComplexFutureAction, FutureAction, 
Logging}
 
 /**
  * A set of asynchronous RDD actions available through an implicit conversion.
- * Import `org.apache.spark.SparkContext._` at the top of your program to use 
these functions.
  */
 class AsyncRDDActions[T: ClassTag](self: RDD[T]) extends Serializable with 
Logging {
 

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala 
b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
index e0494ee..e66f83b 100644
--- a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala
@@ -27,7 +27,6 @@ import org.apache.spark.util.StatCounter
 
 /**
  * Extra functions available on RDDs of Doubles through an implicit conversion.
- * Import `org.apache.spark.SparkContext._` at the top of your program to use 
these functions.
  */
 class DoubleRDDFunctions(self: RDD[Double]) extends Logging with Serializable {
   /** Add up the elements in this RDD. */

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala 
b/core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala
index d0dbfef..144f679 100644
--- a/core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/OrderedRDDFunctions.scala
@@ -24,10 +24,9 @@ import org.apache.spark.annotation.DeveloperApi
 
 /**
  * Extra functions available on RDDs of (key, value) pairs where the key is 
sortable through
- * an implicit conversion. Import `org.apache.spark.SparkContext._` at the top 
of your program to
- * use these functions. They will work with any key type `K` that has an 
implicit `Ordering[K]` in
- * scope.  Ordering objects already exist for all of the standard primitive 
types.  Users can also
- * define their own orderings for custom types, or to override the default 
ordering.  The implicit
+ * an implicit conversion. They will work with any key type `K` that has an 
implicit `Ordering[K]`
+ * in scope. Ordering objects already exist for all of the standard primitive 
types. Users can also
+ * define their own orderings for custom types, or to override the default 
ordering. The implicit
  * ordering that is in the closest scope will be used.
  *
  * {{{

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala 
b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
index 8c2c959..e78e576 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
@@ -37,7 +37,6 @@ RecordWriter => NewRecordWriter}
 
 import org.apache.spark._
 import org.apache.spark.Partitioner.defaultPartitioner
-import org.apache.spark.SparkContext._
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.executor.{DataWriteMethod, OutputMetrics}
@@ -50,7 +49,6 @@ import org.apache.spark.util.random.StratifiedSamplingUtils
 
 /**
  * Extra functions available on RDDs of (key, value) pairs through an implicit 
conversion.
- * Import `org.apache.spark.SparkContext._` at the top of your program to use 
these functions.
  */
 class PairRDDFunctions[K, V](self: RDD[(K, V)])
     (implicit kt: ClassTag[K], vt: ClassTag[V], ord: Ordering[K] = null)

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/main/scala/org/apache/spark/rdd/RDD.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala 
b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 3add4a7..8dfd952 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -34,7 +34,6 @@ import org.apache.hadoop.mapred.TextOutputFormat
 
 import org.apache.spark._
 import org.apache.spark.Partitioner._
-import org.apache.spark.SparkContext._
 import org.apache.spark.annotation.{DeveloperApi, Experimental}
 import org.apache.spark.api.java.JavaRDD
 import org.apache.spark.broadcast.Broadcast
@@ -58,8 +57,9 @@ import org.apache.spark.util.random.{BernoulliSampler, 
PoissonSampler, Bernoulli
  * Doubles; and
  * [[org.apache.spark.rdd.SequenceFileRDDFunctions]] contains operations 
available on RDDs that
  * can be saved as SequenceFiles.
- * These operations are automatically available on any RDD of the right type 
(e.g. RDD[(Int, Int)]
- * through implicit conversions when you `import 
org.apache.spark.SparkContext._`.
+ * All operations are automatically available on any RDD of the right type 
(e.g. RDD[(Int, Int)]
+ * through implicit conversions except `saveAsSequenceFile`. You need to
+ * `import org.apache.spark.SparkContext._` to make `saveAsSequenceFile` work.
  *
  * Internally, each RDD is characterized by five main properties:
  *

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala 
b/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
index 9a1efc8..2b48916 100644
--- a/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala
@@ -24,7 +24,6 @@ import org.apache.hadoop.mapred.JobConf
 import org.apache.hadoop.mapred.SequenceFileOutputFormat
 
 import org.apache.spark.Logging
-import org.apache.spark.SparkContext._
 
 /**
  * Extra functions available on RDDs of (key, value) pairs to create a Hadoop 
SequenceFile,

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala 
b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
index 18d2b50..b467744 100644
--- a/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
+++ b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala
@@ -20,7 +20,6 @@ package org.apache.spark.ui
 import scala.util.Random
 
 import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.SparkContext._
 import org.apache.spark.scheduler.SchedulingMode
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/main/scala/org/apache/spark/util/random/StratifiedSamplingUtils.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/util/random/StratifiedSamplingUtils.scala
 
b/core/src/main/scala/org/apache/spark/util/random/StratifiedSamplingUtils.scala
index 4fa357e..2ae308d 100644
--- 
a/core/src/main/scala/org/apache/spark/util/random/StratifiedSamplingUtils.scala
+++ 
b/core/src/main/scala/org/apache/spark/util/random/StratifiedSamplingUtils.scala
@@ -25,7 +25,6 @@ import scala.reflect.ClassTag
 import org.apache.commons.math3.distribution.PoissonDistribution
 
 import org.apache.spark.Logging
-import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala 
b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index 52d1d52..f087fc5 100644
--- a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -22,7 +22,6 @@ import scala.collection.mutable
 import org.scalatest.FunSuite
 import org.scalatest.Matchers
 
-import org.apache.spark.SparkContext._
 
 class AccumulatorSuite extends FunSuite with Matchers with LocalSparkContext {
 

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala 
b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
index a41914a..3b10b3a 100644
--- a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -23,7 +23,6 @@ import scala.reflect.ClassTag
 
 import org.scalatest.FunSuite
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.rdd._
 import org.apache.spark.storage.{BlockId, StorageLevel, TestBlockId}
 import org.apache.spark.util.Utils

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala 
b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
index 2e3fc5e..ae2ae7e 100644
--- a/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -28,7 +28,6 @@ import org.scalatest.concurrent.{PatienceConfiguration, 
Eventually}
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.time.SpanSugar._
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.storage._
 import org.apache.spark.shuffle.hash.HashShuffleManager

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/DistributedSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/DistributedSuite.scala 
b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
index 429199f..998f300 100644
--- a/core/src/test/scala/org/apache/spark/DistributedSuite.scala
+++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala
@@ -23,7 +23,6 @@ import org.scalatest.concurrent.Timeouts._
 import org.scalatest.Matchers
 import org.scalatest.time.{Millis, Span}
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.storage.{RDDBlockId, StorageLevel}
 
 class NotSerializableClass

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/ExternalShuffleServiceSuite.scala
----------------------------------------------------------------------
diff --git 
a/core/src/test/scala/org/apache/spark/ExternalShuffleServiceSuite.scala 
b/core/src/test/scala/org/apache/spark/ExternalShuffleServiceSuite.scala
index 55799f5..cc3592e 100644
--- a/core/src/test/scala/org/apache/spark/ExternalShuffleServiceSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ExternalShuffleServiceSuite.scala
@@ -21,7 +21,6 @@ import java.util.concurrent.atomic.AtomicInteger
 
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.network.TransportContext
 import org.apache.spark.network.netty.SparkTransportConf
 import org.apache.spark.network.server.TransportServer

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/FailureSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FailureSuite.scala 
b/core/src/test/scala/org/apache/spark/FailureSuite.scala
index 2229e6a..1212d0b 100644
--- a/core/src/test/scala/org/apache/spark/FailureSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -19,7 +19,6 @@ package org.apache.spark
 
 import org.scalatest.FunSuite
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.util.NonSerializable
 
 import java.io.NotSerializableException

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/FileServerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FileServerSuite.scala 
b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
index 379c2a6..4942654 100644
--- a/core/src/test/scala/org/apache/spark/FileServerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -23,7 +23,6 @@ import java.util.jar.{JarEntry, JarOutputStream}
 import com.google.common.io.ByteStreams
 import org.scalatest.FunSuite
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.util.Utils
 
 class FileServerSuite extends FunSuite with LocalSparkContext {

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/FutureActionSuite.scala 
b/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
index db9c25f..f5cdb01 100644
--- a/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FutureActionSuite.scala
@@ -22,7 +22,6 @@ import scala.concurrent.duration.Duration
 
 import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
 
-import org.apache.spark.SparkContext._
 
 class FutureActionSuite extends FunSuite with BeforeAndAfter with Matchers 
with LocalSparkContext {
 

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala 
b/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
index 8e4a9e2..d895230 100644
--- a/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ImplicitOrderingSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark
 import org.scalatest.FunSuite
 
 import org.apache.spark.rdd.RDD
-import org.apache.spark.SparkContext._
 
 class ImplicitOrderingSuite extends FunSuite with LocalSparkContext {
   // Tests that PairRDDFunctions grabs an implicit Ordering in various cases 
where it should.

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala 
b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
index a57430e..41ed2bc 100644
--- a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala
@@ -27,7 +27,6 @@ import scala.concurrent.future
 import org.scalatest.{BeforeAndAfter, FunSuite}
 import org.scalatest.Matchers
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskStart}
 
 /**

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala 
b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
index 646ede3..b753231 100644
--- a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
+++ b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala
@@ -22,7 +22,6 @@ import scala.math.abs
 
 import org.scalatest.{FunSuite, PrivateMethodTester}
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.util.StatCounter
 

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala 
b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index 5d20b4d..5a133c0 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark
 import org.scalatest.FunSuite
 import org.scalatest.Matchers
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
 import org.apache.spark.rdd.{CoGroupedRDD, OrderedRDDFunctions, RDD, 
ShuffledRDD, SubtractedRDD}
 import org.apache.spark.serializer.KryoSerializer

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala 
b/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
index 8577e4a..41d6ea2 100644
--- a/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/StatusTrackerSuite.scala
@@ -25,7 +25,6 @@ import org.scalatest.{Matchers, FunSuite}
 import org.scalatest.concurrent.Eventually._
 
 import org.apache.spark.JobExecutionStatus._
-import org.apache.spark.SparkContext._
 
 class StatusTrackerSuite extends FunSuite with Matchers with LocalSparkContext 
{
 

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
----------------------------------------------------------------------
diff --git 
a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala 
b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
index 3b833f2..f2b0ea1 100644
--- a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala
@@ -27,7 +27,6 @@ import org.scalatest.{BeforeAndAfterAll, FunSuite}
 import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.{SparkContext, SparkException, LocalSparkContext}
 
 class AsyncRDDActionsSuite extends FunSuite with BeforeAndAfterAll with 
Timeouts {

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala 
b/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
index f89bdb6..de30653 100644
--- a/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark.rdd
 import org.scalatest.FunSuite
 
 import org.apache.spark._
-import org.apache.spark.SparkContext._
 
 class DoubleRDDSuite extends FunSuite with SharedSparkContext {
   // Verify tests on the histogram functionality. We test with both evenly

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
----------------------------------------------------------------------
diff --git 
a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala 
b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
index 3620e25..108f70a 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala
@@ -29,7 +29,6 @@ import org.apache.hadoop.mapreduce.{JobContext => 
NewJobContext, OutputCommitter
 OutputFormat => NewOutputFormat, RecordWriter => NewRecordWriter,
 TaskAttemptContext => NewTaskAttempContext}
 import org.apache.spark.{Partitioner, SharedSparkContext}
-import org.apache.spark.SparkContext._
 import org.apache.spark.util.Utils
 
 import org.scalatest.FunSuite

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala 
b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index e079ca3..6d9be79 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -24,7 +24,6 @@ import scala.reflect.ClassTag
 import org.scalatest.FunSuite
 
 import org.apache.spark._
-import org.apache.spark.SparkContext._
 import org.apache.spark.util.Utils
 
 import org.apache.spark.api.java.{JavaRDD, JavaSparkContext}
@@ -97,7 +96,6 @@ class RDDSuite extends FunSuite with SharedSparkContext {
   }
 
   test("partitioner aware union") {
-    import SparkContext._
     def makeRDDWithPartitioner(seq: Seq[Int]) = {
       sc.makeRDD(seq, 1)
         .map(x => (x, null))

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala 
b/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
index 6569176..a40f2ff 100644
--- a/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
@@ -21,7 +21,6 @@ import org.scalatest.FunSuite
 import org.scalatest.Matchers
 
 import org.apache.spark.{Logging, SharedSparkContext}
-import org.apache.spark.SparkContext._
 
 class SortingSuite extends FunSuite with SharedSparkContext with Matchers with 
Logging {
 

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
----------------------------------------------------------------------
diff --git 
a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala 
b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index bdd721d..436eea4 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -27,7 +27,6 @@ import org.scalatest.concurrent.Timeouts
 import org.scalatest.time.SpanSugar._
 
 import org.apache.spark._
-import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
 import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala
----------------------------------------------------------------------
diff --git 
a/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala 
b/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala
index e05f373..90bdfe0 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala
@@ -22,7 +22,6 @@ import java.io.{File, PrintWriter}
 import org.json4s.jackson.JsonMethods._
 import org.scalatest.{BeforeAndAfter, FunSuite}
 
-import org.apache.spark.SparkContext._
 import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.io.CompressionCodec

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
----------------------------------------------------------------------
diff --git 
a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala 
b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
index abe0dc3..b276343 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
@@ -25,7 +25,6 @@ import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, 
FunSuite}
 import org.scalatest.Matchers
 
 import org.apache.spark.{LocalSparkContext, SparkContext}
-import org.apache.spark.SparkContext._
 import org.apache.spark.executor.TaskMetrics
 
 class SparkListenerSuite extends FunSuite with LocalSparkContext with Matchers

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
----------------------------------------------------------------------
diff --git 
a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
 
b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
index 11e8c9c..855f1b6 100644
--- 
a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
+++ 
b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerDistributedSuite.scala
@@ -23,7 +23,6 @@ import com.esotericsoftware.kryo.Kryo
 import org.scalatest.FunSuite
 
 import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkEnv, 
TestUtils}
-import org.apache.spark.SparkContext._
 import org.apache.spark.serializer.KryoDistributedTest._
 
 class KryoSerializerDistributedSuite extends FunSuite {

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala 
b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
index d2857b8..787f4c2 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISeleniumSuite.scala
@@ -27,7 +27,6 @@ import org.scalatest.selenium.WebBrowser
 import org.scalatest.time.SpanSugar._
 
 import org.apache.spark._
-import org.apache.spark.SparkContext._
 import org.apache.spark.LocalSparkContext._
 import org.apache.spark.api.java.StorageLevels
 import org.apache.spark.shuffle.FetchFailedException

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
----------------------------------------------------------------------
diff --git 
a/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
 
b/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
index 511d76c..48f79ea 100644
--- 
a/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
+++ 
b/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
@@ -22,7 +22,6 @@ import scala.collection.mutable.ArrayBuffer
 import org.scalatest.FunSuite
 
 import org.apache.spark._
-import org.apache.spark.SparkContext._
 import org.apache.spark.io.CompressionCodec
 
 class ExternalAppendOnlyMapSuite extends FunSuite with LocalSparkContext {

http://git-wip-us.apache.org/repos/asf/spark/blob/6dfe38a0/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala
----------------------------------------------------------------------
diff --git 
a/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala
 
b/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala
index 3cb42d4..72d9679 100644
--- 
a/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala
+++ 
b/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala
@@ -22,7 +22,6 @@ import scala.collection.mutable.ArrayBuffer
 import org.scalatest.{PrivateMethodTester, FunSuite}
 
 import org.apache.spark._
-import org.apache.spark.SparkContext._
 
 import scala.util.Random
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to