Repository: spark
Updated Branches:
  refs/heads/branch-1.6 93ac30741 -> fafeca36e


[SPARK-11615] Drop @VisibleForTesting annotation

See http://search-hadoop.com/m/q3RTtjpe8r1iRbTj2 for discussion.

Summary: addition of VisibleForTesting annotation resulted in spark-shell 
malfunctioning.

Author: tedyu <yuzhih...@gmail.com>

Closes #9585 from tedyu/master.

(cherry picked from commit 900917541651abe7125f0d205085d2ab6a00d92c)
Signed-off-by: Andrew Or <and...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/fafeca36
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/fafeca36
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/fafeca36

Branch: refs/heads/branch-1.6
Commit: fafeca36eaa04e6e54bcab863959cce710698e30
Parents: 93ac307
Author: tedyu <yuzhih...@gmail.com>
Authored: Tue Nov 10 16:52:26 2015 -0800
Committer: Andrew Or <and...@databricks.com>
Committed: Tue Nov 10 16:53:09 2015 -0800

----------------------------------------------------------------------
 core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala   | 8 ++++----
 .../scala/org/apache/spark/ui/jobs/JobProgressListener.scala | 2 --
 .../org/apache/spark/util/AsynchronousListenerBus.scala      | 5 ++---
 .../org/apache/spark/util/collection/ExternalSorter.scala    | 3 +--
 scalastyle-config.xml                                        | 7 +++++++
 .../org/apache/spark/sql/execution/QueryExecution.scala      | 3 ---
 .../apache/spark/network/shuffle/ShuffleTestAccessor.scala   | 1 -
 7 files changed, 14 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/fafeca36/core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala 
b/core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala
index c72b588..464027f 100644
--- a/core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala
+++ b/core/src/main/scala/org/apache/spark/rpc/netty/Inbox.scala
@@ -21,8 +21,6 @@ import javax.annotation.concurrent.GuardedBy
 
 import scala.util.control.NonFatal
 
-import com.google.common.annotations.VisibleForTesting
-
 import org.apache.spark.{Logging, SparkException}
 import org.apache.spark.rpc.{RpcAddress, RpcEndpoint, ThreadSafeRpcEndpoint}
 
@@ -193,8 +191,10 @@ private[netty] class Inbox(
 
   def isEmpty: Boolean = inbox.synchronized { messages.isEmpty }
 
-  /** Called when we are dropping a message. Test cases override this to test 
message dropping. */
-  @VisibleForTesting
+  /**
+   * Called when we are dropping a message. Test cases override this to test 
message dropping.
+   * Exposed for testing.
+   */
   protected def onDrop(message: InboxMessage): Unit = {
     logWarning(s"Drop $message because $endpointRef is stopped")
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/fafeca36/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala 
b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
index 77d034f..ca37829 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
@@ -21,8 +21,6 @@ import java.util.concurrent.TimeoutException
 
 import scala.collection.mutable.{HashMap, HashSet, ListBuffer}
 
-import com.google.common.annotations.VisibleForTesting
-
 import org.apache.spark._
 import org.apache.spark.annotation.DeveloperApi
 import org.apache.spark.executor.TaskMetrics

http://git-wip-us.apache.org/repos/asf/spark/blob/fafeca36/core/src/main/scala/org/apache/spark/util/AsynchronousListenerBus.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/util/AsynchronousListenerBus.scala 
b/core/src/main/scala/org/apache/spark/util/AsynchronousListenerBus.scala
index b8481ea..b3b54af 100644
--- a/core/src/main/scala/org/apache/spark/util/AsynchronousListenerBus.scala
+++ b/core/src/main/scala/org/apache/spark/util/AsynchronousListenerBus.scala
@@ -20,7 +20,6 @@ package org.apache.spark.util
 import java.util.concurrent._
 import java.util.concurrent.atomic.AtomicBoolean
 
-import com.google.common.annotations.VisibleForTesting
 import org.apache.spark.SparkContext
 
 /**
@@ -119,8 +118,8 @@ private[spark] abstract class AsynchronousListenerBus[L <: 
AnyRef, E](name: Stri
    * For testing only. Wait until there are no more events in the queue, or 
until the specified
    * time has elapsed. Throw `TimeoutException` if the specified time elapsed 
before the queue
    * emptied.
+   * Exposed for testing.
    */
-  @VisibleForTesting
   @throws(classOf[TimeoutException])
   def waitUntilEmpty(timeoutMillis: Long): Unit = {
     val finishTime = System.currentTimeMillis + timeoutMillis
@@ -137,8 +136,8 @@ private[spark] abstract class AsynchronousListenerBus[L <: 
AnyRef, E](name: Stri
 
   /**
    * For testing only. Return whether the listener daemon thread is still 
alive.
+   * Exposed for testing.
    */
-  @VisibleForTesting
   def listenerThreadIsAlive: Boolean = listenerThread.isAlive
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/fafeca36/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala 
b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
index a44e72b..bd6844d 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala
@@ -23,7 +23,6 @@ import java.util.Comparator
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable
 
-import com.google.common.annotations.VisibleForTesting
 import com.google.common.io.ByteStreams
 
 import org.apache.spark._
@@ -608,8 +607,8 @@ private[spark] class ExternalSorter[K, V, C](
    *
   * For now, we just merge all the spilled files in one pass, but this can 
be modified to
 support hierarchical merging.
+   * Exposed for testing.
    */
-  @VisibleForTesting
   def partitionedIterator: Iterator[(Int, Iterator[Product2[K, C]])] = {
     val usingMap = aggregator.isDefined
     val collection: WritablePartitionedPairCollection[K, C] = if (usingMap) 
map else buffer

http://git-wip-us.apache.org/repos/asf/spark/blob/fafeca36/scalastyle-config.xml
----------------------------------------------------------------------
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index 64a0c71..050c3f3 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -150,6 +150,13 @@ This file is divided into 3 sections:
       // scalastyle:on println]]></customMessage>
   </check>
 
+  <check customId="visiblefortesting" level="error" 
class="org.scalastyle.file.RegexChecker" enabled="true">
+    <parameters><parameter 
name="regex">@VisibleForTesting</parameter></parameters>
+    <customMessage><![CDATA[
+      @VisibleForTesting causes classpath issues. Please note this in the 
+      javadoc instead (SPARK-11615).
+    ]]></customMessage>
+  </check>
+
   <check customId="classforname" level="error" 
class="org.scalastyle.file.RegexChecker" enabled="true">
     <parameters><parameter name="regex">Class\.forName</parameter></parameters>
     <customMessage><![CDATA[

http://git-wip-us.apache.org/repos/asf/spark/blob/fafeca36/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
index c2142d0..77843f5 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
@@ -17,8 +17,6 @@
 
 package org.apache.spark.sql.execution
 
-import com.google.common.annotations.VisibleForTesting
-
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.SQLContext
 import org.apache.spark.sql.catalyst.InternalRow
@@ -33,7 +31,6 @@ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
  */
 class QueryExecution(val sqlContext: SQLContext, val logical: LogicalPlan) {
 
-  @VisibleForTesting
   def assertAnalyzed(): Unit = sqlContext.analyzer.checkAnalysis(analyzed)
 
   lazy val analyzed: LogicalPlan = sqlContext.analyzer.execute(logical)

http://git-wip-us.apache.org/repos/asf/spark/blob/fafeca36/yarn/src/test/scala/org/apache/spark/network/shuffle/ShuffleTestAccessor.scala
----------------------------------------------------------------------
diff --git 
a/yarn/src/test/scala/org/apache/spark/network/shuffle/ShuffleTestAccessor.scala
 
b/yarn/src/test/scala/org/apache/spark/network/shuffle/ShuffleTestAccessor.scala
index aa46ec5..94bf579 100644
--- 
a/yarn/src/test/scala/org/apache/spark/network/shuffle/ShuffleTestAccessor.scala
+++ 
b/yarn/src/test/scala/org/apache/spark/network/shuffle/ShuffleTestAccessor.scala
@@ -19,7 +19,6 @@ package org.apache.spark.network.shuffle
 import java.io.{IOException, File}
 import java.util.concurrent.ConcurrentMap
 
-import com.google.common.annotations.VisibleForTesting
 import org.apache.hadoop.yarn.api.records.ApplicationId
 import org.fusesource.leveldbjni.JniDBFactory
 import org.iq80.leveldb.{DB, Options}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to