Repository: spark
Updated Branches:
  refs/heads/branch-1.6 ea1a51fc1 -> a4a71b0a5


[SPARK-11831][CORE][TESTS] Use port 0 to avoid port conflicts in tests

Use port 0 (an OS-assigned ephemeral port) instead of hard-coded ports such as 12345, fixing flakiness caused by port contention between concurrently running tests.
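
Passing port 0 tells the OS to bind an ephemeral port that is free by
construction, so suites running concurrently on the same machine can no
longer race for the same hard-coded port. A minimal sketch of the
mechanism, using only the standard library (plain java.net.ServerSocket
rather than Spark's RpcEnv):

    import java.net.ServerSocket

    // Bind to port 0: the OS picks any currently free ephemeral port.
    val socket = new ServerSocket(0)
    // Read back the port that was actually assigned.
    val boundPort = socket.getLocalPort
    println(s"bound to $boundPort")  // e.g. 49213; no collision possible
    socket.close()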

Author: Shixiong Zhu <shixi...@databricks.com>

Closes #9841 from zsxwing/SPARK-11831.

(cherry picked from commit 90d384dcbc1d1a3466cf8bae570a26f23012c102)
Signed-off-by: Andrew Or <and...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a4a71b0a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a4a71b0a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a4a71b0a

Branch: refs/heads/branch-1.6
Commit: a4a71b0a51579bbb61930fc857a55f566a115195
Parents: ea1a51f
Author: Shixiong Zhu <shixi...@databricks.com>
Authored: Thu Nov 19 14:49:25 2015 -0800
Committer: Andrew Or <and...@databricks.com>
Committed: Thu Nov 19 14:49:34 2015 -0800

----------------------------------------------------------------------
 .../org/apache/spark/rpc/RpcEnvSuite.scala      | 24 ++++++++++----------
 .../apache/spark/rpc/akka/AkkaRpcEnvSuite.scala |  4 ++--
 2 files changed, 14 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/a4a71b0a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
index 834e474..2f55006 100644
--- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala
@@ -39,7 +39,7 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
 
   override def beforeAll(): Unit = {
     val conf = new SparkConf()
-    env = createRpcEnv(conf, "local", 12345)
+    env = createRpcEnv(conf, "local", 0)
   }
 
   override def afterAll(): Unit = {
@@ -76,7 +76,7 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
       }
     })
 
-    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 13345, clientMode = true)
+    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 0, clientMode = true)
     // Use anotherEnv to find out the RpcEndpointRef
     val rpcEndpointRef = anotherEnv.setupEndpointRef("local", env.address, "send-remotely")
     try {
@@ -130,7 +130,7 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
       }
     })
 
-    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 13345, clientMode = true)
+    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 0, clientMode = true)
     // Use anotherEnv to find out the RpcEndpointRef
     val rpcEndpointRef = anotherEnv.setupEndpointRef("local", env.address, "ask-remotely")
     try {
@@ -158,7 +158,7 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
     val shortProp = "spark.rpc.short.timeout"
     conf.set("spark.rpc.retry.wait", "0")
     conf.set("spark.rpc.numRetries", "1")
-    val anotherEnv = createRpcEnv(conf, "remote", 13345, clientMode = true)
+    val anotherEnv = createRpcEnv(conf, "remote", 0, clientMode = true)
     // Use anotherEnv to find out the RpcEndpointRef
     val rpcEndpointRef = anotherEnv.setupEndpointRef("local", env.address, "ask-timeout")
     try {
@@ -417,7 +417,7 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
       }
     })
 
-    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 13345, clientMode = true)
+    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 0, clientMode = true)
     // Use anotherEnv to find out the RpcEndpointRef
     val rpcEndpointRef = anotherEnv.setupEndpointRef("local", env.address, "sendWithReply-remotely")
     try {
@@ -457,7 +457,7 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
       }
     })
 
-    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 13345, clientMode = true)
+    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 0, clientMode = true)
     // Use anotherEnv to find out the RpcEndpointRef
     val rpcEndpointRef = anotherEnv.setupEndpointRef(
       "local", env.address, "sendWithReply-remotely-error")
@@ -497,7 +497,7 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
 
     })
 
-    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 13345, clientMode = true)
+    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 0, clientMode = true)
     // Use anotherEnv to find out the RpcEndpointRef
     val rpcEndpointRef = anotherEnv.setupEndpointRef(
       "local", env.address, "network-events")
@@ -543,7 +543,7 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
       }
     })
 
-    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 13345, clientMode = true)
+    val anotherEnv = createRpcEnv(new SparkConf(), "remote", 0, clientMode = true)
     // Use anotherEnv to find out the RpcEndpointRef
     val rpcEndpointRef = anotherEnv.setupEndpointRef(
       "local", env.address, "sendWithReply-unserializable-error")
@@ -571,8 +571,8 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
     conf.set("spark.authenticate", "true")
     conf.set("spark.authenticate.secret", "good")
 
-    val localEnv = createRpcEnv(conf, "authentication-local", 13345)
-    val remoteEnv = createRpcEnv(conf, "authentication-remote", 14345, clientMode = true)
+    val localEnv = createRpcEnv(conf, "authentication-local", 0)
+    val remoteEnv = createRpcEnv(conf, "authentication-remote", 0, clientMode = true)
 
     try {
       @volatile var message: String = null
@@ -602,8 +602,8 @@ abstract class RpcEnvSuite extends SparkFunSuite with BeforeAndAfterAll {
     conf.set("spark.authenticate", "true")
     conf.set("spark.authenticate.secret", "good")
 
-    val localEnv = createRpcEnv(conf, "authentication-local", 13345)
-    val remoteEnv = createRpcEnv(conf, "authentication-remote", 14345, clientMode = true)
+    val localEnv = createRpcEnv(conf, "authentication-local", 0)
+    val remoteEnv = createRpcEnv(conf, "authentication-remote", 0, clientMode = true)
 
     try {
       localEnv.setupEndpoint("ask-authentication", new RpcEndpoint {

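Because the listen port is now OS-assigned, the tests above recover the
real address from env.address after the RpcEnv has bound, rather than
assuming a known port. createRpcEnv is a suite-specific helper, so here is
a hedged sketch of the same bind-then-discover pattern with plain sockets:

    import java.net.{ServerSocket, Socket}

    // Server side: bind to 0, then publish the port the OS actually chose.
    val server = new ServerSocket(0)
    val actualPort = server.getLocalPort

    // Client side: connect via the discovered port, never a hard-coded one,
    // mirroring how the tests use env.address instead of 13345.
    val client = new Socket("localhost", actualPort)
    val accepted = server.accept()
    assert(client.getPort == actualPort)

    client.close(); accepted.close(); server.close()
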
http://git-wip-us.apache.org/repos/asf/spark/blob/a4a71b0a/core/src/test/scala/org/apache/spark/rpc/akka/AkkaRpcEnvSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/rpc/akka/AkkaRpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/akka/AkkaRpcEnvSuite.scala
index 6478ab5..7aac027 100644
--- a/core/src/test/scala/org/apache/spark/rpc/akka/AkkaRpcEnvSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rpc/akka/AkkaRpcEnvSuite.scala
@@ -40,7 +40,7 @@ class AkkaRpcEnvSuite extends RpcEnvSuite {
     })
     val conf = new SparkConf()
     val newRpcEnv = new AkkaRpcEnvFactory().create(
-      RpcEnvConfig(conf, "test", "localhost", 12346, new SecurityManager(conf), false))
+      RpcEnvConfig(conf, "test", "localhost", 0, new SecurityManager(conf), false))
     try {
       val newRef = newRpcEnv.setupEndpointRef("local", ref.address, "test_endpoint")
       assert(s"akka.tcp://local@${env.address}/user/test_endpoint" ===
@@ -59,7 +59,7 @@ class AkkaRpcEnvSuite extends RpcEnvSuite {
     val conf = SSLSampleConfigs.sparkSSLConfig()
     val securityManager = new SecurityManager(conf)
     val rpcEnv = new AkkaRpcEnvFactory().create(
-      RpcEnvConfig(conf, "test", "localhost", 12346, securityManager, false))
+      RpcEnvConfig(conf, "test", "localhost", 0, securityManager, false))
     try {
       val uri = rpcEnv.uriOf("local", RpcAddress("1.2.3.4", 12345), "test_endpoint")
       assert("akka.ssl.tcp://local@1.2.3.4:12345/user/test_endpoint" === uri)

