Repository: spark Updated Branches: refs/heads/master a0761ec70 -> 3a3f7100f
[SPARK-6490][Docs] Add docs for rpc configurations Added docs for rpc configurations and also fixed two places that should have been fixed in #5595. Author: zsxwing <zsxw...@gmail.com> Closes #5607 from zsxwing/SPARK-6490-docs and squashes the following commits: 25a6736 [zsxwing] Increase the default timeout to 120s 6e37c30 [zsxwing] Update docs 5577540 [zsxwing] Use spark.network.timeout as the default timeout if it presents 4f07174 [zsxwing] Fix unit tests 1c2cf26 [zsxwing] Add docs for rpc configurations Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3a3f7100 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3a3f7100 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3a3f7100 Branch: refs/heads/master Commit: 3a3f7100f4ead9b7ac50e9711ac50b603ebf6bea Parents: a0761ec Author: zsxwing <zsxw...@gmail.com> Authored: Tue Apr 21 18:37:53 2015 -0700 Committer: Reynold Xin <r...@databricks.com> Committed: Tue Apr 21 18:37:53 2015 -0700 ---------------------------------------------------------------------- .../scala/org/apache/spark/util/RpcUtils.scala | 6 ++-- .../scala/org/apache/spark/SparkConfSuite.scala | 2 +- .../org/apache/spark/rpc/RpcEnvSuite.scala | 2 +- docs/configuration.md | 34 ++++++++++++++++++-- 4 files changed, 38 insertions(+), 6 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/spark/blob/3a3f7100/core/src/main/scala/org/apache/spark/util/RpcUtils.scala ---------------------------------------------------------------------- diff --git a/core/src/main/scala/org/apache/spark/util/RpcUtils.scala b/core/src/main/scala/org/apache/spark/util/RpcUtils.scala index 5ae793e..f16cc8e 100644 --- a/core/src/main/scala/org/apache/spark/util/RpcUtils.scala +++ b/core/src/main/scala/org/apache/spark/util/RpcUtils.scala @@ -48,11 +48,13 @@ object RpcUtils { /** Returns the default Spark timeout to use for RPC ask 
operations. */ def askTimeout(conf: SparkConf): FiniteDuration = { - conf.getTimeAsSeconds("spark.rpc.askTimeout", "30s") seconds + conf.getTimeAsSeconds("spark.rpc.askTimeout", + conf.get("spark.network.timeout", "120s")) seconds } /** Returns the default Spark timeout to use for RPC remote endpoint lookup. */ def lookupTimeout(conf: SparkConf): FiniteDuration = { - conf.getTimeAsSeconds("spark.rpc.lookupTimeout", "30s") seconds + conf.getTimeAsSeconds("spark.rpc.lookupTimeout", + conf.get("spark.network.timeout", "120s")) seconds } } http://git-wip-us.apache.org/repos/asf/spark/blob/3a3f7100/core/src/test/scala/org/apache/spark/SparkConfSuite.scala ---------------------------------------------------------------------- diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala index d7d8014..272e6af 100644 --- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala +++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala @@ -227,7 +227,7 @@ class SparkConfSuite extends FunSuite with LocalSparkContext with ResetSystemPro test("akka deprecated configs") { val conf = new SparkConf() - assert(!conf.contains("spark.rpc.num.retries")) + assert(!conf.contains("spark.rpc.numRetries")) assert(!conf.contains("spark.rpc.retry.wait")) assert(!conf.contains("spark.rpc.askTimeout")) assert(!conf.contains("spark.rpc.lookupTimeout")) http://git-wip-us.apache.org/repos/asf/spark/blob/3a3f7100/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala ---------------------------------------------------------------------- diff --git a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala index 5fbda37..44c88b0 100644 --- a/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala +++ b/core/src/test/scala/org/apache/spark/rpc/RpcEnvSuite.scala @@ -156,7 +156,7 @@ abstract class RpcEnvSuite extends FunSuite with BeforeAndAfterAll { val 
conf = new SparkConf() conf.set("spark.rpc.retry.wait", "0") - conf.set("spark.rpc.num.retries", "1") + conf.set("spark.rpc.numRetries", "1") val anotherEnv = createRpcEnv(conf, "remote", 13345) // Use anotherEnv to find out the RpcEndpointRef val rpcEndpointRef = anotherEnv.setupEndpointRef("local", env.address, "ask-timeout") http://git-wip-us.apache.org/repos/asf/spark/blob/3a3f7100/docs/configuration.md ---------------------------------------------------------------------- diff --git a/docs/configuration.md b/docs/configuration.md index d9e9e67..d587b91 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -963,8 +963,9 @@ Apart from these, the following properties are also available, and may be useful <td> Default timeout for all network interactions. This config will be used in place of <code>spark.core.connection.ack.wait.timeout</code>, <code>spark.akka.timeout</code>, - <code>spark.storage.blockManagerSlaveTimeoutMs</code> or - <code>spark.shuffle.io.connectionTimeout</code>, if they are not configured. + <code>spark.storage.blockManagerSlaveTimeoutMs</code>, + <code>spark.shuffle.io.connectionTimeout</code>, <code>spark.rpc.askTimeout</code> or + <code>spark.rpc.lookupTimeout</code> if they are not configured. </td> </tr> <tr> @@ -982,6 +983,35 @@ Apart from these, the following properties are also available, and may be useful This is only relevant for the Spark shell. </td> </tr> +<tr> + <td><code>spark.rpc.numRetries</code></td> + <td>3</td> + <td> + Number of times to retry before an RPC task gives up. + An RPC task will run at most this number of times. + </td> +</tr> +<tr> + <td><code>spark.rpc.retry.wait</code></td> + <td>3s</td> + <td> + Duration for an RPC ask operation to wait before retrying. + </td> +</tr> +<tr> + <td><code>spark.rpc.askTimeout</code></td> + <td>120s</td> + <td> + Duration for an RPC ask operation to wait before timing out. 
+ </td> +</tr> +<tr> + <td><code>spark.rpc.lookupTimeout</code></td> + <td>120s</td> + <td> + Duration for an RPC remote endpoint lookup operation to wait before timing out. + </td> +</tr> </table> #### Scheduling --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org