Repository: spark
Updated Branches:
  refs/heads/branch-1.4 7b88e6a1e -> 28e8a6ea6
[SPARK-8282] [SPARKR] Make number of threads used in RBackend configurable

Read number of threads for RBackend from configuration.

[SPARK-8282] #comment Linking with JIRA

Author: Hossein <hoss...@databricks.com>

Closes #6730 from falaki/SPARK-8282 and squashes the following commits:

33b3d98 [Hossein] Documented new config parameter
70f2a9c [Hossein] Fixing import
ec44225 [Hossein] Read number of threads for RBackend from configuration

(cherry picked from commit 30ebf1a233295539c2455bd838bae7315711e1e2)
Signed-off-by: Andrew Or <and...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/28e8a6ea
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/28e8a6ea
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/28e8a6ea

Branch: refs/heads/branch-1.4
Commit: 28e8a6ea65fd08ab9cefc4d179d5c66ffefd3eb4
Parents: 7b88e6a
Author: Hossein <hoss...@databricks.com>
Authored: Wed Jun 10 13:18:48 2015 -0700
Committer: Andrew Or <and...@databricks.com>
Committed: Wed Jun 10 13:19:53 2015 -0700

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/api/r/RBackend.scala |  5 +++--
 docs/configuration.md                                | 12 ++++++++++++
 2 files changed, 15 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/28e8a6ea/core/src/main/scala/org/apache/spark/api/r/RBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/r/RBackend.scala b/core/src/main/scala/org/apache/spark/api/r/RBackend.scala
index d24c650..1a5f2bc 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RBackend.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RBackend.scala
@@ -29,7 +29,7 @@ import io.netty.channel.socket.nio.NioServerSocketChannel
 import io.netty.handler.codec.LengthFieldBasedFrameDecoder
 import io.netty.handler.codec.bytes.{ByteArrayDecoder, ByteArrayEncoder}
 
-import org.apache.spark.Logging
+import org.apache.spark.{Logging, SparkConf}
 
 /**
  * Netty-based backend server that is used to communicate between R and Java.
@@ -41,7 +41,8 @@ private[spark] class RBackend {
   private[this] var bossGroup: EventLoopGroup = null
 
   def init(): Int = {
-    bossGroup = new NioEventLoopGroup(2)
+    val conf = new SparkConf()
+    bossGroup = new NioEventLoopGroup(conf.getInt("spark.r.numRBackendThreads", 2))
     val workerGroup = bossGroup
     val handler = new RBackendHandler(this)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/28e8a6ea/docs/configuration.md
----------------------------------------------------------------------
diff --git a/docs/configuration.md b/docs/configuration.md
index 3960e7e..95a322f 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -1495,6 +1495,18 @@ Apart from these, the following properties are also available, and may be useful
 </tr>
 </table>
 
+#### SparkR
+<table class="table">
+<tr><th>Property Name</th><th>Default</th><th>Meaning</th></tr>
+<tr>
+  <td><code>spark.r.numRBackendThreads</code></td>
+  <td>2</td>
+  <td>
+    Number of threads used by RBackend to handle RPC calls from SparkR package.
+  </td>
+</tr>
+</table>
+
 #### Cluster Managers
 
 Each cluster manager in Spark has additional configuration options.
 Configurations can be found on the pages for each mode:
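
For reference, a minimal sketch of how the new property is consumed: `RBackend.init()` constructs a fresh `SparkConf`, which by default picks up `spark.*` JVM system properties, so a value supplied at submit time (for example `--conf spark.r.numRBackendThreads=4` in client mode) reaches the backend. The object name and the direct system-property assignment below are illustrative only, emulating what a submit-time `--conf` would set; they are not part of this commit.

```scala
import org.apache.spark.SparkConf

// Hypothetical demo (not part of this commit) of the lookup pattern
// RBackend.init() now uses to size its Netty event loop group.
object RBackendThreadsDemo {
  def main(args: Array[String]): Unit = {
    // Emulate `--conf spark.r.numRBackendThreads=4`, which in client mode
    // surfaces the setting as a spark.* JVM system property on the driver.
    sys.props("spark.r.numRBackendThreads") = "4"

    // new SparkConf() loads spark.* system properties by default, which is
    // how the backend sees the value; when unset, it falls back to 2.
    val conf = new SparkConf()
    val numThreads = conf.getInt("spark.r.numRBackendThreads", 2)
    println(s"RBackend would size its NioEventLoopGroup with $numThreads threads")
  }
}
```

Raising the value above the default of 2 should only matter when many concurrent RPC calls from the SparkR package hit the JVM backend at once.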