This is an automated email from the ASF dual-hosted git repository. gurwls223 pushed a commit to branch branch-3.0 in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push: new 742e35f [SPARK-30689][CORE][FOLLOW-UP] Rename config name of discovery plugin 742e35f is described below commit 742e35f1d48c2523dda2ce21d73b7ab5ade20582 Author: yi.wu <yi...@databricks.com> AuthorDate: Wed Feb 26 11:55:05 2020 +0900 [SPARK-30689][CORE][FOLLOW-UP] Rename config name of discovery plugin ### What changes were proposed in this pull request? Rename config `spark.resources.discovery.plugin` to `spark.resources.discoveryPlugin`. Also, as a side minor change: labeled `ResourceDiscoveryScriptPlugin` as `DeveloperApi` since it's not for end users. ### Why are the changes needed? Discovery plugin doesn't need to reserve the "discovery" namespace here and it's more consistent with the interface name `ResourceDiscoveryPlugin` if we use `discoveryPlugin` instead. ### Does this PR introduce any user-facing change? No, it's newly added in Spark 3.0. ### How was this patch tested? Pass Jenkins. Closes #27689 from Ngone51/spark_30689_followup. 
Authored-by: yi.wu <yi...@databricks.com> Signed-off-by: HyukjinKwon <gurwls...@apache.org> (cherry picked from commit e9fd52282e4ed4831c5922348b0e1ee71e045b4b) Signed-off-by: HyukjinKwon <gurwls...@apache.org> --- core/src/main/scala/org/apache/spark/internal/config/package.scala | 2 +- .../scala/org/apache/spark/resource/ResourceDiscoveryScriptPlugin.scala | 2 ++ docs/configuration.md | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala index 3f36e61..37ce178 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/package.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala @@ -55,7 +55,7 @@ package object config { .createOptional private[spark] val RESOURCES_DISCOVERY_PLUGIN = - ConfigBuilder("spark.resources.discovery.plugin") + ConfigBuilder("spark.resources.discoveryPlugin") .doc("Comma-separated list of class names implementing" + "org.apache.spark.api.resource.ResourceDiscoveryPlugin to load into the application." 
+ "This is for advanced users to replace the resource discovery class with a " + diff --git a/core/src/main/scala/org/apache/spark/resource/ResourceDiscoveryScriptPlugin.scala b/core/src/main/scala/org/apache/spark/resource/ResourceDiscoveryScriptPlugin.scala index 2ac6d3c..7027d1e 100644 --- a/core/src/main/scala/org/apache/spark/resource/ResourceDiscoveryScriptPlugin.scala +++ b/core/src/main/scala/org/apache/spark/resource/ResourceDiscoveryScriptPlugin.scala @@ -21,6 +21,7 @@ import java.io.File import java.util.Optional import org.apache.spark.{SparkConf, SparkException} +import org.apache.spark.annotation.DeveloperApi import org.apache.spark.api.resource.ResourceDiscoveryPlugin import org.apache.spark.internal.Logging import org.apache.spark.util.Utils.executeAndGetOutput @@ -32,6 +33,7 @@ import org.apache.spark.util.Utils.executeAndGetOutput * If the user specifies custom plugins, this is the last one to be executed and * throws if the resource isn't discovered. */ +@DeveloperApi class ResourceDiscoveryScriptPlugin extends ResourceDiscoveryPlugin with Logging { override def discoverResource( request: ResourceRequest, diff --git a/docs/configuration.md b/docs/configuration.md index 2421e00..469feed 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -244,7 +244,7 @@ of the most common options to set are: </td> </tr> <tr> - <td><code>spark.resources.discovery.plugin</code></td> + <td><code>spark.resources.discoveryPlugin</code></td> <td>org.apache.spark.resource.ResourceDiscoveryScriptPlugin</td> <td> Comma-separated list of class names implementing --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org