GitHub user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/14022#discussion_r71197993
  
    --- Diff: core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala ---
    @@ -99,13 +133,66 @@ private class FallbackConfigEntry[T] (
         key: String,
         doc: String,
         isPublic: Boolean,
    -    private val fallback: ConfigEntry[T])
    +    private[config] val fallback: ConfigEntry[T])
         extends ConfigEntry[T](key, fallback.valueConverter, fallback.stringConverter, doc, isPublic) {
     
       override def defaultValueString: String = s"<value of ${fallback.key}>"
     
     -  override def readFrom(conf: SparkConf): T = {
     -    conf.getOption(key).map(valueConverter).getOrElse(fallback.readFrom(conf))
     +  override def readFrom(conf: JMap[String, String], getenv: String => String): T = {
     +    Option(conf.get(key)).map(valueConverter).getOrElse(fallback.readFrom(conf, getenv))
    +  }
    +
    +}
    +
    +private object ConfigEntry {
    +
     +  private val knownConfigs = new java.util.concurrent.ConcurrentHashMap[String, ConfigEntry[_]]()
    +
    +  private val REF_RE = "\\$\\{(?:(\\w+?):)?(\\S+?)\\}".r
    +
    +  def registerEntry(entry: ConfigEntry[_]): Unit = {
    +    val existing = knownConfigs.putIfAbsent(entry.key, entry)
     +    require(existing == null, s"Config entry ${entry.key} already registered!")
    +  }
    +
    +  def findEntry(key: String): ConfigEntry[_] = knownConfigs.get(key)
    +
    +  /**
    +   * Expand the `value` according to the rules explained in ConfigEntry.
    +   */
    +  def expand(
    +      value: String,
    +      conf: JMap[String, String],
    +      getenv: String => String,
    +      usedRefs: Set[String]): String = {
    +    REF_RE.replaceAllIn(value, { m =>
    +      val prefix = m.group(1)
    +      val name = m.group(2)
    +      val replacement = prefix match {
    +        case null =>
     +          require(!usedRefs.contains(name), s"Circular reference in $value: $name")
    +          if (name.startsWith("spark.")) {
    +            Option(findEntry(name))
     +              .flatMap(_.readAndExpand(conf, getenv, usedRefs = usedRefs + name))
    +              .orElse(Option(conf.get(name)))
    +              .orElse(defaultValueString(name))
    +          } else {
    +            None
    +          }
    +        case "system" => sys.props.get(name)
    +        case "env" => Option(getenv(name))
     +        case _ => throw new IllegalArgumentException(s"Invalid prefix: $prefix")
    --- End diff --
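
    For context, the `usedRefs` parameter threaded through `expand` and
    `readAndExpand` above is what breaks reference cycles. A minimal
    standalone sketch of that guard, assuming a plain `Map` lookup in
    place of the registered-entry machinery (the object name and the
    simplified lookup are illustrative only, not the PR's code):

        import scala.util.matching.Regex

        object CircularRefDemo {
          private val RefRe = "\\$\\{(?:(\\w+?):)?(\\S+?)\\}".r

          // Expand "${name}" references from `conf`, carrying the set of names
          // already being expanded so a cycle fails fast instead of recursing
          // forever.
          def expand(value: String, conf: Map[String, String],
              usedRefs: Set[String] = Set.empty): String = {
            RefRe.replaceAllIn(value, { m =>
              val name = m.group(2)
              require(!usedRefs.contains(name), s"Circular reference in $value: $name")
              val expanded = conf.get(name)
                .map(v => expand(v, conf, usedRefs + name))
                .getOrElse(m.matched)
              Regex.quoteReplacement(expanded)
            })
          }

          def main(args: Array[String]): Unit = {
            val ok = Map("spark.a" -> "${spark.b}", "spark.b" -> "done")
            println(expand("${spark.a}", ok))    // prints "done"

            val cyclic = Map("spark.a" -> "${spark.b}", "spark.b" -> "${spark.a}")
            expand("${spark.a}", cyclic)         // throws IllegalArgumentException
          }
        }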
    
    Yeah, I have to take a look at this. Throwing might actually break some 
existing code in SparkHadoopUtil.
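
    For what it's worth, a lenient alternative would treat an unrecognized
    prefix as a non-match and leave the reference text untouched, so
    pre-existing "${...}" strings that other code rewrites later would pass
    through intact. A sketch under that assumption (names are illustrative
    and this is not necessarily the final behavior):

        import scala.util.matching.Regex

        object LenientExpansion {
          private val RefRe = "\\$\\{(?:(\\w+?):)?(\\S+?)\\}".r

          // Expand only the prefixes we understand; anything else is left
          // exactly as written instead of throwing, so later passes still
          // see the original reference.
          def expandLeniently(value: String, getenv: String => String): String =
            RefRe.replaceAllIn(value, { m =>
              val replaced = m.group(1) match {
                case "system" => sys.props.get(m.group(2))
                case "env"    => Option(getenv(m.group(2)))
                case _        => None  // unknown or missing prefix: no substitution
              }
              Regex.quoteReplacement(replaced.getOrElse(m.matched))
            })

          def main(args: Array[String]): Unit = {
            val env = (n: String) => System.getenv(n)
            // The unrecognized reference survives unmodified.
            println(expandLeniently("${someother:ref} under ${env:HOME}", env))
          }
        }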

