GitHub user HyukjinKwon commented on a diff in the pull request: https://github.com/apache/spark/pull/21648#discussion_r198375176 --- Diff: python/pyspark/sql/conf.py --- @@ -64,6 +64,97 @@ def _checkType(self, obj, identifier): (identifier, obj, type(obj).__name__)) +class ConfigEntry(object): + """An entry contains all meta information for a configuration""" + + def __init__(self, confKey): + """Create a new ConfigEntry with config key""" + self.confKey = confKey + self.converter = None + self.default = _NoValue + + def boolConf(self): + """Designate current config entry is boolean config""" + self.converter = lambda x: str(x).lower() == "true" + return self + + def intConf(self): + """Designate current config entry is integer config""" + self.converter = lambda x: int(x) + return self + + def stringConf(self): + """Designate current config entry is string config""" + self.converter = lambda x: str(x) + return self + + def withDefault(self, default): + """Give a default value for current config entry, the default value will be set + to _NoValue when it's absent""" + self.default = default + return self + + def read(self, ctx): + """Read value from this config entry through sql context""" + return self.converter(ctx.getConf(self.confKey, self.default)) + + +class SQLConf(object): + """A class that enables the getting of SQL config parameters in pyspark""" + + REPL_EAGER_EVAL_ENABLED = ConfigEntry("spark.sql.repl.eagerEval.enabled")\ --- End diff -- Can we do this by wrapping the existing SQLConf? We can make them static properties by, for example, [this hack](https://github.com/graphframes/graphframes/pull/169/files#diff-e81e6b169c0aa35012a3263b2f31b330R381)
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org