This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new bed2177  [SPARK-31215][SQL][DOC] Add version information to the static configuration of SQL
bed2177 is described below

commit bed21770af67f99f7a1b49a078604abfd0c3e8d6
Author: beliefer <belie...@163.com>
AuthorDate: Tue Mar 31 12:31:25 2020 +0900

    [SPARK-31215][SQL][DOC] Add version information to the static configuration of SQL
    
    ### What changes were proposed in this pull request?
    Add version information to the static configuration of `SQL`.
    
    I sorted out some information, shown below.
    
    Item name | Since version | JIRA ID | Commit ID | Note
    -- | -- | -- | -- | --
    spark.sql.warehouse.dir | 2.0.0 | SPARK-14994 | 054f991c4350af1350af7a4109ee77f4a34822f0#diff-32bb9518401c0948c5ea19377b5069ab |
    spark.sql.catalogImplementation | 2.0.0 | SPARK-14720 and SPARK-13643 | 8fc267ab3322e46db81e725a5cb1adb5a71b2b4d#diff-6bdad48cfc34314e89599655442ff210 |
    spark.sql.globalTempDatabase | 2.1.0 | SPARK-17338 | 23ddff4b2b2744c3dc84d928e144c541ad5df376#diff-6bdad48cfc34314e89599655442ff210 |
    spark.sql.sources.schemaStringLengthThreshold | 1.3.1 | SPARK-6024 | 6200f0709c5c8440decae8bf700d7859f32ac9d5#diff-41ef65b9ef5b518f77e2a03559893f4d | 1.3
    spark.sql.filesourceTableRelationCacheSize | 2.2.0 | SPARK-19265 | 9d9d67c7957f7cbbdbe889bdbc073568b2bfbb16#diff-32bb9518401c0948c5ea19377b5069ab |
    spark.sql.codegen.cache.maxEntries | 2.4.0 | SPARK-24727 | b2deef64f604ddd9502a31105ed47cb63470ec85#diff-5081b9388de3add800b6e4a6ddf55c01 |
    spark.sql.codegen.comments | 2.0.0 | SPARK-15680 | f0e8738c1ec0e4c5526aeada6f50cf76428f9afd#diff-8bcc5aea39c73d4bf38aef6f6951d42c |
    spark.sql.debug | 2.1.0 | SPARK-17899 | db8784feaa605adcbd37af4bc8b7146479b631f8#diff-32bb9518401c0948c5ea19377b5069ab |
    spark.sql.hive.thriftServer.singleSession | 1.6.0 | SPARK-11089 | 167ea61a6a604fd9c0b00122a94d1bc4b1de24ff#diff-ff50aea397a607b79df9bec6f2a841db |
    spark.sql.extensions | 2.2.0 | SPARK-18127 | f0de600797ff4883927d0c70732675fd8629e239#diff-5081b9388de3add800b6e4a6ddf55c01 |
    spark.sql.queryExecutionListeners | 2.3.0 | SPARK-19558 | bd4eb9ce57da7bacff69d9ed958c94f349b7e6fb#diff-5081b9388de3add800b6e4a6ddf55c01 |
    spark.sql.streaming.streamingQueryListeners | 2.4.0 | SPARK-24479 | 7703b46d2843db99e28110c4c7ccf60934412504#diff-5081b9388de3add800b6e4a6ddf55c01 |
    spark.sql.ui.retainedExecutions | 1.5.0 | SPARK-8861 and SPARK-8862 | ebc3aad272b91cf58e2e1b4aa92b49b8a947a045#diff-81764e4d52817f83bdd5336ef1226bd9 |
    spark.sql.broadcastExchange.maxThreadThreshold | 3.0.0 | SPARK-26601 | 126310ca68f2f248ea8b312c4637eccaba2fdc2b#diff-5081b9388de3add800b6e4a6ddf55c01 |
    spark.sql.subquery.maxThreadThreshold | 2.4.6 | SPARK-30556 | 2fc562cafd71ec8f438f37a28b65118906ab2ad2#diff-5081b9388de3add800b6e4a6ddf55c01 |
    spark.sql.event.truncate.length | 3.0.0 | SPARK-27045 | e60d8fce0b0cf2a6d766ea2fc5f994546550570a#diff-5081b9388de3add800b6e4a6ddf55c01 |
    spark.sql.legacy.sessionInitWithConfigDefaults | 3.0.0 | SPARK-27253 | 83f628b57da39ad9732d1393aebac373634a2eb9#diff-5081b9388de3add800b6e4a6ddf55c01 |
    spark.sql.defaultUrlStreamHandlerFactory.enabled | 3.0.0 | SPARK-25694 | 8469614c0513fbed87977d4e741649db3fdd8add#diff-5081b9388de3add800b6e4a6ddf55c01 |
    spark.sql.streaming.ui.enabled | 3.0.0 | SPARK-29543 | f9b86370cb04b72a4f00cbd4d60873960aa2792c#diff-5081b9388de3add800b6e4a6ddf55c01 |
    spark.sql.streaming.ui.retainedProgressUpdates | 3.0.0 | SPARK-29543 | f9b86370cb04b72a4f00cbd4d60873960aa2792c#diff-5081b9388de3add800b6e4a6ddf55c01 |
    spark.sql.streaming.ui.retainedQueries | 3.0.0 | SPARK-29543 | f9b86370cb04b72a4f00cbd4d60873960aa2792c#diff-5081b9388de3add800b6e4a6ddf55c01 |
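
    As a quick sketch of how this metadata can be read back (an illustration from code within Spark's own packages, assuming the `version` accessor that `ConfigEntry` exposes alongside the `version()` builder method used in the diff below):

    ```scala
    // Sketch: print a static SQL conf's key and the release that introduced it.
    // Assumes ConfigEntry carries the string set via .version() in StaticSQLConf.
    import org.apache.spark.sql.internal.StaticSQLConf

    println(StaticSQLConf.UI_RETAINED_EXECUTIONS.key)     // spark.sql.ui.retainedExecutions
    println(StaticSQLConf.UI_RETAINED_EXECUTIONS.version) // 1.5.0
    ```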
    
    ### Why are the changes needed?
    Supplements the static SQL configuration with version information.
    
    ### Does this PR introduce any user-facing change?
    'No'.
    
    ### How was this patch tested?
    Existing unit tests.
    
    Closes #27981 from beliefer/add-version-to-sql-static-config.
    
    Authored-by: beliefer <belie...@163.com>
    Signed-off-by: HyukjinKwon <gurwls...@apache.org>
---
 docs/configuration.md                              |  1 +
 .../apache/spark/sql/internal/StaticSQLConf.scala  | 25 ++++++++++++++++++++--
 2 files changed, 24 insertions(+), 2 deletions(-)

diff --git a/docs/configuration.md b/docs/configuration.md
index a7a1477..4835336 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -1233,6 +1233,7 @@ Apart from these, the following properties are also available, and may be useful
   <td>
    How many finished executions the Spark UI and status APIs remember before garbage collecting.
   </td>
+  <td>1.5.0</td>
 </tr>
 <tr>
   <td><code>spark.streaming.ui.retainedBatches</code></td>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
index 563e51e..d202528 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala
@@ -32,17 +32,20 @@ object StaticSQLConf {
 
   val WAREHOUSE_PATH = buildStaticConf("spark.sql.warehouse.dir")
     .doc("The default location for managed databases and tables.")
+    .version("2.0.0")
     .stringConf
     .createWithDefault(Utils.resolveURI("spark-warehouse").toString)
 
  val CATALOG_IMPLEMENTATION = buildStaticConf("spark.sql.catalogImplementation")
     .internal()
+    .version("2.0.0")
     .stringConf
     .checkValues(Set("hive", "in-memory"))
     .createWithDefault("in-memory")
 
   val GLOBAL_TEMP_DATABASE = buildStaticConf("spark.sql.globalTempDatabase")
     .internal()
+    .version("2.1.0")
     .stringConf
     .transform(_.toLowerCase(Locale.ROOT))
     .createWithDefault("global_temp")
@@ -55,9 +58,10 @@ object StaticSQLConf {
   // that's why this conf has to be a static SQL conf.
   val SCHEMA_STRING_LENGTH_THRESHOLD =
     buildStaticConf("spark.sql.sources.schemaStringLengthThreshold")
+      .internal()
       .doc("The maximum length allowed in a single cell when " +
         "storing additional schema information in Hive's metastore.")
-      .internal()
+      .version("1.3.1")
       .intConf
       .createWithDefault(4000)
 
@@ -65,6 +69,7 @@ object StaticSQLConf {
     buildStaticConf("spark.sql.filesourceTableRelationCacheSize")
       .internal()
       .doc("The maximum size of the cache that maps qualified table names to 
table relation plans.")
+      .version("2.2.0")
       .intConf
      .checkValue(cacheSize => cacheSize >= 0, "The maximum size of the cache must not be negative")
       .createWithDefault(1000)
@@ -73,6 +78,7 @@ object StaticSQLConf {
       .internal()
       .doc("When nonzero, enable caching of generated classes for operators 
and expressions. " +
         "All jobs share the cache that can use up to the specified number for 
generated classes.")
+      .version("2.4.0")
       .intConf
      .checkValue(maxEntries => maxEntries >= 0, "The maximum must not be negative")
       .createWithDefault(100)
@@ -82,6 +88,7 @@ object StaticSQLConf {
     .doc("When true, put comment in the generated code. Since computing huge 
comments " +
       "can be extremely expensive in certain cases, such as deeply-nested 
expressions which " +
       "operate over inputs with wide schemas, default is false.")
+    .version("2.0.0")
     .booleanConf
     .createWithDefault(false)
 
@@ -90,6 +97,7 @@ object StaticSQLConf {
   val DEBUG_MODE = buildStaticConf("spark.sql.debug")
     .internal()
     .doc("Only used for internal debugging. Not all functions are supported 
when it is enabled.")
+    .version("2.1.0")
     .booleanConf
     .createWithDefault(false)
 
@@ -98,6 +106,7 @@ object StaticSQLConf {
       .doc("When set to true, Hive Thrift server is running in a single 
session mode. " +
         "All the JDBC/ODBC connections share the temporary views, function 
registries, " +
         "SQL configuration and the current database.")
+      .version("1.6.0")
       .booleanConf
       .createWithDefault(false)
 
@@ -109,6 +118,7 @@ object StaticSQLConf {
       "applied in the specified order. For the case of parsers, the last 
parser is used and each " +
       "parser can delegate to its predecessor. For the case of function name 
conflicts, the last " +
       "registered function name is used.")
+    .version("2.2.0")
     .stringConf
     .toSequence
     .createOptional
@@ -117,6 +127,7 @@ object StaticSQLConf {
     .doc("List of class names implementing QueryExecutionListener that will be 
automatically " +
       "added to newly created sessions. The classes should have either a 
no-arg constructor, " +
       "or a constructor that expects a SparkConf argument.")
+    .version("2.3.0")
     .stringConf
     .toSequence
     .createOptional
@@ -125,6 +136,7 @@ object StaticSQLConf {
     .doc("List of class names implementing StreamingQueryListener that will be 
automatically " +
       "added to newly created sessions. The classes should have either a 
no-arg constructor, " +
       "or a constructor that expects a SparkConf argument.")
+    .version("2.4.0")
     .stringConf
     .toSequence
     .createOptional
@@ -132,6 +144,7 @@ object StaticSQLConf {
   val UI_RETAINED_EXECUTIONS =
     buildStaticConf("spark.sql.ui.retainedExecutions")
       .doc("Number of executions to retain in the Spark UI.")
+      .version("1.5.0")
       .intConf
       .createWithDefault(1000)
 
@@ -144,6 +157,7 @@ object StaticSQLConf {
         "Notice the number should be carefully chosen since decreasing 
parallelism might " +
         "cause longer waiting for other broadcasting. Also, increasing 
parallelism may " +
         "cause memory problem.")
+      .version("3.0.0")
       .intConf
      .checkValue(thres => thres > 0 && thres <= 128, "The threshold must be in (0,128].")
       .createWithDefault(128)
@@ -152,6 +166,7 @@ object StaticSQLConf {
     buildStaticConf("spark.sql.subquery.maxThreadThreshold")
       .internal()
       .doc("The maximum degree of parallelism to execute the subquery.")
+      .version("2.4.6")
       .intConf
      .checkValue(thres => thres > 0 && thres <= 128, "The threshold must be in (0,128].")
       .createWithDefault(16)
@@ -159,6 +174,7 @@ object StaticSQLConf {
  val SQL_EVENT_TRUNCATE_LENGTH = buildStaticConf("spark.sql.event.truncate.length")
    .doc("Threshold of SQL length beyond which it will be truncated before adding to " +
      "event. Defaults to no truncation. If set to 0, callsite will be logged instead.")
+    .version("3.0.0")
     .intConf
     .checkValue(_ >= 0, "Must be set greater or equal to zero")
     .createWithDefault(Int.MaxValue)
@@ -167,11 +183,13 @@ object StaticSQLConf {
     buildStaticConf("spark.sql.legacy.sessionInitWithConfigDefaults")
       .doc("Flag to revert to legacy behavior where a cloned SparkSession 
receives SparkConf " +
         "defaults, dropping any overrides in its parent SparkSession.")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(false)
 
   val DEFAULT_URL_STREAM_HANDLER_FACTORY_ENABLED =
     buildStaticConf("spark.sql.defaultUrlStreamHandlerFactory.enabled")
+      .internal()
       .doc(
         "When true, register Hadoop's FsUrlStreamHandlerFactory to support " +
         "ADD JAR against HDFS locations. " +
@@ -179,7 +197,7 @@ object StaticSQLConf {
         "to support a particular protocol type, or if Hadoop's 
FsUrlStreamHandlerFactory " +
         "conflicts with other protocol types such as `http` or `https`. See 
also SPARK-25694 " +
         "and HADOOP-14598.")
-      .internal()
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(true)
 
@@ -187,6 +205,7 @@ object StaticSQLConf {
     buildStaticConf("spark.sql.streaming.ui.enabled")
       .doc("Whether to run the Structured Streaming Web UI for the Spark 
application when the " +
         "Spark Web UI is enabled.")
+      .version("3.0.0")
       .booleanConf
       .createWithDefault(true)
 
@@ -194,12 +213,14 @@ object StaticSQLConf {
     buildStaticConf("spark.sql.streaming.ui.retainedProgressUpdates")
       .doc("The number of progress updates to retain for a streaming query for 
Structured " +
         "Streaming UI.")
+      .version("3.0.0")
       .intConf
       .createWithDefault(100)
 
   val STREAMING_UI_RETAINED_QUERIES =
     buildStaticConf("spark.sql.streaming.ui.retainedQueries")
       .doc("The number of inactive queries to retain for Structured Streaming 
UI.")
+      .version("3.0.0")
       .intConf
       .createWithDefault(100)
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
