This is an automated email from the ASF dual-hosted git repository.

viirya pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git


The following commit(s) were added to refs/heads/main by this push:
     new 1865284  docs: Generate configuration guide in mvn build (#349)
1865284 is described below

commit 1865284559fec6fcad138843604fc5798845f3ce
Author: Andy Grove <[email protected]>
AuthorDate: Mon Apr 29 19:28:34 2024 -0600

    docs: Generate configuration guide in mvn build (#349)
    
    * initial config doc
    
    * Generate configuration guide as part of mvn package
    
    * formatting
    
    * scalafix
    
    * add maven usage to comment
    
    * do not publish internal configs
    
    * improve check for public configs
---
 .../main/scala/org/apache/comet/CometConf.scala    | 56 ++++++++++++++++++++--
 docs/source/index.rst                              |  1 +
 docs/source/user-guide/configs-template.md         | 24 ++++++++++
 docs/source/user-guide/configs.md                  | 52 ++++++++++++++++++++
 spark/pom.xml                                      | 22 +++++++++
 5 files changed, 150 insertions(+), 5 deletions(-)

diff --git a/common/src/main/scala/org/apache/comet/CometConf.scala b/common/src/main/scala/org/apache/comet/CometConf.scala
index b245953..ca4bf47 100644
--- a/common/src/main/scala/org/apache/comet/CometConf.scala
+++ b/common/src/main/scala/org/apache/comet/CometConf.scala
@@ -19,8 +19,12 @@
 
 package org.apache.comet
 
+import java.io.{BufferedOutputStream, FileOutputStream}
 import java.util.concurrent.TimeUnit
 
+import scala.collection.mutable.ListBuffer
+import scala.io.Source
+
 import org.apache.spark.network.util.ByteUnit
 import org.apache.spark.network.util.JavaUtils
 import org.apache.spark.sql.comet.util.Utils
@@ -39,6 +43,14 @@ import org.apache.spark.sql.internal.SQLConf
  * can also explicitly pass a [[SQLConf]] object to the `get` method.
  */
 object CometConf {
+
+  /** List of all configs, used for generating documentation */
+  val allConfs = new ListBuffer[ConfigEntry[_]]
+
+  def register(conf: ConfigEntryWithDefault[_]): Unit = {
+    allConfs.append(conf)
+  }
+
   def conf(key: String): ConfigBuilder = ConfigBuilder(key)
 
   val COMET_EXEC_CONFIG_PREFIX = "spark.comet.exec";
@@ -341,10 +353,9 @@ object CometConf {
   val COMET_ROW_TO_COLUMNAR_ENABLED: ConfigEntry[Boolean] =
     conf("spark.comet.rowToColumnar.enabled")
       .internal()
-      .doc("""
-         |Whether to enable row to columnar conversion in Comet. When this is turned on, Comet will
-         |convert row-based operators in `spark.comet.rowToColumnar.supportedOperatorList` into
-         |columnar based before processing.""".stripMargin)
+      .doc("Whether to enable row to columnar conversion in Comet. When this is turned on, " +
+        "Comet will convert row-based operators in " +
+        "`spark.comet.rowToColumnar.supportedOperatorList` into columnar based before processing.")
       .booleanConf
       .createWithDefault(false)
 
@@ -475,7 +486,7 @@ private class TypedConfigBuilder[T](
   /** Creates a [[ConfigEntry]] that has a default value. */
   def createWithDefault(default: T): ConfigEntry[T] = {
     val transformedDefault = converter(stringConverter(default))
-    new ConfigEntryWithDefault[T](
+    val conf = new ConfigEntryWithDefault[T](
       parent.key,
       transformedDefault,
       converter,
@@ -483,6 +494,8 @@ private class TypedConfigBuilder[T](
       parent._doc,
       parent._public,
       parent._version)
+    CometConf.register(conf)
+    conf
   }
 }
 
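As an illustration of the registration flow introduced above (a sketch, not part of this commit; the config key is hypothetical), any config built through the ConfigBuilder DSL is now appended to CometConf.allConfs as a side effect of createWithDefault:

    // Hypothetical config; assumed to be defined inside the org.apache.comet
    // package, since ConfigBuilder is private[comet].
    val MY_EXAMPLE_CONF: ConfigEntry[Boolean] =
      CometConf.conf("spark.comet.example.enabled")
        .doc("Hypothetical config used to illustrate registration.")
        .booleanConf
        .createWithDefault(false)

    // createWithDefault calls CometConf.register(conf), so the entry is now
    // visible to the doc generator, which filters on isPublic before emitting
    // the markdown table.
    assert(CometConf.allConfs.exists(_.key == "spark.comet.example.enabled"))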
@@ -612,3 +625,36 @@ private[comet] case class ConfigBuilder(key: String) {
 private object ConfigEntry {
   val UNDEFINED = "<undefined>"
 }
+
+/**
+ * Utility for generating markdown documentation from the configs.
+ *
+ * This is invoked when running `mvn clean package -DskipTests`.
+ */
+object CometConfGenerateDocs {
+  def main(args: Array[String]): Unit = {
+    if (args.length != 2) {
+      // scalastyle:off println
+      println("Missing arguments for template file and output file")
+      // scalastyle:on println
+      sys.exit(-1)
+    }
+    val templateFilename = args.head
+    val outputFilename = args(1)
+    val w = new BufferedOutputStream(new FileOutputStream(outputFilename))
+    for (line <- Source.fromFile(templateFilename).getLines()) {
+      if (line.trim == "<!--CONFIG_TABLE-->") {
+        val publicConfigs = CometConf.allConfs.filter(_.isPublic)
+        val confs = publicConfigs.sortBy(_.key)
+        w.write("| Config | Description | Default Value |\n".getBytes)
+        w.write("|--------|-------------|---------------|\n".getBytes)
+        for (conf <- confs) {
+          w.write(s"| ${conf.key} | ${conf.doc.trim} | 
${conf.defaultValueString} |\n".getBytes)
+        }
+      } else {
+        w.write(s"${line.trim}\n".getBytes)
+      }
+    }
+    w.close()
+  }
+}
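The generator is wired into the Maven build via exec-maven-plugin (see the spark/pom.xml change below), but as a sketch it can also be invoked directly, passing the same template and output paths the pom passes:

    // Sketch: args(0) is the template containing the <!--CONFIG_TABLE-->
    // placeholder, args(1) is the markdown file to generate.
    CometConfGenerateDocs.main(
      Array(
        "docs/source/user-guide/configs-template.md",
        "docs/source/user-guide/configs.md"))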
diff --git a/docs/source/index.rst b/docs/source/index.rst
index a19f642..dfd19e5 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -45,6 +45,7 @@ as a native runtime to achieve improvement in terms of query efficiency and quer
    Supported Expressions <user-guide/expressions>
    Supported Operators <user-guide/operators>
    Supported Data Types <user-guide/datatypes>
+   Configuration Settings <user-guide/configs>
    Compatibility Guide <user-guide/compatibility>
 
 .. _toc.links:
diff --git a/docs/source/user-guide/configs-template.md b/docs/source/user-guide/configs-template.md
new file mode 100644
index 0000000..f5c15b6
--- /dev/null
+++ b/docs/source/user-guide/configs-template.md
@@ -0,0 +1,24 @@
+<!---
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+-->
+
+# Comet Configuration Settings
+
+Comet provides the following configuration settings.
+
+<!--CONFIG_TABLE-->
diff --git a/docs/source/user-guide/configs.md b/docs/source/user-guide/configs.md
new file mode 100644
index 0000000..3a16cd4
--- /dev/null
+++ b/docs/source/user-guide/configs.md
@@ -0,0 +1,52 @@
+<!---
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+
+# Comet Configuration Settings
+
+Comet provides the following configuration settings.
+
+| Config | Description | Default Value |
+|--------|-------------|---------------|
+| spark.comet.ansi.enabled | Comet does not respect ANSI mode in most cases and by default will not accelerate queries when ANSI mode is enabled. Enable this setting to test Comet's experimental support for ANSI mode. This should not be used in production. | false |
+| spark.comet.batchSize | The columnar batch size, i.e., the maximum number of rows that a batch can contain. | 8192 |
+| spark.comet.cast.stringToTimestamp | Comet is not currently fully compatible with Spark when casting from String to Timestamp. | false |
+| spark.comet.columnar.shuffle.async.enabled | Whether to enable asynchronous shuffle for Arrow-based shuffle. By default, this config is false. | false |
+| spark.comet.columnar.shuffle.async.max.thread.num | Maximum number of threads on an executor used for Comet async columnar shuffle. By default, this config is 100. This is the upper bound of the total number of shuffle threads per executor. In other words, if the number of cores * the number of shuffle threads per task (`spark.comet.columnar.shuffle.async.thread.num`) is larger than this config, Comet will use this config as the number of shuffle threads per executor instead. | 100 |
+| spark.comet.columnar.shuffle.async.thread.num | Number of threads used for Comet async columnar shuffle per shuffle task. By default, this config is 3. Note that more threads means a higher memory requirement to buffer shuffle data before flushing to disk. Also, more threads may not always improve performance; this should be set based on the number of cores available. | 3 |
+| spark.comet.columnar.shuffle.enabled | Force Comet to only use columnar shuffle for CometScan and Spark regular operators. If this is enabled, Comet native shuffle will not be enabled; only Arrow-based shuffle will be used. By default, this config is false. | false |
+| spark.comet.columnar.shuffle.memory.factor | Fraction of Comet memory to be allocated per executor process for Comet shuffle. Comet memory size is specified by `spark.comet.memoryOverhead` or calculated by `spark.comet.memory.overhead.factor` * `spark.executor.memory`. By default, this config is 1.0. | 1.0 |
+| spark.comet.debug.enabled | Whether to enable debug mode for Comet. By default, this config is false. When enabled, Comet will do additional checks for debugging purposes, for example validating arrays when importing them from the JVM on the native side. Note that these checks may be expensive and should only be enabled for debugging. | false |
+| spark.comet.enabled | Whether to enable the Comet extension for Spark. When this is turned on, Spark will use Comet to read Parquet data sources. Note that to enable native vectorized execution, both this config and 'spark.comet.exec.enabled' need to be enabled. By default, this config is the value of the env var `ENABLE_COMET` if set, or true otherwise. | true |
+| spark.comet.exceptionOnDatetimeRebase | Whether to throw an exception when seeing dates/timestamps from the legacy hybrid (Julian + Gregorian) calendar. Since Spark 3, dates/timestamps have been written according to the Proleptic Gregorian calendar. When this is true, Comet will throw exceptions when seeing dates/timestamps that were written by Spark versions before 3.0. If this is false, these dates/timestamps will be read as if they were written to the Proleptic Gregorian calendar and w [...]
+| spark.comet.exec.all.enabled | Whether to enable all Comet operators. By default, this config is false. Note that this config takes precedence over the separate configs 'spark.comet.exec.<operator_name>.enabled'; that is, if this config is enabled, the separate configs are ignored. | false |
+| spark.comet.exec.all.expr.enabled | Whether to enable all Comet expressions. By default, this config is false. Note that this config takes precedence over the separate configs 'spark.comet.exec.<expr_name>.enabled'; that is, if this config is enabled, the separate configs are ignored. | false |
+| spark.comet.exec.broadcast.enabled | Whether to force enabling broadcasting for Comet native operators. By default, this config is false. The Comet broadcast feature is enabled automatically by the Comet extension, but unit tests need to force enable it for invalid cases, so this config is only used for unit tests. | false |
+| spark.comet.exec.enabled | Whether to enable Comet native vectorized execution for Spark. This controls whether Spark should convert operators into their Comet counterparts and execute them in native space. Note: each operator is associated with a separate config in the format of 'spark.comet.exec.<operator_name>.enabled' at the moment, and both that config and this one need to be turned on in order for the operator to be executed natively. By default, this config is false. | false |
+| spark.comet.exec.memoryFraction | The fraction of memory from Comet memory overhead that the native memory manager can use for execution. The purpose of this config is to set aside memory for untracked data structures, as well as imprecise size estimation during memory acquisition. Default value is 0.7. | 0.7 |
+| spark.comet.exec.shuffle.codec | The codec used by Comet native shuffle to compress shuffle data. Only zstd is supported. | zstd |
+| spark.comet.exec.shuffle.enabled | Whether to enable Comet native shuffle. By default, this config is false. Note that this requires setting 'spark.shuffle.manager' to 'org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager'. 'spark.shuffle.manager' must be set before starting the Spark application and cannot be changed while the application is running. | false |
+| spark.comet.memory.overhead.factor | Fraction of executor memory to be allocated as additional non-heap memory per executor process for Comet. Default value is 0.2. | 0.2 |
+| spark.comet.memory.overhead.min | Minimum amount of additional memory to be allocated per executor process for Comet, in MiB. | 402653184b |
+| spark.comet.nativeLoadRequired | Whether to require the Comet native library to load successfully when Comet is enabled. If not, Comet will silently fall back to Spark when it fails to load the native lib. Otherwise, an error will be thrown and the Spark job will be aborted. | false |
+| spark.comet.parquet.enable.directBuffer | Whether to use Java direct byte buffers when reading Parquet. By default, this is false. | false |
+| spark.comet.rowToColumnar.supportedOperatorList | A comma-separated list of row-based operators that will be converted to columnar format when 'spark.comet.rowToColumnar.enabled' is true. | Range,InMemoryTableScan |
+| spark.comet.scan.enabled | Whether to enable Comet scan. When this is turned on, Spark will use Comet to read Parquet data sources. Note that to enable native vectorized execution, both this config and 'spark.comet.exec.enabled' need to be enabled. By default, this config is true. | true |
+| spark.comet.scan.preFetch.enabled | Whether to enable the pre-fetching feature of CometScan. By default it is disabled. | false |
+| spark.comet.scan.preFetch.threadNum | The number of threads running pre-fetching for CometScan. Effective if 'spark.comet.scan.preFetch.enabled' is enabled. By default it is 2. Note that more pre-fetching threads means a higher memory requirement to store pre-fetched row groups. | 2 |
+| spark.comet.shuffle.preferDictionary.ratio | The ratio of total values to distinct values in a string column, used to decide whether to prefer dictionary encoding when shuffling the column. If the ratio is higher than this config, dictionary encoding will be used when shuffling string columns. This config is effective only if it is higher than 1.0. By default, this config is 10.0. Note that this config is only used when 'spark.comet.columnar.shuffle.enabled' is true. | 10.0 |
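As a usage illustration (not part of this commit), the settings documented above are ordinary Spark configs and can be supplied when building a SparkSession; the keys and values below are examples taken from the table:

    import org.apache.spark.sql.SparkSession

    // Illustrative only: enable Comet and its native execution using two of
    // the configs documented in the generated table.
    val spark = SparkSession
      .builder()
      .appName("comet-config-example")
      .config("spark.comet.enabled", "true")
      .config("spark.comet.exec.enabled", "true")
      .getOrCreate()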
diff --git a/spark/pom.xml b/spark/pom.xml
index 66ff829..9392b7f 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -264,6 +264,28 @@ under the License.
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>exec-maven-plugin</artifactId>
+        <version>3.2.0</version>
+        <executions>
+          <execution>
+            <id>generate-config-docs</id>
+            <phase>package</phase>
+            <goals>
+              <goal>java</goal>
+            </goals>
+            <configuration>
+              <mainClass>org.apache.comet.CometConfGenerateDocs</mainClass>
+              <arguments>
+                <argument>docs/source/user-guide/configs-template.md</argument>
+                <argument>docs/source/user-guide/configs.md</argument>
+              </arguments>
+              <classpathScope>compile</classpathScope>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
