This is an automated email from the ASF dual-hosted git repository.

danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 968da86593f [HUDI-7962] Add show create table command (#11471)
968da86593f is described below

commit 968da86593f36b9a3855bb6c5aac804834d32ad4
Author: houyuting <yuting....@qq.com>
AuthorDate: Mon Jul 8 12:18:53 2024 +0800

    [HUDI-7962] Add show create table command (#11471)
    
    Co-authored-by: houyuting <houyut...@kuaishou.com>
---
 .../command/ShowHoodieCreateTableCommand.scala     | 94 ++++++++++++++++++++++
 .../spark/sql/hudi/analysis/HoodieAnalysis.scala   |  3 +
 2 files changed, 97 insertions(+)

diff --git 
a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/ShowHoodieCreateTableCommand.scala
 
b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/ShowHoodieCreateTableCommand.scala
new file mode 100644
index 00000000000..c806a914699
--- /dev/null
+++ 
b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/command/ShowHoodieCreateTableCommand.scala
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql.hudi.command
+
+import org.apache.hudi.common.util.ConfigUtils
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
+import org.apache.spark.sql.catalyst.catalog.HoodieCatalogTable
+import org.apache.spark.sql.catalyst.util.escapeSingleQuotedString
+import org.apache.spark.sql.catalyst.expressions.{Attribute, 
AttributeReference}
+import org.apache.spark.sql.hudi.HoodieOptionConfig.{SQL_KEY_PRECOMBINE_FIELD, 
SQL_KEY_TABLE_PRIMARY_KEY, SQL_KEY_TABLE_TYPE, SQL_PAYLOAD_CLASS, 
SQL_RECORD_MERGER_STRATEGY}
+import org.apache.spark.sql.{Row, SparkSession}
+import org.apache.spark.sql.types.StringType
+
/**
 * Command backing `SHOW CREATE TABLE` for Hudi tables: reconstructs a
 * `CREATE TABLE IF NOT EXISTS ...` DDL statement from the table's catalog
 * metadata and returns it as a single-row, single-column result.
 *
 * @param table identifier of the Hudi table to render DDL for
 */
case class ShowHoodieCreateTableCommand(table: TableIdentifier)
  extends HoodieLeafRunnableCommand {

  // Single-column result schema, mirroring Spark's native SHOW CREATE TABLE output.
  override val output: Seq[Attribute] = Seq(
    AttributeReference("createtab_stmt", StringType, nullable = false)()
  )

  /**
   * Resolves the table in the session catalog and renders its CREATE statement.
   *
   * @throws NoSuchTableException if the table does not exist in the catalog
   */
  override def run(sparkSession: SparkSession): Seq[Row] = {
    val dbName = table.database.getOrElse("default")
    val tableName = table.table
    if (!sparkSession.sessionState.catalog.tableExists(table)) {
      throw new NoSuchTableException(dbName, tableName)
    }
    val hoodieCatalogTable = HoodieCatalogTable(sparkSession, table)
    Seq(Row(showCreateHoodieTable(hoodieCatalogTable)))
  }

  /** Assembles the full DDL: header (columns/comment), USING/PARTITIONED BY, TBLPROPERTIES. */
  private def showCreateHoodieTable(metadata: HoodieCatalogTable): String = {
    // `StringBuilder.newBuilder` is deprecated since Scala 2.13; construct directly.
    val builder = new StringBuilder
    builder ++= s"CREATE TABLE IF NOT EXISTS ${table.quotedString}"
    showHoodieTableHeader(metadata, builder)
    showHoodieTableNonDataColumns(metadata, builder)
    showHoodieTableProperties(metadata, builder)
    builder.toString()
  }

  /** Appends the column list (meta fields excluded) and the optional table comment. */
  private def showHoodieTableHeader(metadata: HoodieCatalogTable, builder: StringBuilder): Unit = {
    val columns = metadata.tableSchemaWithoutMetaFields.map(_.toDDL)
    if (columns.nonEmpty) {
      builder ++= columns.mkString(" (\n", ",\n ", ")\n")
    }

    metadata.table
      .comment
      .map("COMMENT '" + escapeSingleQuotedString(_) + "'\n")
      .foreach(builder.append)
  }

  /** Appends the `USING <provider>` clause and, if partitioned, `PARTITIONED BY (...)`. */
  private def showHoodieTableNonDataColumns(metadata: HoodieCatalogTable, builder: StringBuilder): Unit = {
    // NOTE(review): `.get` assumes the provider is always recorded for a Hudi
    // catalog table — TODO confirm; otherwise prefer a getOrElse fallback.
    builder ++= s"USING ${metadata.table.provider.get}\n"
    if (metadata.partitionFields.nonEmpty) {
      builder ++= metadata.partitionFields.mkString("PARTITIONED BY (", ", ", ")\n")
    }
  }

  /**
   * Appends the `TBLPROPERTIES (...)` clause. Standard Hudi SQL options are
   * rendered first with bare keys (`key='value'`); all remaining catalog
   * properties follow with quoted keys (`'key'='value'`).
   */
  private def showHoodieTableProperties(metadata: HoodieCatalogTable, builder: StringBuilder): Unit = {
    val standardOptions = Seq(SQL_KEY_TABLE_PRIMARY_KEY, SQL_KEY_PRECOMBINE_FIELD,
      SQL_KEY_TABLE_TYPE, SQL_PAYLOAD_CLASS, SQL_RECORD_MERGER_STRATEGY).map(_.sqlKeyName)

    val standardProps = metadata.catalogProperties
      .filter { case (key, _) => standardOptions.contains(key) }
      .map { case (key, value) => s"$key='${escapeSingleQuotedString(value)}'" }

    // Fix: the second pass must exclude the standard options as well as the
    // read-optimized-view marker; otherwise every standard option is emitted
    // twice (once bare above and once quoted here), producing invalid DDL.
    val otherProps = metadata.catalogProperties
      .filterNot { case (key, _) =>
        standardOptions.contains(key) || key.equals(ConfigUtils.IS_QUERY_AS_RO_TABLE)
      }
      .map { case (key, value) =>
        s"'${escapeSingleQuotedString(key)}'='${escapeSingleQuotedString(value)}'"
      }

    val props = standardProps ++ otherProps
    if (props.nonEmpty) {
      builder ++= props.mkString("TBLPROPERTIES (\n  ", ",\n  ", "\n)\n")
    }
  }
}
diff --git 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/analysis/HoodieAnalysis.scala
 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/analysis/HoodieAnalysis.scala
index e6c7b8c1ab0..a0017883c48 100644
--- 
a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/analysis/HoodieAnalysis.scala
+++ 
b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/analysis/HoodieAnalysis.scala
@@ -570,6 +570,9 @@ case class HoodiePostAnalysisRule(sparkSession: 
SparkSession) extends Rule[Logic
       case AlterTableAddColumnsCommand(tableId, colsToAdd)
         if sparkAdapter.isHoodieTable(tableId, sparkSession) =>
           AlterHoodieTableAddColumnsCommand(tableId, colsToAdd)
+      case s: ShowCreateTableCommand
+        if sparkAdapter.isHoodieTable(s.table, sparkSession) =>
+        ShowHoodieCreateTableCommand(s.table)
       // Rewrite the AlterTableRenameCommand to AlterHoodieTableRenameCommand
       case AlterTableRenameCommand(oldName, newName, isView)
         if !isView && sparkAdapter.isHoodieTable(oldName, sparkSession) =>

Reply via email to