This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b01b88ad68a2 [SPARK-46629] Fix for STRUCT type DDL not picking up nullability and comment
b01b88ad68a2 is described below

commit b01b88ad68a264396001d99c63bf58fb56d2be68
Author: Vitalii Li <vitalii...@databricks.com>
AuthorDate: Thu Jan 18 17:28:45 2024 +0300

    [SPARK-46629] Fix for STRUCT type DDL not picking up nullability and comment
    
    ### What changes were proposed in this pull request?
    
    This change adds logic to generate correct DDL for nested fields in a STRUCT. In particular, instead of rendering each nested field as just its name and data type, it now appends the `NOT NULL` qualifier when the field is non-nullable and the field comment when one is present.
    
    For a table:
    ```
    CREATE TABLE t(field STRUCT<one: STRING NOT NULL, two: DOUBLE NOT NULL COMMENT 'comment'>);
    SHOW CREATE TABLE t;
    ```
    
    Before:
    ```
    CREATE TABLE t(field STRUCT<one: STRING, two: DOUBLE>)
    ```
    
    After:
    ```
    CREATE TABLE t(field STRUCT<one: STRING NOT NULL, two: DOUBLE NOT NULL COMMENT 'comment'>)
    ```
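    
    The same rendering can be exercised directly through the Scala API; a minimal sketch (not part of this patch) using the public `DataType.sql` on a hand-built schema:
    
    ```scala
    import org.apache.spark.sql.types._
    
    // Mirrors the table above: two non-nullable fields, one with a comment.
    val struct = StructType(Seq(
      StructField("one", StringType, nullable = false),
      StructField("two", DoubleType, nullable = false).withComment("comment")))
    
    // With this change the nested fields keep their qualifiers:
    // STRUCT<one: STRING NOT NULL, two: DOUBLE NOT NULL COMMENT 'comment'>
    println(struct.sql)
    ```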
    
    Closes #41016
    
    ### Why are the changes needed?
    
    To generate correct DDL: nested STRUCT fields previously lost their `NOT NULL` qualifiers and comments, so the generated DDL did not round-trip back to the original schema.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, we do not document the behavior of this command for the STRUCT case.
    
    ### How was this patch tested?
    
    New unit test.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #44644 from vitaliili-db/SPARK-46629.
    
    Authored-by: Vitalii Li <vitalii...@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../spark/sql/protobuf/ProtobufSerdeSuite.scala     |  4 +++-
 .../org/apache/spark/sql/types/StructField.scala    |  9 ++++++---
 .../expressions/ConditionalExpressionSuite.scala    |  5 +++--
 .../apache/spark/sql/types/StructTypeSuite.scala    | 16 ++++++++++++++++
 .../analyzer-results/ansi/literals.sql.out          |  2 +-
 .../sql-tests/analyzer-results/literals.sql.out     |  2 +-
 .../typeCoercion/native/mapconcat.sql.out           |  4 ++--
 .../analyzer-results/xml-functions.sql.out          |  2 +-
 .../sql-tests/results/ansi/literals.sql.out         |  2 +-
 .../resources/sql-tests/results/literals.sql.out    |  2 +-
 .../results/typeCoercion/native/mapconcat.sql.out   |  4 ++--
 .../sql-tests/results/xml-functions.sql.out         |  2 +-
 .../apache/spark/sql/DataFrameFunctionsSuite.scala  |  2 +-
 .../spark/sql/DataFrameSetOperationsSuite.scala     |  3 ++-
 .../apache/spark/sql/FileBasedDataSourceSuite.scala |  4 ++--
 .../apache/spark/sql/GeneratorFunctionSuite.scala   |  4 ++--
 .../scala/org/apache/spark/sql/SQLQuerySuite.scala  |  4 ++--
 .../sql/errors/QueryCompilationErrorsSuite.scala    |  6 +++---
 .../execution/command/v2/ShowCreateTableSuite.scala | 21 +++++++++++++++++++++
 .../org/apache/spark/sql/sources/InsertSuite.scala  |  2 +-
 20 files changed, 72 insertions(+), 28 deletions(-)

diff --git a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala
index 56a980d05fbd..03285c73f1ff 100644
--- a/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala
+++ b/connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala
@@ -127,9 +127,11 @@ class ProtobufSerdeSuite extends SharedSparkSession with ProtobufTestBase {
         "protobufType" -> "FieldMissingInProto",
         "toType" -> toSQLType(CATALYST_STRUCT)))
 
-    assertFailedConversionMessage(protoFile,
+    assertFailedConversionMessage(
+      protoFile,
       Serializer,
       BY_NAME,
+      nonnullCatalyst,
       errorClass = "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE",
       params = Map(
         "protobufType" -> "FieldMissingInProto",
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/types/StructField.scala b/sql/api/src/main/scala/org/apache/spark/sql/types/StructField.scala
index ca15d23b601e..66f9557db213 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/types/StructField.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/types/StructField.scala
@@ -148,11 +148,14 @@ case class StructField(
     .map(" COMMENT '" + _ + "'")
     .getOrElse("")
 
+  private lazy val nullDDL = if (nullable) "" else " NOT NULL"
+
   /**
   * Returns a string containing a schema in SQL format. For example the following value:
    * `StructField("eventId", IntegerType)` will be converted to `eventId`: INT.
    */
-  private[sql] def sql = s"${QuotingUtils.quoteIfNeeded(name)}: ${dataType.sql}$getDDLComment"
+  private[sql] def sql =
+    s"${QuotingUtils.quoteIfNeeded(name)}: 
${dataType.sql}$nullDDL$getDDLComment"
 
   /**
   * Returns a string containing a schema in DDL format. For example, the following value:
@@ -161,7 +164,7 @@ case class StructField(
    * @since 2.4.0
    */
   def toDDL: String = {
-    val nullString = if (nullable) "" else " NOT NULL"
-    s"${QuotingUtils.quoteIfNeeded(name)} 
${dataType.sql}${nullString}$getDDLDefault$getDDLComment"
+    s"${QuotingUtils.quoteIfNeeded(name)} ${dataType.sql}$nullDDL" +
+      s"$getDDLDefault$getDDLComment"
   }
 }
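
A minimal round-trip sketch (not part of the patch) of what the shared `nullDDL` now guarantees for nested structs, using only the public `toDDL` and `StructType.fromDDL` APIs:

```scala
import org.apache.spark.sql.types._

// A struct column whose nested fields carry NOT NULL and a comment,
// mirroring the example from the PR description.
val schema = StructType(Seq(
  StructField("field", StructType(Seq(
    StructField("one", StringType, nullable = false),
    StructField("two", DoubleType, nullable = false).withComment("comment"))))))

// field STRUCT<one: STRING NOT NULL, two: DOUBLE NOT NULL COMMENT 'comment'>
println(schema.toDDL)

// Before this fix the nested NOT NULL was dropped, so the generated DDL
// did not parse back to an equal schema.
assert(StructType.fromDDL(schema.toDDL) == schema)
```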
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ConditionalExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ConditionalExpressionSuite.scala
index e7f7c370dcb9..7ffb32121702 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ConditionalExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ConditionalExpressionSuite.scala
@@ -242,7 +242,7 @@ class ConditionalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
       errorSubClass = "DATA_DIFF_TYPES",
       messageParameters = Map(
         "functionName" -> "`casewhen`",
-        "dataType" -> "[\"STRUCT<x: INT>\", \"STRUCT<y: INT>\"]")))
+        "dataType" -> "[\"STRUCT<x: INT NOT NULL>\", \"STRUCT<y: INT NOT 
NULL>\"]")))
 
     val checkResult2 = CaseWhen(Seq((Literal.FalseLiteral, caseVal1),
       (Literal.FalseLiteral, caseVal2)), Some(elseVal)).checkInputDataTypes()
@@ -250,7 +250,8 @@ class ConditionalExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
       errorSubClass = "DATA_DIFF_TYPES",
       messageParameters = Map(
         "functionName" -> "`casewhen`",
-        "dataType" -> "[\"STRUCT<x: INT>\", \"STRUCT<y: INT>\", \"STRUCT<z: 
INT>\"]")))
+        "dataType" -> ("[\"STRUCT<x: INT NOT NULL>\", " +
+          "\"STRUCT<y: INT NOT NULL>\", \"STRUCT<z: INT NOT NULL>\"]"))))
   }
 
   test("SPARK-27917 test semantic equals of CaseWhen") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
index 7c28fd8e2a03..1aa0d515d8f9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
@@ -584,4 +584,20 @@ class StructTypeSuite extends SparkFunSuite with SQLHelper {
       ResolveDefaultColumns.existenceDefaultValues(source4)
     }.getMessage.contains(error))
   }
+
+  test("SPARK-46629: Test STRUCT DDL with NOT NULL round trip") {
+    val struct = StructType(
+      Seq(
+        StructField(
+          "b",
+          StructType(
+            Seq(StructField("c", StringType, nullable = 
false).withComment("struct comment"))),
+          nullable = false),
+        StructField("b", StringType, nullable = false),
+        StructField("c", StringType).withComment("nullable comment")))
+    assert(
+      struct.toDDL == "b STRUCT<c: STRING NOT NULL COMMENT 'struct comment'> NOT NULL," +
+        "b STRING NOT NULL,c STRING COMMENT 'nullable comment'")
+    assert(fromDDL(struct.toDDL) === struct)
+  }
 }
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/literals.sql.out
index 48368ca11723..83d0ff3f2edf 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/literals.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/literals.sql.out
@@ -590,7 +590,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "sqlState" : "42K09",
   "messageParameters" : {
     "inputSql" : "\"named_struct(a, 1, b, spark)\"",
-    "inputType" : "\"STRUCT<a: INT, b: STRING>\"",
+    "inputType" : "\"STRUCT<a: INT NOT NULL, b: STRING NOT NULL>\"",
     "paramIndex" : "1",
     "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL 
YEAR TO MONTH\" or \"INTERVAL\")",
     "sqlExpr" : "\"(+ named_struct(a, 1, b, spark))\""
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/literals.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/literals.sql.out
index 48368ca11723..83d0ff3f2edf 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/literals.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/literals.sql.out
@@ -590,7 +590,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "sqlState" : "42K09",
   "messageParameters" : {
     "inputSql" : "\"named_struct(a, 1, b, spark)\"",
-    "inputType" : "\"STRUCT<a: INT, b: STRING>\"",
+    "inputType" : "\"STRUCT<a: INT NOT NULL, b: STRING NOT NULL>\"",
     "paramIndex" : "1",
     "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL 
YEAR TO MONTH\" or \"INTERVAL\")",
     "sqlExpr" : "\"(+ named_struct(a, 1, b, spark))\""
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapconcat.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapconcat.sql.out
index 753f79f3f712..f4c932fa29f9 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapconcat.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/mapconcat.sql.out
@@ -184,7 +184,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "sqlState" : "42K09",
   "messageParameters" : {
-    "dataType" : "(\"MAP<INT, INT>\" or \"MAP<STRUCT<col1: STRING, col2: INT>, 
STRUCT<col1: STRING, col2: INT>>\")",
+    "dataType" : "(\"MAP<INT, INT>\" or \"MAP<STRUCT<col1: STRING NOT NULL, 
col2: INT NOT NULL>, STRUCT<col1: STRING NOT NULL, col2: INT NOT NULL>>\")",
     "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(int_map1, struct_map2)\""
   },
@@ -208,7 +208,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "sqlState" : "42K09",
   "messageParameters" : {
-    "dataType" : "(\"MAP<STRUCT<col1: STRING, col2: INT>, STRUCT<col1: STRING, 
col2: INT>>\" or \"MAP<ARRAY<STRING>, ARRAY<STRING>>\")",
+    "dataType" : "(\"MAP<STRUCT<col1: STRING NOT NULL, col2: INT NOT NULL>, 
STRUCT<col1: STRING NOT NULL, col2: INT NOT NULL>>\" or \"MAP<ARRAY<STRING>, 
ARRAY<STRING>>\")",
     "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(struct_map1, array_map2)\""
   },
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out
index 51cf3d976f68..3fbc8d19b94a 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out
@@ -22,7 +22,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "sqlState" : "42K09",
   "messageParameters" : {
     "inputSql" : "\"array(named_struct(a, 1, b, 2))\"",
-    "inputType" : "\"ARRAY<STRUCT<a: INT, b: INT>>\"",
+    "inputType" : "\"ARRAY<STRUCT<a: INT NOT NULL, b: INT NOT NULL>>\"",
     "paramIndex" : "1",
     "requiredType" : "\"STRUCT\"",
     "sqlExpr" : "\"to_xml(array(named_struct(a, 1, b, 2)))\""
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
index 3006d30d0a05..6e2c8a65206e 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out
@@ -660,7 +660,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "sqlState" : "42K09",
   "messageParameters" : {
     "inputSql" : "\"named_struct(a, 1, b, spark)\"",
-    "inputType" : "\"STRUCT<a: INT, b: STRING>\"",
+    "inputType" : "\"STRUCT<a: INT NOT NULL, b: STRING NOT NULL>\"",
     "paramIndex" : "1",
     "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL 
YEAR TO MONTH\" or \"INTERVAL\")",
     "sqlExpr" : "\"(+ named_struct(a, 1, b, spark))\""
diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
index 3006d30d0a05..6e2c8a65206e 100644
--- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out
@@ -660,7 +660,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "sqlState" : "42K09",
   "messageParameters" : {
     "inputSql" : "\"named_struct(a, 1, b, spark)\"",
-    "inputType" : "\"STRUCT<a: INT, b: STRING>\"",
+    "inputType" : "\"STRUCT<a: INT NOT NULL, b: STRING NOT NULL>\"",
     "paramIndex" : "1",
     "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL 
YEAR TO MONTH\" or \"INTERVAL\")",
     "sqlExpr" : "\"(+ named_struct(a, 1, b, spark))\""
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
index 5907f700877f..8fd398ff87f0 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
@@ -147,7 +147,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "sqlState" : "42K09",
   "messageParameters" : {
-    "dataType" : "(\"MAP<INT, INT>\" or \"MAP<STRUCT<col1: STRING, col2: INT>, 
STRUCT<col1: STRING, col2: INT>>\")",
+    "dataType" : "(\"MAP<INT, INT>\" or \"MAP<STRUCT<col1: STRING NOT NULL, 
col2: INT NOT NULL>, STRUCT<col1: STRING NOT NULL, col2: INT NOT NULL>>\")",
     "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(int_map1, struct_map2)\""
   },
@@ -173,7 +173,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "sqlState" : "42K09",
   "messageParameters" : {
-    "dataType" : "(\"MAP<STRUCT<col1: STRING, col2: INT>, STRUCT<col1: STRING, 
col2: INT>>\" or \"MAP<ARRAY<STRING>, ARRAY<STRING>>\")",
+    "dataType" : "(\"MAP<STRUCT<col1: STRING NOT NULL, col2: INT NOT NULL>, 
STRUCT<col1: STRING NOT NULL, col2: INT NOT NULL>>\" or \"MAP<ARRAY<STRING>, 
ARRAY<STRING>>\")",
     "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(struct_map1, array_map2)\""
   },
diff --git a/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out
index 704addb7a930..0194a676ad3b 100644
--- a/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out
@@ -26,7 +26,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException
   "sqlState" : "42K09",
   "messageParameters" : {
     "inputSql" : "\"array(named_struct(a, 1, b, 2))\"",
-    "inputType" : "\"ARRAY<STRUCT<a: INT, b: INT>>\"",
+    "inputType" : "\"ARRAY<STRUCT<a: INT NOT NULL, b: INT NOT NULL>>\"",
     "paramIndex" : "1",
     "requiredType" : "\"STRUCT\"",
     "sqlExpr" : "\"to_xml(array(named_struct(a, 1, b, 2)))\""
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
index bb024b79598e..ad78916bd94c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
@@ -1988,7 +1988,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
         "sqlExpr" -> "\"reverse(struct(1, a))\"",
         "paramIndex" -> "1",
         "inputSql" -> "\"struct(1, a)\"",
-        "inputType" -> "\"STRUCT<col1: INT, col2: STRING>\"",
+        "inputType" -> "\"STRUCT<col1: INT NOT NULL, col2: STRING NOT NULL>\"",
         "requiredType" -> "(\"STRING\" or \"ARRAY\")"
       ),
      queryContext = Array(ExpectedContext("", "", 7, 29, "reverse(struct(1, 'a'))"))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSetOperationsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSetOperationsSuite.scala
index bbb1561bb695..d6cf77572731 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSetOperationsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSetOperationsSuite.scala
@@ -1039,7 +1039,8 @@ class DataFrameSetOperationsSuite extends QueryTest
       parameters = Map(
         "tableOrdinalNumber" -> "second",
         "columnOrdinalNumber" -> "third",
-        "dataType2" -> "\"STRUCT<c1: INT, c2: INT, c3: STRUCT<c3: INT>>\"",
+        "dataType2" ->
+          "\"STRUCT<c1: INT NOT NULL, c2: INT NOT NULL, c3: STRUCT<c3: INT NOT 
NULL>>\"",
         "operator" -> "UNION",
         "hint" -> "",
         "dataType1" -> "\"STRUCT<c1: INT, c2: INT, c3: STRUCT<c3: INT, c5: 
INT>>\"")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/FileBasedDataSourceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/FileBasedDataSourceSuite.scala
index fc68f27c67ad..03b8ca32f561 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/FileBasedDataSourceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/FileBasedDataSourceSuite.scala
@@ -322,7 +322,7 @@ class FileBasedDataSourceSuite extends QueryTest
         errorClass = "UNSUPPORTED_DATA_TYPE_FOR_DATASOURCE",
         parameters = Map(
           "columnName" -> "`struct(a)`",
-          "columnType" -> "\"STRUCT<a: INT>\"",
+          "columnType" -> "\"STRUCT<a: INT NOT NULL>\"",
           "format" -> "Text")
       )
 
@@ -404,7 +404,7 @@ class FileBasedDataSourceSuite extends QueryTest
         errorClass = "UNSUPPORTED_DATA_TYPE_FOR_DATASOURCE",
         parameters = Map(
           "columnName" -> "`struct(a, b)`",
-          "columnType" -> "\"STRUCT<a: INT, b: STRING>\"",
+          "columnType" -> "\"STRUCT<a: INT NOT NULL, b: STRING>\"",
           "format" -> "CSV")
       )
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala
index 2ab651237206..25287cce368a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala
@@ -334,7 +334,7 @@ class GeneratorFunctionSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"array(struct(a), struct(b))\"",
         "functionName" -> "`array`",
-        "dataType" -> "(\"STRUCT<a: INT>\" or \"STRUCT<b: INT>\")"),
+        "dataType" -> "(\"STRUCT<a: INT NOT NULL>\" or \"STRUCT<b: INT NOT 
NULL>\")"),
       context = ExpectedContext(
         fragment = "array",
         callSitePattern = getCurrentClassCallSitePattern))
@@ -352,7 +352,7 @@ class GeneratorFunctionSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"array(struct(a), struct(2))\"",
         "functionName" -> "`array`",
-        "dataType" -> "(\"STRUCT<a: INT>\" or \"STRUCT<col1: INT>\")"),
+        "dataType" -> "(\"STRUCT<a: INT NOT NULL>\" or \"STRUCT<col1: INT NOT 
NULL>\")"),
       context = ExpectedContext(
         fragment = "array",
         callSitePattern = getCurrentClassCallSitePattern))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index b603c95fb30d..414a4d7197f9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -2731,10 +2731,10 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
         parameters = Map(
           "tableOrdinalNumber" -> "second",
           "columnOrdinalNumber" -> "first",
-          "dataType2" -> "\"STRUCT<a: INT>\"",
+          "dataType2" -> "\"STRUCT<a: INT NOT NULL>\"",
           "operator" -> "EXCEPT",
           "hint" -> "",
-          "dataType1" -> "\"STRUCT<A: INT>\""),
+          "dataType1" -> "\"STRUCT<A: INT NOT NULL>\""),
         context = ExpectedContext(
           fragment = "SELECT struct(1 a) EXCEPT (SELECT struct(2 A))",
           start = 0,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index 0ed58626b099..3c397bd6a4e1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -403,7 +403,7 @@ class QueryCompilationErrorsSuite
         spark.read.schema(schema).json(spark.emptyDataset[String])
       },
       errorClass = "INVALID_JSON_SCHEMA_MAP_TYPE",
-      parameters = Map("jsonSchema" -> "\"STRUCT<map: MAP<INT, INT>>\"")
+      parameters = Map("jsonSchema" -> "\"STRUCT<map: MAP<INT, INT> NOT NULL>\"")
     )
   }
 
@@ -622,7 +622,7 @@ class QueryCompilationErrorsSuite
       exception = e1,
       errorClass = "UNSUPPORTED_DESERIALIZER.FIELD_NUMBER_MISMATCH",
       parameters = Map(
-        "schema" -> "\"STRUCT<a: STRING, b: INT>\"",
+        "schema" -> "\"STRUCT<a: STRING, b: INT NOT NULL>\"",
         "ordinal" -> "3"))
 
     val e2 = intercept[AnalysisException] {
@@ -631,7 +631,7 @@ class QueryCompilationErrorsSuite
     checkError(
       exception = e2,
       errorClass = "UNSUPPORTED_DESERIALIZER.FIELD_NUMBER_MISMATCH",
-      parameters = Map("schema" -> "\"STRUCT<a: STRING, b: INT>\"",
+      parameters = Map("schema" -> "\"STRUCT<a: STRING, b: INT NOT NULL>\"",
         "ordinal" -> "1"))
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
index adda9dcfffe4..f72127cbd1de 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
@@ -160,4 +160,25 @@ class ShowCreateTableSuite extends command.ShowCreateTableSuiteBase with Command
       )
     }
   }
+
+  test("SPARK-46629: show struct fields with NOT NULL and comment") {
+    withNamespaceAndTable(ns, table) { t =>
+      sql(s"""
+             |CREATE TABLE $t (
+             |  a struct<b: bigint COMMENT 'comment', c: struct<d: string NOT NULL, e: string>>
+             |)
+             |USING parquet
+             |COMMENT 'This is a comment'
+        """.stripMargin)
+      val showDDL = getShowCreateDDL(t)
+      assert(
+        showDDL === Array(
+          s"CREATE TABLE $fullName (",
+          "a STRUCT<b: BIGINT COMMENT 'comment', c: STRUCT<d: STRING NOT NULL, 
e: STRING>>)",
+          "USING parquet",
+          "COMMENT 'This is a comment'"
+        )
+      )
+    }
+  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index 76073a108a3c..be175eaeff8a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -2077,7 +2077,7 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
             "colName" -> "`s`",
             "expectedType" -> "\"STRUCT<x: BOOLEAN, y: STRING>\"",
             "defaultValue" -> "struct(42, 56)",
-            "actualType" -> "\"STRUCT<col1: INT, col2: INT>\""))
+            "actualType" -> "\"STRUCT<col1: INT NOT NULL, col2: INT NOT 
NULL>\""))
       }
     }
   }

