This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 26dbf651bf8c Revert "[SPARK-52709][SQL] Fix parsing of  STRUCT<>"
26dbf651bf8c is described below

commit 26dbf651bf8c2389aeea950816288e1db666c611
Author: Wenchen Fan <wenc...@databricks.com>
AuthorDate: Mon Aug 18 19:38:57 2025 +0800

    Revert "[SPARK-52709][SQL] Fix parsing of  STRUCT<>"
    
    This reverts commit 64cada1d25aaa466421b0f7f21de97967466684e.
---
 .../spark/sql/catalyst/parser/SqlBaseLexer.g4      |  2 +-
 .../spark/sql/catalyst/parser/SqlBaseParser.g4     |  2 +-
 .../sql/catalyst/parser/DataTypeAstBuilder.scala   |  2 +-
 .../spark/sql/execution/SparkSqlParserSuite.scala  | 75 +---------------------
 4 files changed, 5 insertions(+), 76 deletions(-)

diff --git a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4 b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
index fa0ad2865111..e402067926f2 100644
--- a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
+++ b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
@@ -518,7 +518,7 @@ ZONE: 'ZONE';
 
 EQ  : '=' | '==';
 NSEQ: '<=>';
-NEQ : '<>' {complex_type_level_counter == 0}?;
+NEQ : '<>';
 NEQJ: '!=';
 LT  : '<';
 LTE : '<=' | '!>';
diff --git a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
index c2bb192cf155..b77c90aa86ff 100644
--- a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
+++ b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
@@ -1379,7 +1379,7 @@ primitiveType
 dataType
     : complex=ARRAY (LT dataType GT)?                           #complexDataType
     | complex=MAP (LT dataType COMMA dataType GT)?              #complexDataType
-    | complex=STRUCT (LT complexColTypeList? GT)?               #complexDataType
+    | complex=STRUCT ((LT complexColTypeList? GT) | NEQ)?       #complexDataType
     | primitiveType                                             #primitiveDataType
     ;
 
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala
index b2b1bced39ca..beb7061a841a 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala
@@ -180,7 +180,7 @@ class DataTypeAstBuilder extends SqlBaseParserBaseVisitor[AnyRef] {
    * Create a complex DataType. Arrays, Maps and Structures are supported.
    */
   override def visitComplexDataType(ctx: ComplexDataTypeContext): DataType = withOrigin(ctx) {
-    if (ctx.LT() == null) {
+    if (ctx.LT() == null && ctx.NEQ() == null) {
       throw QueryParsingErrors.nestedTypeMissingElementTypeError(ctx.getText, ctx)
     }
     ctx.complex.getType match {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
index e4019b0a723b..94e60db67ac7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
@@ -23,7 +23,7 @@ import org.apache.spark.{SparkConf, SparkThrowable}
 import org.apache.spark.internal.config.ConfigEntry
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedAlias, UnresolvedAttribute, UnresolvedFunction, UnresolvedGenerator, UnresolvedHaving, UnresolvedRelation, UnresolvedStar}
-import org.apache.spark.sql.catalyst.expressions.{Ascending, AttributeReference, Cast, Concat, GreaterThan, Literal, NamedExpression, NullsFirst, ShiftRight, SortOrder, UnresolvedWindowExpression, UnspecifiedFrame, WindowSpecDefinition, WindowSpecReference}
+import org.apache.spark.sql.catalyst.expressions.{Ascending, AttributeReference, Concat, GreaterThan, Literal, NullsFirst, SortOrder, UnresolvedWindowExpression, UnspecifiedFrame, WindowSpecDefinition, WindowSpecReference}
 import org.apache.spark.sql.catalyst.parser.{AbstractParser, ParseException}
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.trees.TreePattern._
@@ -32,7 +32,7 @@ import org.apache.spark.sql.execution.command._
 import org.apache.spark.sql.execution.datasources.{CreateTempViewUsing, RefreshResource}
 import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}
 import org.apache.spark.sql.test.SharedSparkSession
-import org.apache.spark.sql.types.{DataType, IntegerType, NullType, StringType}
+import org.apache.spark.sql.types.StringType
 import org.apache.spark.util.ArrayImplicits._
 
 /**
@@ -1164,75 +1164,4 @@ class SparkSqlParserSuite extends AnalysisTest with SharedSparkSession {
       }
     }
   }
-
-  test("SPARK-52709: Parsing STRUCT (empty,nested,within complex types) 
followed by shiftRight") {
-
-    // Test valid complex data types, and their combinations.
-    val typeStringsToTest = Seq(
-      "STRUCT<>",                               // Empty struct
-      "STRUCT<a: STRUCT<b: INT>>",              // Nested struct
-      "STRUCT<c: ARRAY<INT>>",                  // Struct containing an array
-      "MAP<STRING, STRUCT<x: STRING, y: INT>>",  // Map containing a struct
-      "ARRAY<STRUCT<>>",                        // Array containing empty 
structs
-      "ARRAY<STRUCT<id: INT, name: STRING>>"    // Array containing non-empty 
structs
-    )
-
-    /**
-    * Helper function to generate a SQL CAST fragment and its corresponding
-    * expected expression for a given type string.
-    */
-    def createCastNullAsTypeExpression(typeString: String): (String, NamedExpression) = {
-      // Use the suite's 'parser' instance to parse the DataType
-      val dataType: DataType = parser.parseDataType(typeString)
-      val castExpr = Cast(Literal(null, NullType), dataType)
-      val expectedExpr = UnresolvedAlias(castExpr) // SparkSqlParserSuite expects UnresolvedAlias
-      val sqlFragment = s"CAST(null AS $typeString)"
-        (sqlFragment, expectedExpr)
-    }
-
-    // Generate the SQL fragments and their corresponding expected expressions for all CASTs
-    val castExpressionsData = typeStringsToTest.map(createCastNullAsTypeExpression)
-
-    // Extract just the SQL fragments for the SELECT statement
-    val selectClauses = castExpressionsData.map(_._1)
-
-    val sql =
-      s"""
-         |SELECT
-         |  ${selectClauses.mkString(",\n  ")},
-         |  4 >> 1
-      """.stripMargin
-
-    // Construct the list of ALL expected expressions for the Project node.
-    // This includes all the CAST expressions generated above, plus the ShiftRight expression.
-    val allExpectedExprs = castExpressionsData.map(_._2) :+
-      UnresolvedAlias(ShiftRight(Literal(4, IntegerType), Literal(1, IntegerType)))
-
-    // Define the expected logical plan
-    val expectedPlan = Project(
-      allExpectedExprs,
-      OneRowRelation()
-    )
-
-    assertEqual(sql, expectedPlan)
-  }
-
-  test("SPARK-52709-Invalid: Parsing should fail for empty ARRAY<> type") {
-    val sql = "SELECT CAST(null AS ARRAY<>)"
-    checkError(
-      exception = parseException(sql),
-      condition = "PARSE_SYNTAX_ERROR",
-      parameters = Map("error" -> "'<'", "hint" -> ": missing ')'")
-    )
-  }
-
-  test("SPARK-52709-Invalid: Parsing should fail for empty MAP<> type") {
-    val sql = "SELECT CAST(null AS MAP<>)"
-    checkError(
-      exception = parseException(sql),
-      condition = "PARSE_SYNTAX_ERROR",
-      parameters = Map("error" -> "'<'", "hint" -> ": missing ')'")
-    )
-  }
 }
-
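For readers skimming the grammar change above: with this revert, '<>' again always lexes as the NEQ token, and the restored `| NEQ` alternative in the `dataType` rule is what keeps `STRUCT<>` parseable, with visitComplexDataType accepting either LT or NEQ. Below is a minimal sketch of that behavior, assuming a Spark build that includes this revert; it uses only the public DataType.fromDDL helper, and the object name is ours, for illustration only:

    import org.apache.spark.sql.types.{DataType, StructType}

    object StructDdlSketch {
      def main(args: Array[String]): Unit = {
        // "STRUCT<>" tokenizes as STRUCT followed by NEQ ("<>"), which the
        // restored `complex=STRUCT ((LT complexColTypeList? GT) | NEQ)?`
        // alternative maps to an empty struct.
        val empty = DataType.fromDDL("STRUCT<>")
        assert(empty == StructType(Nil))

        // Non-empty structs still take the LT ... GT path.
        val nested = DataType.fromDDL("STRUCT<a: STRUCT<b: INT>>")
        println(nested.sql) // expected: STRUCT<a: STRUCT<b: INT>>
      }
    }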

