panbingkun commented on code in PR #46731:
URL: https://github.com/apache/spark/pull/46731#discussion_r1613020889


##########
sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala:
##########
@@ -1333,30 +1333,34 @@ class PlanResolutionSuite extends AnalysisTest {
             exception = intercept[AnalysisException] {
               parseAndResolve(sql3)
             },
-            errorClass = "_LEGACY_ERROR_TEMP_1331",
+            errorClass = "UNRESOLVED_COLUMN.WITH_SUGGESTION_AND_TABLE",
+            sqlState = "42703",
             parameters = Map(
-              "fieldName" -> "j",
-              "table" -> "spark_catalog.default.v1Table",
-              "schema" ->
-                """root
-                  | |-- i: integer (nullable = true)
-                  | |-- s: string (nullable = true)
-                  | |-- point: struct (nullable = true)
-                  | |    |-- x: integer (nullable = true)
-                  | |    |-- y: integer (nullable = true)
-                  |""".stripMargin),
+              "objectName" -> "`j`",
+              "tableName" -> "`spark_catalog`.`default`.`v1Table`",
+              "proposal" -> "`i`, `s`, `point`"
+              ),
             context = ExpectedContext(fragment = sql3, start = 0, stop = 55))
 
           val sql4 = s"ALTER TABLE $tblName ALTER COLUMN point.x TYPE bigint"
-          val e2 = intercept[AnalysisException] {
-            parseAndResolve(sql4)
-          }
           checkError(
-            exception = e2,
+            exception = intercept[AnalysisException] {
+              parseAndResolve(sql4)
+            },
             errorClass = "UNSUPPORTED_FEATURE.TABLE_OPERATION",
             sqlState = "0A000",
             parameters = Map("tableName" -> "`spark_catalog`.`default`.`v1Table`",
               "operation" -> "ALTER COLUMN with qualified column"))
+
+          val sql5 = s"ALTER TABLE $tblName ALTER COLUMN i SET NOT NULL"
+          checkError(
+            exception = intercept[AnalysisException] {
+              parseAndResolve(sql5)
+            },
+            errorClass = "UNSUPPORTED_FEATURE.TABLE_OPERATION",
+            sqlState = "0A000",
+            parameters = Map("tableName" -> "`spark_catalog`.`default`.`v1Table`",
+              "operation" -> "ALTER COLUMN specify NOT NULL"))

Review Comment:
   The operation name `ALTER COLUMN specify NOT NULL` reads oddly.
   What about `ALTER COLUMN ... SET NOT NULL`?
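   For illustration, a minimal sketch of how the `sql5` expectation might look if the `operation` parameter mirrored the SQL syntax as suggested above; the `"ALTER COLUMN ... SET NOT NULL"` wording is a hypothetical value, not what this PR currently emits:
   ```scala
   // Hypothetical expectation for sql5, assuming the error's `operation`
   // string were changed to follow the SQL syntax. The operation value
   // below is an assumption, not the analyzer's current output.
   checkError(
     exception = intercept[AnalysisException] {
       parseAndResolve(sql5)
     },
     errorClass = "UNSUPPORTED_FEATURE.TABLE_OPERATION",
     sqlState = "0A000",
     parameters = Map(
       "tableName" -> "`spark_catalog`.`default`.`v1Table`",
       "operation" -> "ALTER COLUMN ... SET NOT NULL"))
   ```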


