This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 67abc430140 [SPARK-44143][SQL][TESTS] Use checkError() to check Exception in *DDL*Suite
67abc430140 is described below

commit 67abc430140558e60c785b158e9199dc884fb15c
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Mon Jun 26 09:28:02 2023 +0300

    [SPARK-44143][SQL][TESTS] Use checkError() to check Exception in *DDL*Suite
    
    ### What changes were proposed in this pull request?
    The PR aims to use `checkError()` to check `Exception` in `*DDL*Suite`, including:
    - sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite
    - sql/core/src/test/scala/org/apache/spark/sql/sources/DDLSourceLoadSuite
    - sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite
    - sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/Hive_2_1_DDLSuite
    
    ### Why are the changes needed?
    Migrating to `checkError()` makes the tests independent of the error message text.
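    
    For illustration, one of the migrated checks from DDLSuite.scala, shown as a minimal before/after sketch (the surrounding test harness is omitted):
    
    ```scala
    // Before: brittle assertion on a substring of the error message.
    val e = intercept[AnalysisException] {
      sql("DROP TEMPORARY FUNCTION year")
    }
    assert(e.getMessage.contains("Cannot drop built-in function 'year'"))
    
    // After: structural check against the error class and its message parameters.
    checkError(
      exception = intercept[AnalysisException] {
        sql("DROP TEMPORARY FUNCTION year")
      },
      errorClass = "_LEGACY_ERROR_TEMP_1255",
      parameters = Map("functionName" -> "year"))
    ```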
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    - Manual testing.
    - Passed GA.
    
    Closes #41699 from panbingkun/DDLSuite.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../spark/sql/execution/command/DDLSuite.scala     | 454 ++++++++----
 .../spark/sql/sources/DDLSourceLoadSuite.scala     |  30 +-
 .../spark/sql/hive/execution/HiveDDLSuite.scala    | 769 ++++++++++++++-------
 .../sql/hive/execution/Hive_2_1_DDLSuite.scala     |  17 +-
 4 files changed, 865 insertions(+), 405 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 21e6980db8f..dd126027b36 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -189,10 +189,13 @@ class InMemoryCatalogedDDLSuite extends DDLSuite with SharedSparkSession {
       sql("CREATE TABLE s(a INT, b INT) USING parquet")
       val source = catalog.getTableMetadata(TableIdentifier("s"))
       assert(source.provider == Some("parquet"))
-      val e = intercept[AnalysisException] {
-        sql("CREATE TABLE t LIKE s USING org.apache.spark.sql.hive.orc")
-      }.getMessage
-      assert(e.contains("Hive built-in ORC data source must be used with Hive support enabled"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("CREATE TABLE t LIKE s USING org.apache.spark.sql.hive.orc")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1138",
+        parameters = Map.empty
+      )
     }
   }
 
@@ -284,13 +287,6 @@ trait DDLSuiteBase extends SQLTestUtils {
     }
   }
 
-  protected def assertUnsupported(query: String): Unit = {
-    val e = intercept[AnalysisException] {
-      sql(query)
-    }
-    assert(e.getMessage.toLowerCase(Locale.ROOT).contains("operation not allowed"))
-  }
-
   protected def maybeWrapException[T](expectException: Boolean)(body: => T): Unit = {
     if (expectException) intercept[AnalysisException] { body } else body
   }
@@ -431,9 +427,11 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
            |$partitionClause
          """.stripMargin
       if (userSpecifiedSchema.isEmpty && userSpecifiedPartitionCols.nonEmpty) {
-        val e = intercept[AnalysisException](sql(sqlCreateTable)).getMessage
-        assert(e.contains(
-          "not allowed to specify partition columns when the table schema is not defined"))
+        checkError(
+          exception = intercept[AnalysisException](sql(sqlCreateTable)),
+          errorClass = null,
+          parameters = Map.empty
+        )
       } else {
         sql(sqlCreateTable)
         val tableMetadata = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tabName))
@@ -615,17 +613,21 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
             .option("path", dir1.getCanonicalPath)
             .saveAsTable("path_test")
 
-          val ex = intercept[AnalysisException] {
-            Seq((3L, "c")).toDF("v1", "v2")
-              .write
-              .mode(SaveMode.Append)
-              .format("json")
-              .option("path", dir2.getCanonicalPath)
-              .saveAsTable("path_test")
-          }.getMessage
-          assert(ex.contains(
-            s"The location of the existing table `$SESSION_CATALOG_NAME`.`default`.`path_test`"))
-
+          checkErrorMatchPVals(
+            exception = intercept[AnalysisException] {
+              Seq((3L, "c")).toDF("v1", "v2")
+                .write
+                .mode(SaveMode.Append)
+                .format("json")
+                .option("path", dir2.getCanonicalPath)
+                .saveAsTable("path_test")
+            },
+            errorClass = "_LEGACY_ERROR_TEMP_1160",
+            parameters = Map(
+              "identifier" -> s"`$SESSION_CATALOG_NAME`.`default`.`path_test`",
+              "existingTableLoc" -> ".*",
+              "tableDescLoc" -> ".*")
+          )
           checkAnswer(
             spark.table("path_test"), Row(1L, "a") :: Nil)
         }
@@ -792,14 +794,16 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
           Row("1997", "Ford") :: Nil)
 
         // Fails if creating a new view with the same name
-        intercept[TempTableAlreadyExistsException] {
-          sql(
-            s"""
-               |CREATE TEMPORARY VIEW testview
-               |USING org.apache.spark.sql.execution.datasources.csv.CSVFileFormat
-               |OPTIONS (PATH '${tmpFile.toURI}')
-             """.stripMargin)
-        }
+        checkError(
+          exception = intercept[TempTableAlreadyExistsException] {
+            sql(
+              s"""
+                 |CREATE TEMPORARY VIEW testview
+                 |USING org.apache.spark.sql.execution.datasources.csv.CSVFileFormat
+                 |OPTIONS (PATH '${tmpFile.toURI}')
+               """.stripMargin)},
+          errorClass = "TEMP_TABLE_OR_VIEW_ALREADY_EXISTS",
+          parameters = Map("relationName" -> "`testview`"))
       }
     }
   }
@@ -817,12 +821,16 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
           |)
         """.stripMargin)
 
-      val e = intercept[AnalysisException] {
-        sql("ALTER TABLE tab1 RENAME TO default.tab2")
-      }
-      assert(e.getMessage.contains(
-        s"RENAME TEMPORARY VIEW from '`tab1`' to '`default`.`tab2`': " +
-          "cannot specify database name 'default' in the destination table"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("ALTER TABLE tab1 RENAME TO default.tab2")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1074",
+        parameters = Map(
+          "oldName" -> "`tab1`",
+          "newName" -> "`default`.`tab2`",
+          "db" -> "default")
+      )
 
       val catalog = spark.sessionState.catalog
       assert(catalog.listTables("default") == Seq(TableIdentifier("tab1")))
@@ -842,12 +850,15 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
           |)
         """.stripMargin)
 
-      val e = intercept[AnalysisException] {
-        sql("ALTER TABLE view1 RENAME TO default.tab2")
-      }
-      assert(e.getMessage.contains(
-        s"RENAME TEMPORARY VIEW from '`view1`' to '`default`.`tab2`': " +
-          "cannot specify database name 'default' in the destination table"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("ALTER TABLE view1 RENAME TO default.tab2")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1074",
+        parameters = Map(
+          "oldName" -> "`view1`",
+          "newName" -> "`default`.`tab2`",
+          "db" -> "default"))
 
       val catalog = spark.sessionState.catalog
       assert(catalog.listTables("default") == Seq(TableIdentifier("view1")))
@@ -863,7 +874,11 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
       checkErrorTableNotFound(e, "`tab1`")
       sql("ALTER VIEW tab2 RENAME TO tab1")
       checkAnswer(spark.table("tab1"), spark.range(10).toDF())
-      intercept[AnalysisException] { spark.table("tab2") }
+      checkError(
+        exception = intercept[AnalysisException] { spark.table("tab2") },
+        errorClass = "TABLE_OR_VIEW_NOT_FOUND",
+        parameters = Map("relationName" -> "`tab2`")
+      )
     }
   }
 
@@ -943,10 +958,38 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
     val tableIdent = TableIdentifier("tab1", Some("dbx"))
     createDatabase(catalog, "dbx")
     createTable(catalog, tableIdent)
-    assertUnsupported("ALTER TABLE dbx.tab1 CLUSTERED BY (blood, lemon, grape) INTO 11 BUCKETS")
-    assertUnsupported("ALTER TABLE dbx.tab1 CLUSTERED BY (fuji) SORTED BY (grape) INTO 5 BUCKETS")
-    assertUnsupported("ALTER TABLE dbx.tab1 NOT CLUSTERED")
-    assertUnsupported("ALTER TABLE dbx.tab1 NOT SORTED")
+    val sql1 = "ALTER TABLE dbx.tab1 CLUSTERED BY (blood, lemon, grape) INTO 11 BUCKETS"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql1)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "ALTER TABLE CLUSTERED BY"),
+      context = ExpectedContext(fragment = sql1, start = 0, stop = 70))
+    val sql2 = "ALTER TABLE dbx.tab1 CLUSTERED BY (fuji) SORTED BY (grape) INTO 5 BUCKETS"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql2)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "ALTER TABLE CLUSTERED BY"),
+      context = ExpectedContext(fragment = sql2, start = 0, stop = 72))
+    val sql3 = "ALTER TABLE dbx.tab1 NOT CLUSTERED"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql3)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "ALTER TABLE NOT CLUSTERED"),
+      context = ExpectedContext(fragment = sql3, start = 0, stop = 33))
+    val sql4 = "ALTER TABLE dbx.tab1 NOT SORTED"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql4)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "ALTER TABLE NOT SORTED"),
+      context = ExpectedContext(fragment = sql4, start = 0, stop = 30))
   }
 
   test("alter table: skew is not supported") {
@@ -954,20 +997,68 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
     val tableIdent = TableIdentifier("tab1", Some("dbx"))
     createDatabase(catalog, "dbx")
     createTable(catalog, tableIdent)
-    assertUnsupported("ALTER TABLE dbx.tab1 SKEWED BY (dt, country) ON " +
-      "(('2008-08-08', 'us'), ('2009-09-09', 'uk'), ('2010-10-10', 'cn'))")
-    assertUnsupported("ALTER TABLE dbx.tab1 SKEWED BY (dt, country) ON " +
-      "(('2008-08-08', 'us'), ('2009-09-09', 'uk')) STORED AS DIRECTORIES")
-    assertUnsupported("ALTER TABLE dbx.tab1 NOT SKEWED")
-    assertUnsupported("ALTER TABLE dbx.tab1 NOT STORED AS DIRECTORIES")
+    val sql1 = "ALTER TABLE dbx.tab1 SKEWED BY (dt, country) ON " +
+      "(('2008-08-08', 'us'), ('2009-09-09', 'uk'), ('2010-10-10', 'cn'))"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql1)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "ALTER TABLE SKEWED BY"),
+      context = ExpectedContext(fragment = sql1, start = 0, stop = 113)
+    )
+    val sql2 = "ALTER TABLE dbx.tab1 SKEWED BY (dt, country) ON " +
+      "(('2008-08-08', 'us'), ('2009-09-09', 'uk')) STORED AS DIRECTORIES"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql2)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "ALTER TABLE SKEWED BY"),
+      context = ExpectedContext(fragment = sql2, start = 0, stop = 113)
+    )
+    val sql3 = "ALTER TABLE dbx.tab1 NOT SKEWED"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql3)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "ALTER TABLE NOT SKEWED"),
+      context = ExpectedContext(fragment = sql3, start = 0, stop = 30)
+    )
+    val sql4 = "ALTER TABLE dbx.tab1 NOT STORED AS DIRECTORIES"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql4)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "ALTER TABLE NOT STORED AS DIRECTORIES"),
+      context = ExpectedContext(fragment = sql4, start = 0, stop = 45)
+    )
   }
 
   test("alter table: add partition is not supported for views") {
-    assertUnsupported("ALTER VIEW dbx.tab1 ADD IF NOT EXISTS PARTITION (b='2')")
+    val sql1 = "ALTER VIEW dbx.tab1 ADD IF NOT EXISTS PARTITION (b='2')"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql1)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "ALTER VIEW ... ADD PARTITION"),
+      context = ExpectedContext(fragment = sql1, start = 0, stop = 54)
+    )
   }
 
   test("alter table: drop partition is not supported for views") {
-    assertUnsupported("ALTER VIEW dbx.tab1 DROP IF EXISTS PARTITION (b='2')")
+    val sql1 = "ALTER VIEW dbx.tab1 DROP IF EXISTS PARTITION (b='2')"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql1)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "ALTER VIEW ... DROP PARTITION"),
+      context = ExpectedContext(fragment = sql1, start = 0, stop = 51)
+    )
   }
 
   test("drop view - temporary view") {
@@ -1032,9 +1123,14 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
     sql("ALTER TABLE tab1 SET TBLPROPERTIES ('kor' = 'belle', 'kar' = 'bol')")
     assert(getProps == Map("andrew" -> "or14", "kor" -> "belle", "kar" -> "bol"))
     // table to alter does not exist
-    intercept[AnalysisException] {
-      sql("ALTER TABLE does_not_exist SET TBLPROPERTIES ('winner' = 'loser')")
-    }
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("ALTER TABLE does_not_exist SET TBLPROPERTIES ('winner' = 'loser')")
+      },
+      errorClass = "TABLE_OR_VIEW_NOT_FOUND",
+      parameters = Map("relationName" -> "`does_not_exist`"),
+      context = ExpectedContext(fragment = "does_not_exist", start = 12, stop = 25)
+    )
   }
 
   protected def testUnsetProperties(isDatasourceTable: Boolean): Unit = {
@@ -1061,14 +1157,23 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
     sql("ALTER TABLE tab1 UNSET TBLPROPERTIES ('p')")
     assert(getProps == Map("c" -> "lan", "x" -> "y"))
     // table to alter does not exist
-    intercept[AnalysisException] {
-      sql("ALTER TABLE does_not_exist UNSET TBLPROPERTIES ('c' = 'lan')")
-    }
+    val sql1 = "ALTER TABLE does_not_exist UNSET TBLPROPERTIES ('c' = 'lan')"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql1)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_0035",
+      parameters = Map("message" -> "Values should not be specified for key(s): [c]"),
+      context = ExpectedContext(fragment = sql1, start = 0, stop = 59)
+    )
     // property to unset does not exist
-    val e = intercept[AnalysisException] {
-      sql("ALTER TABLE tab1 UNSET TBLPROPERTIES ('c', 'xyz')")
-    }
-    assert(e.getMessage.contains("xyz"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("ALTER TABLE tab1 UNSET TBLPROPERTIES ('c', 'xyz')")
+      },
+      errorClass = "UNSET_NONEXISTENT_PROPERTIES",
+      parameters = Map("properties" -> "`xyz`", "table" -> "`spark_catalog`.`dbx`.`tab1`")
+    )
     // property to unset does not exist, but "IF EXISTS" is specified
     sql("ALTER TABLE tab1 UNSET TBLPROPERTIES IF EXISTS ('c', 'xyz')")
     assert(getProps == Map("x" -> "y"))
@@ -1100,20 +1205,27 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
     Seq("true", "false").foreach { caseSensitive =>
       withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive) {
         // partition to add already exists
-        var e = intercept[AnalysisException] {
-          sql("DROP TEMPORARY FUNCTION year")
-        }
-        assert(e.getMessage.contains("Cannot drop built-in function 'year'"))
-
-        e = intercept[AnalysisException] {
-          sql("DROP TEMPORARY FUNCTION YeAr")
-        }
-        assert(e.getMessage.contains("Cannot drop built-in function 'YeAr'"))
-
-        e = intercept[AnalysisException] {
-          sql("DROP TEMPORARY FUNCTION `YeAr`")
-        }
-        assert(e.getMessage.contains("Cannot drop built-in function 'YeAr'"))
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql("DROP TEMPORARY FUNCTION year")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1255",
+          parameters = Map("functionName" -> "year")
+        )
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql("DROP TEMPORARY FUNCTION YeAr")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1255",
+          parameters = Map("functionName" -> "YeAr")
+        )
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql("DROP TEMPORARY FUNCTION `YeAr`")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1255",
+          parameters = Map("functionName" -> "YeAr")
+        )
       }
     }
   }
@@ -1237,11 +1349,13 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
     withTable("tab1") {
       spark.range(10).write.saveAsTable("tab1")
       withView("view1") {
-        val e = intercept[AnalysisException] {
-          sql("CREATE TEMPORARY VIEW view1 (col1, col3) AS SELECT * FROM tab1")
-        }.getMessage
-        assert(e.contains("the SELECT clause (num: `1`) does not match")
-          && e.contains("CREATE VIEW (num: `2`)"))
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql("CREATE TEMPORARY VIEW view1 (col1, col3) AS SELECT * FROM tab1")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1277",
+          parameters = Map("analyzedPlanLength" -> "1", "userSpecifiedColumnsLength" -> "2")
+        )
       }
     }
   }
@@ -1286,17 +1400,37 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
     withTable("partitionedTable") {
       df.write.mode("overwrite").partitionBy("a", "b").saveAsTable("partitionedTable")
       // Misses some partition columns
-      intercept[AnalysisException] {
-        df.write.mode("append").partitionBy("a").saveAsTable("partitionedTable")
-      }
+      checkError(
+        exception = intercept[AnalysisException] {
+          df.write.mode("append").partitionBy("a").saveAsTable("partitionedTable")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1163",
+        parameters = Map(
+          "tableName" -> "spark_catalog.default.partitionedtable",
+          "specifiedPartCols" -> "a",
+          "existingPartCols" -> "a, b")
+      )
       // Wrong order
-      intercept[AnalysisException] {
-        df.write.mode("append").partitionBy("b", "a").saveAsTable("partitionedTable")
-      }
+      checkError(
+        exception = intercept[AnalysisException] {
+          df.write.mode("append").partitionBy("b", "a").saveAsTable("partitionedTable")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1163",
+        parameters = Map(
+          "tableName" -> "spark_catalog.default.partitionedtable",
+          "specifiedPartCols" -> "b, a",
+          "existingPartCols" -> "a, b")
+      )
       // Partition columns not specified
-      intercept[AnalysisException] {
-        df.write.mode("append").saveAsTable("partitionedTable")
-      }
+      checkError(
+        exception = intercept[AnalysisException] {
+          df.write.mode("append").saveAsTable("partitionedTable")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1163",
+        parameters = Map(
+          "tableName" -> "spark_catalog.default.partitionedtable",
+          "specifiedPartCols" -> "", "existingPartCols" -> "a, b")
+      )
       assert(sql("select * from partitionedTable").collect().size == 1)
       // Inserts new data successfully when partition columns are correctly specified in
       // partitionBy(...).
@@ -1329,12 +1463,13 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
         val tabName = s"$db.showcolumn"
         withTable(tabName) {
           sql(s"CREATE TABLE $tabName(col1 int, col2 string) USING parquet ")
-          val message = intercept[AnalysisException] {
-            sql(s"SHOW COLUMNS IN $db.showcolumn FROM ${db.toUpperCase(Locale.ROOT)}")
-          }.getMessage
-          assert(message.contains(
-            s"SHOW COLUMNS with conflicting databases: " +
-              s"'${db.toUpperCase(Locale.ROOT)}' != '$db'"))
+          checkError(
+            exception = intercept[AnalysisException] {
+              sql(s"SHOW COLUMNS IN $db.showcolumn FROM ${db.toUpperCase(Locale.ROOT)}")
+            },
+            errorClass = "_LEGACY_ERROR_TEMP_1057",
+            parameters = Map("dbA" -> db.toUpperCase(Locale.ROOT), "dbB" -> db)
+          )
         }
       }
     }
@@ -1343,10 +1478,13 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
   test("show columns - invalid db name") {
     withTable("tbl") {
       sql("CREATE TABLE tbl(col1 int, col2 string) USING parquet ")
-      val message = intercept[AnalysisException] {
-        sql("SHOW COLUMNS IN tbl FROM a.b.c")
-      }.getMessage
-      assert(message.contains("requires a single-part namespace"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("SHOW COLUMNS IN tbl FROM a.b.c")
+        },
+        errorClass = "REQUIRES_SINGLE_PART_NAMESPACE",
+        parameters = Map("sessionCatalog" -> "spark_catalog", "namespace" -> "`a`.`b`.`c`")
+      )
     }
   }
 
@@ -1904,10 +2042,16 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
   test("alter datasource table add columns - text format not supported") {
     withTable("t1") {
       sql("CREATE TABLE t1 (c1 string) USING text")
-      val e = intercept[AnalysisException] {
-        sql("ALTER TABLE t1 ADD COLUMNS (c2 int)")
-      }.getMessage
-      assert(e.contains("ALTER ADD COLUMNS does not support datasource table with type"))
+      checkErrorMatchPVals(
+        exception = intercept[AnalysisException] {
+          sql("ALTER TABLE t1 ADD COLUMNS (c2 int)")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1260",
+        parameters = Map(
+          "tableType" -> ("org\\.apache\\.spark\\.sql\\.execution\\." +
+            "datasources\\.v2\\.text\\.TextDataSourceV2.*"),
+          "table" -> ".*t1.*")
+      )
     }
   }
 
@@ -2006,10 +2150,14 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
   }
 
   test("set command rejects SparkConf entries") {
-    val ex = intercept[AnalysisException] {
-      sql(s"SET ${config.CPUS_PER_TASK.key} = 4")
-    }
-    assert(ex.getMessage.contains("Spark config"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(s"SET ${config.CPUS_PER_TASK.key} = 4")
+      },
+      errorClass = "CANNOT_MODIFY_CONFIG",
+      parameters = Map(
+        "key" -> "\"spark.task.cpus\"",
+        "docroot" -> "https://spark.apache.org/docs/latest"))
   }
 
   test("Refresh table before drop database cascade") {
@@ -2095,20 +2243,27 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
   test(s"Add a directory when ${SQLConf.LEGACY_ADD_SINGLE_FILE_IN_ADD_FILE.key} set to true") {
     withTempDir { testDir =>
       withSQLConf(SQLConf.LEGACY_ADD_SINGLE_FILE_IN_ADD_FILE.key -> "true") {
-        val msg = intercept[SparkException] {
-          spark.sql(s"ADD FILE $testDir")
-        }.getMessage
-        assert(msg.contains("is a directory and recursive is not turned on"))
+        checkError(
+          exception = intercept[SparkException] {
+            sql(s"ADD FILE $testDir")
+          },
+          errorClass = null,
+          parameters = Map.empty
+        )
       }
     }
   }
 
   test("REFRESH FUNCTION") {
-    val msg = intercept[AnalysisException] {
-      sql("REFRESH FUNCTION md5")
-    }.getMessage
-    assert(msg.contains(
-      "md5 is a built-in/temporary function. 'REFRESH FUNCTION' expects a persistent function"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("REFRESH FUNCTION md5")
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_1017",
+      parameters = Map(
+        "name" -> "md5",
+        "cmd" -> "REFRESH FUNCTION", "hintStr" -> ""),
+      context = ExpectedContext(fragment = "md5", start = 17, stop = 19))
     checkError(
       exception = intercept[AnalysisException] {
         sql("REFRESH FUNCTION default.md5")
@@ -2124,19 +2279,32 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
 
     withUserDefinedFunction("func1" -> true) {
       sql("CREATE TEMPORARY FUNCTION func1 AS 'test.org.apache.spark.sql.MyDoubleAvg'")
-      val msg = intercept[AnalysisException] {
-        sql("REFRESH FUNCTION func1")
-      }.getMessage
-      assert(msg.contains("" +
-        "func1 is a built-in/temporary function. 'REFRESH FUNCTION' expects a persistent function"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("REFRESH FUNCTION func1")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1017",
+        parameters = Map("name" -> "func1", "cmd" -> "REFRESH FUNCTION", "hintStr" -> ""),
+        context = ExpectedContext(
+          fragment = "func1",
+          start = 17,
+          stop = 21)
+      )
     }
 
     withUserDefinedFunction("func1" -> false) {
       val func = FunctionIdentifier("func1", Some("default"))
       assert(!spark.sessionState.catalog.isRegisteredFunction(func))
-      intercept[AnalysisException] {
-        sql("REFRESH FUNCTION func1")
-      }
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("REFRESH FUNCTION func1")
+        },
+        errorClass = "UNRESOLVED_ROUTINE",
+        parameters = Map(
+          "routineName" -> "`func1`",
+          "searchPath" -> "[`system`.`builtin`, `system`.`session`, `spark_catalog`.`default`]"),
+        context = ExpectedContext(fragment = "func1", start = 17, stop = 21)
+      )
       assert(!spark.sessionState.catalog.isRegisteredFunction(func))
 
       sql("CREATE FUNCTION func1 AS 'test.org.apache.spark.sql.MyDoubleAvg'")
@@ -2159,9 +2327,14 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
 
       spark.sessionState.catalog.externalCatalog.dropFunction("default", "func1")
       assert(spark.sessionState.catalog.isRegisteredFunction(func))
-      intercept[AnalysisException] {
-        sql("REFRESH FUNCTION func1")
-      }
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("REFRESH FUNCTION func1")
+        },
+        errorClass = "ROUTINE_NOT_FOUND",
+        parameters = Map("routineName" -> "`default`.`func1`")
+      )
+
       assert(!spark.sessionState.catalog.isRegisteredFunction(func))
 
       val function = CatalogFunction(func, "test.non.exists.udf", Seq.empty)
@@ -2186,11 +2359,14 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
       val rand = FunctionIdentifier("rand", Some("default"))
       sql("CREATE FUNCTION rand AS 'test.org.apache.spark.sql.MyDoubleAvg'")
       assert(!spark.sessionState.catalog.isRegisteredFunction(rand))
-      val msg = intercept[AnalysisException] {
-        sql("REFRESH FUNCTION rand")
-      }.getMessage
-      assert(msg.contains(
-        "rand is a built-in/temporary function. 'REFRESH FUNCTION' expects a persistent function"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("REFRESH FUNCTION rand")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1017",
+        parameters = Map("name" -> "rand", "cmd" -> "REFRESH FUNCTION", "hintStr" -> ""),
+        context = ExpectedContext(fragment = "rand", start = 17, stop = 20)
+      )
       assert(!spark.sessionState.catalog.isRegisteredFunction(rand))
       sql("REFRESH FUNCTION default.rand")
       assert(spark.sessionState.catalog.isRegisteredFunction(rand))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLSourceLoadSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLSourceLoadSuite.scala
index 5d4ddeac663..b6fb83fa5b8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLSourceLoadSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/DDLSourceLoadSuite.scala
@@ -26,10 +26,17 @@ import org.apache.spark.sql.types._
 class DDLSourceLoadSuite extends DataSourceTest with SharedSparkSession {
 
   test("data sources with the same name - internal data sources") {
-    val e = intercept[AnalysisException] {
-      spark.read.format("Fluet da Bomb").load()
-    }
-    assert(e.getMessage.contains("Multiple sources found for Fluet da Bomb"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        spark.read.format("Fluet da Bomb").load()
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_1141",
+      parameters = Map(
+        "provider" -> "Fluet da Bomb",
+        "sourceNames" -> ("org.apache.spark.sql.sources.FakeSourceOne, " +
+          "org.apache.spark.sql.sources.FakeSourceTwo")
+      )
+    )
   }
 
   test("data sources with the same name - internal data source/external data source") {
@@ -38,10 +45,17 @@ class DDLSourceLoadSuite extends DataSourceTest with SharedSparkSession {
   }
 
   test("data sources with the same name - external data sources") {
-    val e = intercept[AnalysisException] {
-      spark.read.format("Fake external source").load()
-    }
-    assert(e.getMessage.contains("Multiple sources found for Fake external source"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        spark.read.format("Fake external source").load()
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_1141",
+      parameters = Map(
+        "provider" -> "Fake external source",
+        "sourceNames" -> ("org.apache.fakesource.FakeExternalSourceOne, " +
+          "org.apache.fakesource.FakeExternalSourceTwo")
+      )
+    )
   }
 
   test("load data source from format alias") {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 6e4094bea8c..f310f7ddfdf 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -25,7 +25,7 @@ import org.apache.hadoop.fs.Path
 import org.apache.parquet.format.converter.ParquetMetadataConverter.NO_FILTER
 import org.scalatest.BeforeAndAfterEach
 
-import org.apache.spark.SparkException
+import org.apache.spark.{SparkException, SparkUnsupportedOperationException}
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
@@ -176,10 +176,13 @@ class HiveCatalogedDDLSuite extends DDLSuite with TestHiveSingleton with BeforeA
       spark.sql("CREATE VIEW v AS SELECT STRUCT('a' AS `a`, 1 AS b) q")
       checkAnswer(sql("SELECT q.`a`, q.b FROM v"), Row("a", 1) :: Nil)
 
-      val err = intercept[SparkException] {
-        spark.sql("ALTER VIEW v AS SELECT STRUCT('a' AS `$a`, 1 AS b) q")
-      }.getMessage
-      assert(err.contains("Cannot recognize hive type string"))
+      checkError(
+        exception = intercept[SparkException] {
+          spark.sql("ALTER VIEW v AS SELECT STRUCT('a' AS `$a`, 1 AS b) q")
+        },
+        errorClass = "CANNOT_RECOGNIZE_HIVE_TYPE",
+        parameters = Map("fieldType" -> "\"STRUCT<$A:STRING,B:INT>\"", "fieldName" -> "`q`")
+      )
     }
   }
 
@@ -234,11 +237,17 @@ class HiveCatalogedDDLSuite extends DDLSuite with TestHiveSingleton with BeforeA
   test("SPARK-22431: negative alter table tests with nested types") {
     withTable("t1") {
       spark.sql("CREATE TABLE t1 (q STRUCT<col1:INT, col2:STRING>, i1 INT) USING hive")
-      val err = intercept[SparkException] {
-        spark.sql("ALTER TABLE t1 ADD COLUMNS (newcol1 STRUCT<`$col1`:STRING, col2:Int>)")
-      }.getMessage
-      assert(err.contains("Cannot recognize hive type string:"))
-   }
+      checkError(
+        exception = intercept[SparkException] {
+          spark.sql("ALTER TABLE t1 ADD COLUMNS (newcol1 STRUCT<`$col1`:STRING, col2:Int>)")
+        },
+        errorClass = "CANNOT_RECOGNIZE_HIVE_TYPE",
+        parameters = Map(
+          "fieldType" -> "\"STRUCT<$COL1:STRING,COL2:INT>\"",
+          "fieldName" -> "`newcol1`"
+        )
+      )
+    }
   }
 
   test("SPARK-26630: table with old input format and without partitioned will use HadoopRDD") {
@@ -424,15 +433,20 @@ class HiveDDLSuite
       withTable("tab1", "tab2") {
         (("a", "b") :: Nil).toDF().write.json(tempDir.getCanonicalPath)
 
-        assertAnalysisError(
-          "CREATE TABLE tab1 USING hive",
-          "Unable to infer the schema. The schema specification is required to " +
-            s"create the table `$SESSION_CATALOG_NAME`.`default`.`tab1`")
-
-        assertAnalysisError(
-          s"CREATE TABLE tab2 USING hive location '${tempDir.getCanonicalPath}'",
-          "Unable to infer the schema. The schema specification is required to " +
-            s"create the table `$SESSION_CATALOG_NAME`.`default`.`tab2`")
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql("CREATE TABLE tab1 USING hive")
+          },
+          errorClass = null,
+          parameters = Map.empty
+        )
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"CREATE TABLE tab2 USING hive location '${tempDir.getCanonicalPath}'")
+          },
+          errorClass = null,
+          parameters = Map.empty
+        )
       }
     }
   }
@@ -561,9 +575,18 @@ class HiveDDLSuite
   }
 
   test("create partitioned table without specifying data type for the partition columns") {
-    assertAnalysisError(
-      "CREATE TABLE tbl(a int) PARTITIONED BY (b) STORED AS parquet",
-      "partition column `b` is not defined in table")
+    val sql1 = "CREATE TABLE tbl(a int) PARTITIONED BY (b) STORED AS parquet"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql1)
+      },
+      errorClass = "COLUMN_NOT_DEFINED_IN_TABLE",
+      parameters = Map(
+        "colType" -> "partition",
+        "colName" -> "`b`",
+        "tableName" -> s"`$SESSION_CATALOG_NAME`.`default`.`tbl`",
+        "tableCols" -> "`a`")
+    )
   }
 
   test("add/drop partition with location - managed table") {
@@ -615,10 +638,15 @@ class HiveDDLSuite
   test("SPARK-19129: drop partition with a empty string will drop the whole table") {
     val df = spark.createDataFrame(Seq(("0", "a"), ("1", "b"))).toDF("partCol1", "name")
     df.write.mode("overwrite").partitionBy("partCol1").saveAsTable("partitionedTable")
-    assertAnalysisError(
-      "alter table partitionedTable drop partition(partCol1='')",
-      "Partition spec is invalid. The spec ([partCol1=]) contains an empty " +
-        "partition column value")
+    val sql1 = "alter table partitionedTable drop partition(partCol1='')"
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(sql1)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_1076",
+      parameters = Map(
+        "details" -> "The spec ([partCol1=]) contains an empty partition column value")
+    )
   }
 
   test("add/drop partitions - external table") {
@@ -663,10 +691,15 @@ class HiveDDLSuite
         // After data insertion, all the directory are not empty
         assert(dirSet.forall(dir => dir.listFiles.nonEmpty))
 
-        assertAnalysisError(
-          s"ALTER TABLE $externalTab DROP PARTITION (ds='2008-04-09', unknownCol='12')",
-          "unknownCol is not a valid partition column in table " +
-            s"`$SESSION_CATALOG_NAME`.`default`.`exttable_with_partitions`")
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $externalTab DROP PARTITION (ds='2008-04-09', unknownCol='12')")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1231",
+          parameters = Map(
+            "key" -> "unknownCol",
+            "tblName" -> s"`$SESSION_CATALOG_NAME`.`default`.`exttable_with_partitions`")
+        )
 
         sql(
           s"""
@@ -768,56 +801,39 @@ class HiveDDLSuite
         sql(s"ALTER VIEW $viewName UNSET TBLPROPERTIES ('p')")
         checkProperties(Map())
 
-        assertAnalysisError(
-          s"ALTER VIEW $viewName UNSET TBLPROPERTIES ('p')",
-          "Attempted to unset non-existent properties [`p`] in table " +
-            s"`$SESSION_CATALOG_NAME`.`default`.`view1`")
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER VIEW $viewName UNSET TBLPROPERTIES ('p')")
+          },
+          errorClass = "UNSET_NONEXISTENT_PROPERTIES",
+          parameters = Map(
+            "properties" -> "`p`",
+            "table" -> s"`$SESSION_CATALOG_NAME`.`default`.`view1`")
+        )
       }
     }
   }
 
-  private def assertAnalysisError(sqlText: String, message: String): Unit = {
-    val e = intercept[AnalysisException](sql(sqlText))
-    assert(e.message.contains(message))
-  }
-
-  private def assertAnalysisErrorClass(sqlText: String, errorClass: String,
-                                  parameters: Map[String, String]): Unit = {
+  private def assertAnalysisErrorClass(
+      sqlText: String,
+      errorClass: String,
+      parameters: Map[String, String]): Unit = {
     val e = intercept[AnalysisException](sql(sqlText))
     checkError(e,
       errorClass = errorClass, parameters = parameters)
   }
 
-  private def assertErrorForAlterTableOnView(sqlText: String): Unit = {
-    val message = intercept[AnalysisException](sql(sqlText)).getMessage
-    assert(message.contains("Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead"))
-  }
-
-  private def assertErrorForAlterViewOnTable(sqlText: String): Unit = {
-    val message = intercept[AnalysisException](sql(sqlText)).getMessage
-    assert(message.contains("Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead"))
-  }
-
-  private def assertErrorForAlterTableOnView(
-      sqlText: String, viewName: String, cmdName: String): Unit = {
-    assertAnalysisError(
-      sqlText,
-      s"$viewName is a view. '$cmdName' expects a table. Please use ALTER VIEW instead.")
-  }
-
-  private def assertErrorForAlterViewOnTable(
-      sqlText: String, tableName: String, cmdName: String): Unit = {
-    assertAnalysisError(
-      sqlText,
-      s"$tableName is a table. '$cmdName' expects a view. Please use ALTER TABLE instead.")
-  }
-
   test("create table - SET TBLPROPERTIES EXTERNAL to TRUE") {
     val tabName = "tab1"
     withTable(tabName) {
-      assertAnalysisError(
-        s"CREATE TABLE $tabName (height INT, length INT) TBLPROPERTIES('EXTERNAL'='TRUE')",
-        "Cannot set or change the preserved property key: 'EXTERNAL'")
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(s"CREATE TABLE $tabName (height INT, length INT) " +
+            s"TBLPROPERTIES('EXTERNAL'='TRUE')")
+        },
+        errorClass = null,
+        parameters = Map.empty
+      )
     }
   }
 
@@ -828,9 +844,13 @@ class HiveDDLSuite
       sql(s"CREATE TABLE $tabName (height INT, length INT)")
       assert(
         catalog.getTableMetadata(TableIdentifier(tabName)).tableType == CatalogTableType.MANAGED)
-      assertAnalysisError(
-        s"ALTER TABLE $tabName SET TBLPROPERTIES ('EXTERNAL' = 'TRUE')",
-        "Cannot set or change the preserved property key: 'EXTERNAL'")
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(s"ALTER TABLE $tabName SET TBLPROPERTIES ('EXTERNAL' = 'TRUE')")
+        },
+        errorClass = null,
+        parameters = Map.empty
+      )
       // The table type is not changed to external
       assert(
         catalog.getTableMetadata(TableIdentifier(tabName)).tableType == CatalogTableType.MANAGED)
@@ -856,69 +876,175 @@ class HiveDDLSuite
         assert(catalog.tableExists(TableIdentifier(oldViewName)))
         assert(!catalog.tableExists(TableIdentifier(newViewName)))
 
-        assertErrorForAlterViewOnTable(s"ALTER VIEW $tabName RENAME TO $newViewName")
-
-        assertErrorForAlterTableOnView(s"ALTER TABLE $oldViewName RENAME TO $newViewName")
-
-        assertErrorForAlterViewOnTable(
-          s"ALTER VIEW $tabName SET TBLPROPERTIES ('p' = 'an')",
-          tabName,
-          "ALTER VIEW ... SET TBLPROPERTIES")
-
-        assertErrorForAlterTableOnView(
-          s"ALTER TABLE $oldViewName SET TBLPROPERTIES ('p' = 'an')",
-          oldViewName,
-          "ALTER TABLE ... SET TBLPROPERTIES")
-
-        assertErrorForAlterViewOnTable(
-          s"ALTER VIEW $tabName UNSET TBLPROPERTIES ('p')",
-          tabName,
-          "ALTER VIEW ... UNSET TBLPROPERTIES")
-
-        assertErrorForAlterTableOnView(
-          s"ALTER TABLE $oldViewName UNSET TBLPROPERTIES ('p')",
-          oldViewName,
-          "ALTER TABLE ... UNSET TBLPROPERTIES")
-
-        assertErrorForAlterTableOnView(
-          s"ALTER TABLE $oldViewName SET LOCATION '/path/to/home'",
-          oldViewName,
-          "ALTER TABLE ... SET LOCATION ...")
-
-        assertErrorForAlterTableOnView(
-          s"ALTER TABLE $oldViewName SET SERDE 'whatever'",
-          oldViewName,
-          "ALTER TABLE ... SET [SERDE|SERDEPROPERTIES]")
-
-        assertErrorForAlterTableOnView(
-          s"ALTER TABLE $oldViewName SET SERDEPROPERTIES ('x' = 'y')",
-          oldViewName,
-          "ALTER TABLE ... SET [SERDE|SERDEPROPERTIES]")
-
-        assertErrorForAlterTableOnView(
-          s"ALTER TABLE $oldViewName PARTITION (a=1, b=2) SET SERDEPROPERTIES ('x' = 'y')",
-          oldViewName,
-          "ALTER TABLE ... SET [SERDE|SERDEPROPERTIES]")
-
-        assertErrorForAlterTableOnView(
-          s"ALTER TABLE $oldViewName RECOVER PARTITIONS",
-          oldViewName,
-          "ALTER TABLE ... RECOVER PARTITIONS")
-
-        assertErrorForAlterTableOnView(
-          s"ALTER TABLE $oldViewName PARTITION (a='1') RENAME TO PARTITION (a='100')",
-          oldViewName,
-          "ALTER TABLE ... RENAME TO PARTITION")
-
-        assertErrorForAlterTableOnView(
-          s"ALTER TABLE $oldViewName ADD IF NOT EXISTS PARTITION (a='4', b='8')",
-          oldViewName,
-          "ALTER TABLE ... ADD PARTITION ...")
-
-        assertErrorForAlterTableOnView(
-          s"ALTER TABLE $oldViewName DROP IF EXISTS PARTITION (a='2')",
-          oldViewName,
-          "ALTER TABLE ... DROP PARTITION ...")
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER VIEW $tabName RENAME TO $newViewName")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1253",
+          parameters = Map.empty
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $oldViewName RENAME TO $newViewName")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1252",
+          parameters = Map.empty
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER VIEW $tabName SET TBLPROPERTIES ('p' = 'an')")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1015",
+          parameters = Map(
+            "identifier" -> s"default.$tabName",
+            "cmd" -> "ALTER VIEW ... SET TBLPROPERTIES",
+            "hintStr" -> " Please use ALTER TABLE instead."),
+          context = ExpectedContext(fragment = tabName, start = 11, stop = 14)
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $oldViewName SET TBLPROPERTIES ('p' = 'an')")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1013",
+          parameters = Map(
+            "nameParts" -> s"$SESSION_CATALOG_NAME.default.$oldViewName",
+            "viewStr" -> "view",
+            "cmd" -> "ALTER TABLE ... SET TBLPROPERTIES",
+            "hintStr" -> " Please use ALTER VIEW instead."),
+          context = ExpectedContext(fragment = oldViewName, start = 12, stop = 16)
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER VIEW $tabName UNSET TBLPROPERTIES ('p')")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1015",
+          parameters = Map(
+            "identifier" -> s"default.$tabName",
+            "cmd" -> "ALTER VIEW ... UNSET TBLPROPERTIES",
+            "hintStr" -> " Please use ALTER TABLE instead."),
+          context = ExpectedContext(fragment = tabName, start = 11, stop = 14)
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $oldViewName UNSET TBLPROPERTIES ('p')")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1013",
+          parameters = Map(
+            "nameParts" -> s"$SESSION_CATALOG_NAME.default.$oldViewName",
+            "viewStr" -> "view",
+            "cmd" -> "ALTER TABLE ... UNSET TBLPROPERTIES",
+            "hintStr" -> " Please use ALTER VIEW instead."),
+          context = ExpectedContext(fragment = oldViewName, start = 12, stop = 16)
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $oldViewName SET LOCATION '/path/to/home'")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1013",
+          parameters = Map(
+            "nameParts" -> s"$SESSION_CATALOG_NAME.default.$oldViewName",
+            "viewStr" -> "view",
+            "cmd" -> "ALTER TABLE ... SET LOCATION ...",
+            "hintStr" -> " Please use ALTER VIEW instead."),
+          context = ExpectedContext(fragment = oldViewName, start = 12, stop = 16)
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $oldViewName SET SERDE 'whatever'")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1013",
+          parameters = Map(
+            "nameParts" -> s"$SESSION_CATALOG_NAME.default.$oldViewName",
+            "viewStr" -> "view",
+            "cmd" -> "ALTER TABLE ... SET [SERDE|SERDEPROPERTIES]",
+            "hintStr" -> " Please use ALTER VIEW instead."),
+          context = ExpectedContext(fragment = oldViewName, start = 12, stop = 16)
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $oldViewName SET SERDEPROPERTIES ('x' = 'y')")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1013",
+          parameters = Map(
+            "nameParts" -> s"$SESSION_CATALOG_NAME.default.$oldViewName",
+            "viewStr" -> "view",
+            "cmd" -> "ALTER TABLE ... SET [SERDE|SERDEPROPERTIES]",
+            "hintStr" -> " Please use ALTER VIEW instead."),
+          context = ExpectedContext(fragment = oldViewName, start = 12, stop = 16)
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $oldViewName PARTITION (a=1, b=2) SET SERDEPROPERTIES ('x' = 'y')")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1013",
+          parameters = Map(
+            "nameParts" -> s"$SESSION_CATALOG_NAME.default.$oldViewName",
+            "viewStr" -> "view",
+            "cmd" -> "ALTER TABLE ... SET [SERDE|SERDEPROPERTIES]",
+            "hintStr" -> " Please use ALTER VIEW instead."),
+          context = ExpectedContext(fragment = oldViewName, start = 12, stop = 16)
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $oldViewName RECOVER PARTITIONS")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1013",
+          parameters = Map(
+            "nameParts" -> s"$SESSION_CATALOG_NAME.default.$oldViewName",
+            "viewStr" -> "view",
+            "cmd" -> "ALTER TABLE ... RECOVER PARTITIONS",
+            "hintStr" -> " Please use ALTER VIEW instead."),
+          context = ExpectedContext(fragment = oldViewName, start = 12, stop = 16)
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $oldViewName PARTITION (a='1') RENAME TO PARTITION (a='100')")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1013",
+          parameters = Map(
+            "nameParts" -> s"$SESSION_CATALOG_NAME.default.$oldViewName",
+            "viewStr" -> "view",
+            "cmd" -> "ALTER TABLE ... RENAME TO PARTITION",
+            "hintStr" -> " Please use ALTER VIEW instead."),
+          context = ExpectedContext(fragment = oldViewName, start = 12, stop = 16)
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $oldViewName ADD IF NOT EXISTS PARTITION (a='4', b='8')")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1013",
+          parameters = Map(
+            "nameParts" -> s"$SESSION_CATALOG_NAME.default.$oldViewName",
+            "viewStr" -> "view",
+            "cmd" -> "ALTER TABLE ... ADD PARTITION ...",
+            "hintStr" -> " Please use ALTER VIEW instead."),
+          context = ExpectedContext(fragment = oldViewName, start = 12, stop = 16)
+        )
+
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE $oldViewName DROP IF EXISTS PARTITION (a='2')")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1013",
+          parameters = Map(
+            "nameParts" -> s"$SESSION_CATALOG_NAME.default.$oldViewName",
+            "viewStr" -> "view",
+            "cmd" -> "ALTER TABLE ... DROP PARTITION ...",
+            "hintStr" -> " Please use ALTER VIEW instead."),
+          context = ExpectedContext(fragment = oldViewName, start = 12, stop = 16)
+        )
 
         assert(catalog.tableExists(TableIdentifier(tabName)))
         assert(catalog.tableExists(TableIdentifier(oldViewName)))
@@ -1065,7 +1191,7 @@ class HiveDDLSuite
           "operation" -> "DROP VIEW",
           "foundType" -> "MANAGED",
           "requiredType" -> "VIEW",
-          "objectName" -> "spark_catalog.default.tab1"
+          "objectName" -> s"$SESSION_CATALOG_NAME.default.tab1"
         )
       )
     }
@@ -1095,11 +1221,16 @@ class HiveDDLSuite
     withTable("tab1") {
       spark.range(10).write.saveAsTable("tab1")
       withView("view1") {
-        val e = intercept[AnalysisException] {
-          sql("CREATE VIEW view1 (col1, col3) AS SELECT * FROM tab1")
-        }.getMessage
-        assert(e.contains("the SELECT clause (num: `1`) does not match")
-          && e.contains("CREATE VIEW (num: `2`)"))
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql("CREATE VIEW view1 (col1, col3) AS SELECT * FROM tab1")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1277",
+          parameters = Map(
+            "analyzedPlanLength" -> "1",
+            "userSpecifiedColumnsLength" -> "2"
+          )
+        )
       }
     }
   }
@@ -1272,15 +1403,26 @@ class HiveDDLSuite
   test("drop default database") {
     Seq("true", "false").foreach { caseSensitive =>
       withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive) {
-        assertAnalysisError(
-          "DROP DATABASE default",
-          "Can not drop default database")
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql("DROP DATABASE default")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1067",
+          parameters = Map.empty
+        )
 
         // SQLConf.CASE_SENSITIVE does not affect the result
         // because the Hive metastore is not case sensitive.
-        assertAnalysisError(
-          "DROP DATABASE DeFault",
-          "Can not drop default database")
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql("DROP DATABASE DeFault")
+          },
+          errorClass = caseSensitive match {
+            case "false" => "_LEGACY_ERROR_TEMP_1067"
+            case _ => null
+          },
+          parameters = Map.empty
+        )
       }
     }
   }
@@ -1662,17 +1804,30 @@ class HiveDDLSuite
             Row(tabName, "spark_catalog", Array("default"), null, "MANAGED", false) :: Nil)
         assert(spark.catalog.getTable("default", indexTabName).name === indexTabName)
 
-        intercept[TableAlreadyExistsException] {
-          sql(s"CREATE TABLE $indexTabName(b int) USING hive")
-        }
-        intercept[TableAlreadyExistsException] {
-          sql(s"ALTER TABLE $tabName RENAME TO $indexTabName")
-        }
+        checkError(
+          exception = intercept[TableAlreadyExistsException] {
+            sql(s"CREATE TABLE $indexTabName(b int) USING hive")
+          },
+          errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
+          parameters = Map("relationName" -> s"`default`.`$indexTabName`")
+        )
+
+        checkError(
+          exception = intercept[TableAlreadyExistsException] {
+            sql(s"ALTER TABLE $tabName RENAME TO $indexTabName")
+          },
+          errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
+          parameters = Map("relationName" -> s"`default`.`$indexTabName`")
+        )
 
         // When tableExists is not invoked, we still can get an AnalysisException
-        assertAnalysisError(
-          s"DESCRIBE $indexTabName",
-          "Hive index table is not supported.")
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"DESCRIBE $indexTabName")
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_1220",
+          parameters = Map("tableType" -> "index table")
+        )
       } finally {
         client.runSqlHive(s"DROP INDEX IF EXISTS $indexName ON $tabName")
       }
@@ -1742,19 +1897,30 @@ class HiveDDLSuite
       sql("CREATE TABLE tbl(a INT) STORED AS parquet")
 
       Seq(DATASOURCE_PREFIX, STATISTICS_PREFIX).foreach { forbiddenPrefix =>
-        assertAnalysisError(
-          s"ALTER TABLE tbl SET TBLPROPERTIES ('${forbiddenPrefix}foo' = 'loser')",
-          s"${forbiddenPrefix}foo")
-
-        assertAnalysisError(
-          s"ALTER TABLE tbl UNSET TBLPROPERTIES ('${forbiddenPrefix}foo')",
-          s"${(forbiddenPrefix.split(".") :+ "foo")
-            .map(part => s"`$part`")
-            .mkString(".")}")
-
-        assertAnalysisError(
-          s"CREATE TABLE tbl2 (a INT) TBLPROPERTIES ('${forbiddenPrefix}foo'='anything')",
-          s"${forbiddenPrefix}foo")
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE tbl SET TBLPROPERTIES ('${forbiddenPrefix}foo' = 'loser')")
+          },
+          errorClass = null,
+          parameters = Map.empty
+        )
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"ALTER TABLE tbl UNSET TBLPROPERTIES ('${forbiddenPrefix}foo')")
+          },
+          errorClass = "UNSET_NONEXISTENT_PROPERTIES",
+          parameters = Map(
+            "properties" -> (s"${(forbiddenPrefix.split("\\.") :+ "foo").
+              map(part => s"`$part`").mkString(".")}"),
+            "table" -> "`spark_catalog`.`default`.`tbl`")
+        )
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(s"CREATE TABLE tbl2 (a INT) TBLPROPERTIES ('${forbiddenPrefix}foo'='anything')")
+          },
+          errorClass = null,
+          parameters = Map.empty
+        )
       }
     }
   }
@@ -2248,9 +2414,13 @@ class HiveDDLSuite
             assert(loc.listFiles().length >= 1)
             checkAnswer(spark.table("t"), Row("1") :: Nil)
           } else {
-            assertAnalysisError(
-              "INSERT INTO TABLE t SELECT 1",
-              "java.net.URISyntaxException: Relative path in absolute URI: a:b")
+            checkError(
+              exception = intercept[AnalysisException] {
+                sql("INSERT INTO TABLE t SELECT 1")
+              },
+              errorClass = null,
+              parameters = Map.empty
+            )
           }
         }
 
@@ -2289,13 +2459,21 @@ class HiveDDLSuite
                 Row("1", "2") :: Row("1", "2017-03-03 12:13%3A14") :: Nil)
             }
           } else {
-            assertAnalysisError(
-              "INSERT INTO TABLE t1 PARTITION(b=2) SELECT 1",
-              "java.net.URISyntaxException: Relative path in absolute URI: a:b")
+            checkError(
+              exception = intercept[AnalysisException] {
+                sql("INSERT INTO TABLE t1 PARTITION(b=2) SELECT 1")
+              },
+              errorClass = null,
+              parameters = Map.empty
+            )
 
-            assertAnalysisError(
-              "INSERT INTO TABLE t1 PARTITION(b='2017-03-03 12:13%3A14') SELECT 1",
-              "java.net.URISyntaxException: Relative path in absolute URI: a:b")
+            checkError(
+              exception = intercept[AnalysisException] {
+                sql("INSERT INTO TABLE t1 PARTITION(b='2017-03-03 12:13%3A14') SELECT 1")
+              },
+              errorClass = null,
+              parameters = Map.empty
+            )
           }
         }
       }
@@ -2393,15 +2571,23 @@ class HiveDDLSuite
           } else {
             // hive catalog will still complain that c1 is a duplicate column name because hive
             // identifiers are case insensitive.
-            assertAnalysisError(
-              "ALTER TABLE tab ADD COLUMNS (C2 string)",
-              "HiveException")
+            checkError(
+              exception = intercept[AnalysisException] {
+                sql("ALTER TABLE tab ADD COLUMNS (C2 string)")
+              },
+              errorClass = null,
+              parameters = Map.empty
+            )
 
             // hive catalog will still complain that c1 is a duplicate column name because hive
             // identifiers are case insensitive.
-            assertAnalysisError(
-              "ALTER TABLE tab ADD COLUMNS (C1 string)",
-              "HiveException")
+            checkError(
+              exception = intercept[AnalysisException] {
+                sql("ALTER TABLE tab ADD COLUMNS (C1 string)")
+              },
+              errorClass = null,
+              parameters = Map.empty
+            )
           }
         }
       }
@@ -2411,13 +2597,24 @@ class HiveDDLSuite
   test("SPARK-36241: support creating tables with void datatype") {
     // CTAS with void type
     withTable("t1", "t2", "t3") {
-      assertAnalysisError(
-        "CREATE TABLE t1 USING PARQUET AS SELECT NULL AS null_col",
-        "Column `null_col` has a data type of void, which is not supported by Parquet.")
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("CREATE TABLE t1 USING PARQUET AS SELECT NULL AS null_col")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1150",
+        parameters = Map(
+          "field" -> "null_col",
+          "fieldType" -> "void",
+          "format" -> "Parquet")
+      )
 
-      assertAnalysisError(
-        "CREATE TABLE t2 STORED AS PARQUET AS SELECT null as null_col",
-        "Unknown field type: void")
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("CREATE TABLE t2 STORED AS PARQUET AS SELECT null as null_col")
+        },
+        errorClass = null,
+        parameters = Map.empty
+      )
 
       sql("CREATE TABLE t3 AS SELECT NULL AS null_col")
       checkAnswer(sql("SELECT * FROM t3"), Row(null))
@@ -2425,13 +2622,23 @@ class HiveDDLSuite
 
     // Create table with void type
     withTable("t1", "t2", "t3", "t4") {
-      assertAnalysisError(
-        "CREATE TABLE t1 (v VOID) USING PARQUET",
-        "Column `v` has a data type of void, which is not supported by Parquet.")
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("CREATE TABLE t1 (v VOID) USING PARQUET")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1150",
+        parameters = Map(
+          "field" -> "v",
+          "fieldType" -> "void",
+          "format" -> "Parquet"))
 
-      assertAnalysisError(
-        "CREATE TABLE t2 (v VOID) STORED AS PARQUET",
-        "Unknown field type: void")
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("CREATE TABLE t2 (v VOID) STORED AS PARQUET")
+        },
+        errorClass = null,
+        parameters = Map.empty
+      )
 
       sql("CREATE TABLE t3 (v VOID) USING hive")
       checkAnswer(sql("SELECT * FROM t3"), Seq.empty)
@@ -2632,9 +2839,13 @@ class HiveDDLSuite
   test("load command for non local invalid path validation") {
     withTable("tbl") {
       sql("CREATE TABLE tbl(i INT, j STRING) USING hive")
-      assertAnalysisError(
-        "load data inpath '/doesnotexist.csv' into table tbl",
-        "LOAD DATA input path does not exist")
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("load data inpath '/doesnotexist.csv' into table tbl")
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_1265",
+        parameters = Map("path" -> "/doesnotexist.csv")
+      )
     }
   }
 
@@ -2669,14 +2880,17 @@ class HiveDDLSuite
 
     Seq("nested,column", "nested:column", "nested;column").foreach { nestedColumnName =>
       withTable("t") {
-        val e = intercept[AnalysisException] {
-          spark.range(1)
-            .select(struct(lit(0).as(nestedColumnName)).as("toplevel"))
-            .write
-            .format("hive")
-            .saveAsTable("t")
-        }.getMessage
-        assert(e.contains(expectedMsg))
+        checkError(
+          exception = intercept[AnalysisException] {
+            spark.range(1)
+              .select(struct(lit(0).as(nestedColumnName)).as("toplevel"))
+              .write
+              .format("hive")
+              .saveAsTable("t")
+          },
+          errorClass = null,
+          parameters = Map.empty
+        )
       }
     }
   }
@@ -2807,39 +3021,66 @@ class HiveDDLSuite
       sql("CREATE TABLE sourceDsTable(a INT, b INT) USING PARQUET")
 
       // row format doesn't work in create targetDsTable
-      assertAnalysisError(
-        """
-          |CREATE TABLE targetDsTable LIKE sourceHiveTable USING PARQUET
-          |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-        """.stripMargin,
-        "Operation not allowed: CREATE TABLE LIKE ... USING ... ROW FORMAT SERDE")
+      val sql1 =
+        """CREATE TABLE targetDsTable LIKE sourceHiveTable USING PARQUET
+          |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'""".stripMargin
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(sql1)
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_0035",
+        parameters = Map(
+          "message" -> ("CREATE TABLE LIKE ... USING ... ROW FORMAT SERDE " +
+            "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe")),
+        context = ExpectedContext(fragment = sql1, start = 0, stop = 130)
+      )
 
       // row format doesn't work with provider hive
-      assertAnalysisError(
-        """
-          |CREATE TABLE targetHiveTable LIKE sourceHiveTable USING hive
+      val sql2 =
+        """CREATE TABLE targetHiveTable LIKE sourceHiveTable USING hive
           |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-          |WITH SERDEPROPERTIES ('test' = 'test')
-        """.stripMargin,
-        "Operation not allowed: CREATE TABLE LIKE ... USING ... ROW FORMAT SERDE")
+          |WITH SERDEPROPERTIES ('test' = 'test')""".stripMargin
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(sql2)
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_0035",
+        parameters = Map(
+          "message" -> ("CREATE TABLE LIKE ... USING ... ROW FORMAT SERDE " +
+            "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe")),
+        context = ExpectedContext(fragment = sql2, start = 0, stop = 168)
+      )
 
       // row format doesn't work without 'STORED AS'
-      assertAnalysisError(
-        """
-          |CREATE TABLE targetDsTable LIKE sourceDsTable
+      val sql3 =
+        """CREATE TABLE targetDsTable LIKE sourceDsTable
           |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-          |WITH SERDEPROPERTIES ('test' = 'test')
-        """.stripMargin,
-        "'ROW FORMAT' must be used with 'STORED AS'")
+          |WITH SERDEPROPERTIES ('test' = 'test')""".stripMargin
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(sql3)
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_0047",
+        parameters = Map.empty,
+        context = ExpectedContext(fragment = sql3, start = 0, stop = 153)
+      )
 
       // 'INPUTFORMAT' and 'OUTPUTFORMAT' conflict with 'USING'
-      assertAnalysisError(
-        """
-          |CREATE TABLE targetDsTable LIKE sourceDsTable USING format
+      val sql4 =
+        """CREATE TABLE targetDsTable LIKE sourceDsTable USING format
           |STORED AS INPUTFORMAT 'inFormat' OUTPUTFORMAT 'outFormat'
-          |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-        """.stripMargin,
-        "Operation not allowed: CREATE TABLE LIKE ... USING ... STORED AS")
+          |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'""".stripMargin
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(sql4)
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_0035",
+        parameters = Map(
+          "message" -> ("CREATE TABLE LIKE ... USING ... STORED AS " +
+            "INPUTFORMAT inFormat OUTPUTFORMAT outFormat ROW FORMAT " +
+            "SERDE org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe")),
+        context = ExpectedContext(fragment = sql4, start = 0, stop = 185)
+      )
     }
   }
 
@@ -2899,13 +3140,23 @@ class HiveDDLSuite
         // negative case
         hiveFormats.filterNot(allowSerdeFileFormats.contains(_)).foreach { format =>
           withTable("targetTable") {
-            assertAnalysisError(
-              s"""
-                 |CREATE TABLE targetTable LIKE $sourceTable
+            val sql1 =
+              s"""CREATE TABLE targetTable LIKE $sourceTable
                  |ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-                 |STORED AS $format
-              """.stripMargin,
-              s"ROW FORMAT SERDE is incompatible with format '${format.toLowerCase(Locale.ROOT)}'")
+                 |STORED AS $format""".stripMargin
+            checkError(
+              exception = intercept[AnalysisException] {
+                sql(sql1)
+              },
+              errorClass = "_LEGACY_ERROR_TEMP_0035",
+              parameters = Map(
+                "message" -> (s"ROW FORMAT SERDE is incompatible with format " +
+                  s"'${format.toLowerCase(Locale.ROOT)}', which also specifies a serde")),
+              context = ExpectedContext(
+                fragment = sql1,
+                start = 0,
+                stop = 110 + sourceTable.length + format.length)
+            )
           }
         }
       }
@@ -2928,13 +3179,23 @@ class HiveDDLSuite
           assert(table.storage.serde === Some(expectedSerde.get.serde.get))
 
           // negative case
-          assertAnalysisError(
-            s"""
-               |CREATE TABLE targetTable LIKE $sourceTable
+          val sql1 =
+            s"""CREATE TABLE targetTable LIKE $sourceTable
                |ROW FORMAT DELIMITED
-               |STORED AS PARQUET
-            """.stripMargin,
-            "ROW FORMAT DELIMITED is only compatible with 'textfile'")
+               |STORED AS PARQUET""".stripMargin
+          checkError(
+            exception = intercept[AnalysisException] {
+              sql(sql1)
+            },
+            errorClass = "_LEGACY_ERROR_TEMP_0035",
+            parameters = Map(
+              "message" -> ("ROW FORMAT DELIMITED is only compatible " +
+                "with 'textfile', not 'parquet'")),
+            context = ExpectedContext(
+              fragment = sql1,
+              start = 0,
+              stop = 68 + sourceTable.length)
+          )
         }
       }
 
@@ -3046,11 +3307,13 @@ class HiveDDLSuite
            |""".stripMargin,
         s"CREATE TABLE $tbl (dt INTERVAL HOUR TO MINUTE)"
       ).foreach { sqlCmd =>
-        val errMsg = intercept[UnsupportedOperationException] {
-          sql(sqlCmd)
-        }.getMessage
-        assert(errMsg.contains(s"Hive table `$SESSION_CATALOG_NAME`.`default`.`$tbl` with " +
-          "ANSI intervals is not supported"))
+        checkError(
+          exception = intercept[SparkUnsupportedOperationException] {
+            sql(sqlCmd)
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_2276",
+          parameters = Map("tableName" -> s"`$SESSION_CATALOG_NAME`.`default`.`$tbl`")
+        )
       }
     }
   }
@@ -3095,9 +3358,13 @@ class HiveDDLSuite
   }
 
   test("SPARK-38216: Fail early if all the columns are partitioned columns") {
-    assertAnalysisError(
-      "CREATE TABLE tab (c1 int) PARTITIONED BY (c1) STORED AS PARQUET",
-      "Cannot use all columns for partition columns")
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("CREATE TABLE tab (c1 int) PARTITIONED BY (c1) STORED AS PARQUET")
+      },
+      errorClass = null,
+      parameters = Map.empty
+    )
   }
 
   test("SPARK-43359: Delete table not allowed") {
@@ -3110,7 +3377,7 @@ class HiveDDLSuite
       checkError(e,
         errorClass = "UNSUPPORTED_FEATURE.TABLE_OPERATION",
         parameters = Map(
-          "tableName" -> s"`spark_catalog`.`default`.`$tbl`",
+          "tableName" -> s"`$SESSION_CATALOG_NAME`.`default`.`$tbl`",
           "operation" -> "DELETE")
       )
     }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/Hive_2_1_DDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/Hive_2_1_DDLSuite.scala
index cdef3c01087..60a02461c93 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/Hive_2_1_DDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/Hive_2_1_DDLSuite.scala
@@ -100,13 +100,16 @@ class Hive_2_1_DDLSuite extends SparkFunSuite with TestHiveSingleton {
   }
 
   test("SPARK-21617: ALTER TABLE with incompatible schema on Hive-compatible table") {
-    val exception = intercept[AnalysisException] {
-      testAlterTable(
-        "t1",
-        "CREATE TABLE t1 (c1 string) USING parquet",
-        StructType(Array(StructField("c2", IntegerType))))
-    }
-    assert(exception.getMessage().contains("types incompatible with the existing columns"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        testAlterTable(
+          "t1",
+          "CREATE TABLE t1 (c1 string) USING parquet",
+          StructType(Array(StructField("c2", IntegerType))))
+      },
+      errorClass = null,
+      parameters = Map.empty
+    )
   }
 
   private def testAlterTable(


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
