This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0c6ea478d6b [SPARK-43862][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_(1254 & 1315)
0c6ea478d6b is described below

commit 0c6ea478d6b448caab5c969be122159acef2bbeb
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Tue May 30 14:18:26 2023 +0300

    [SPARK-43862][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_(1254 & 1315)
    
    ### What changes were proposed in this pull request?
    This PR aims to:
    1. Assign names to the following error classes:
      - _LEGACY_ERROR_TEMP_1254 -> UNSUPPORTED_OVERWRITE.PATH
      - _LEGACY_ERROR_TEMP_1315 -> UNSUPPORTED_OVERWRITE.TABLE
    
    2. Convert _LEGACY_ERROR_TEMP_0002 to INTERNAL_ERROR.
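
    For illustration, overwriting a table that is also being read from now reports the new error class. A hypothetical spark-shell session (the table name `t1` mirrors the new test in `InsertSuite`; the exact rendering of the message is an assumption based on the templates added to error-classes.json):

        scala> sql("CREATE TABLE t1(a STRING, b INT) USING parquet")
        scala> spark.table("t1").write.mode("overwrite").saveAsTable("t1")
        org.apache.spark.sql.AnalysisException: [UNSUPPORTED_OVERWRITE.TABLE]
          Can't overwrite the target that is also being read from.
          The target table is `spark_catalog`.`default`.`t1`.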
    
    ### Why are the changes needed?
    - The changes improve the error framework.
    - Because the subclass `SparkSqlAstBuilder` of `AstBuilder` already overrides the methods `visitInsertOverwriteDir` and `visitInsertOverwriteHiveDir`, and in practice the Spark code base uses `SparkSqlParser` (and therefore `SparkSqlAstBuilder`), the two exceptions mentioned above in `AstBuilder` can never be thrown from the user's perspective.
    
https://github.com/apache/spark/blob/88f69d6f92860823b1a90bc162ebca2b7c8132fc/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala#L46-L47
    
    - visitInsertOverwriteDir
    
https://github.com/apache/spark/blob/88f69d6f92860823b1a90bc162ebca2b7c8132fc/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala#L802-L834
    
    - visitInsertOverwriteHiveDir
    
https://github.com/apache/spark/blob/88f69d6f92860823b1a90bc162ebca2b7c8132fc/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala#L848-L866
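
    Since `SparkSqlAstBuilder` overrides both methods, the `AstBuilder` versions can only be reached by invoking the Catalyst parser directly (e.g. in tests), and doing so now raises an internal error instead of a parse error. A minimal sketch of the new behavior, following the test added to `DDLParserSuite`:

        checkError(
          exception = internalException(parsePlan)(
            "INSERT OVERWRITE LOCAL DIRECTORY USING parquet OPTIONS (path 'xxx') SELECT i from t1"),
          errorClass = "INTERNAL_ERROR",
          parameters = Map("message" -> "INSERT OVERWRITE DIRECTORY is not supported."))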
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    - Manual testing:
    $ build/sbt "test:testOnly *DDLParserSuite"
    $ build/sbt "test:testOnly *InsertSuite"
    $ build/sbt "test:testOnly *MetastoreDataSourcesSuite"
    $ build/sbt "test:testOnly *HiveDDLSuite"
    
    - Pass GA.
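
    The migrated tests assert on the structured error class and its message parameters rather than on message substrings; a representative excerpt from the updated `InsertSuite` (`checkErrorMatchPVals` treats parameter values as regexes, since the read path depends on the environment):

        checkErrorMatchPVals(
          exception = intercept[AnalysisException] {
            sql("INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jsonTable")
          },
          errorClass = "UNSUPPORTED_OVERWRITE.PATH",
          parameters = Map("path" -> ".*"))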
    
    Closes #41367 from panbingkun/LEGACY_ERROR_TEMP_1254.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   | 32 +++++-----
 .../spark/sql/catalyst/parser/AstBuilder.scala     |  4 +-
 .../spark/sql/errors/QueryCompilationErrors.scala  | 18 +++---
 .../spark/sql/errors/QueryParsingErrors.scala      |  5 +-
 .../spark/sql/catalyst/analysis/AnalysisTest.scala |  5 ++
 .../spark/sql/catalyst/parser/DDLParserSuite.scala | 20 +++++++
 .../org/apache/spark/sql/DataFrameWriter.scala     |  6 +-
 .../apache/spark/sql/execution/command/ddl.scala   | 12 +++-
 .../execution/datasources/DataSourceStrategy.scala |  2 +-
 .../org/apache/spark/sql/sources/InsertSuite.scala | 70 ++++++++++++++++------
 .../spark/sql/hive/MetastoreDataSourcesSuite.scala | 35 ++++++-----
 .../spark/sql/hive/execution/HiveDDLSuite.scala    | 11 ++--
 12 files changed, 149 insertions(+), 71 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 07ff6e1c7c2..8c3ba1e190d 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -2320,6 +2320,23 @@
       "grouping()/grouping_id() can only be used with 
GroupingSets/Cube/Rollup."
     ]
   },
+  "UNSUPPORTED_OVERWRITE" : {
+    "message" : [
+      "Can't overwrite the target that is also being read from."
+    ],
+    "subClass" : {
+      "PATH" : {
+        "message" : [
+          "The target path is <path>."
+        ]
+      },
+      "TABLE" : {
+        "message" : [
+          "The target table is <table>."
+        ]
+      }
+    }
+  },
   "UNSUPPORTED_SAVE_MODE" : {
     "message" : [
       "The save mode <saveMode> is not supported for:"
@@ -2477,11 +2494,6 @@
       "Invalid InsertIntoContext."
     ]
   },
-  "_LEGACY_ERROR_TEMP_0002" : {
-    "message" : [
-      "INSERT OVERWRITE DIRECTORY is not supported."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_0004" : {
     "message" : [
       "Empty source for merge: you should specify a source table/subquery in 
merge."
@@ -3669,11 +3681,6 @@
       "Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1254" : {
-    "message" : [
-      "Cannot overwrite a path that is also being read from."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1255" : {
     "message" : [
       "Cannot drop built-in function '<functionName>'."
@@ -3921,11 +3928,6 @@
       "'<operation>' does not support bucketBy and sortBy right now."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1315" : {
-    "message" : [
-      "Cannot overwrite table <tableName> that is also being read from."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1316" : {
     "message" : [
       "Invalid partition transformation: <expr>."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index a2ada0ef5fc..957c8130ede 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -370,7 +370,7 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
    */
   override def visitInsertOverwriteDir(
       ctx: InsertOverwriteDirContext): InsertDirParams = withOrigin(ctx) {
-    throw QueryParsingErrors.insertOverwriteDirectoryUnsupportedError(ctx)
+    throw QueryParsingErrors.insertOverwriteDirectoryUnsupportedError()
   }
 
   /**
@@ -378,7 +378,7 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
    */
   override def visitInsertOverwriteHiveDir(
       ctx: InsertOverwriteHiveDirContext): InsertDirParams = withOrigin(ctx) {
-    throw QueryParsingErrors.insertOverwriteDirectoryUnsupportedError(ctx)
+    throw QueryParsingErrors.insertOverwriteDirectoryUnsupportedError()
   }
 
   private def getTableAliasWithoutColumnAlias(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 1d80ca22550..f4ca9147f91 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -2497,10 +2497,16 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       messageParameters = Map.empty)
   }
 
-  def cannotOverwritePathBeingReadFromError(): Throwable = {
+  def cannotOverwritePathBeingReadFromError(path: String): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1254",
-      messageParameters = Map.empty)
+      errorClass = "UNSUPPORTED_OVERWRITE.PATH",
+      messageParameters = Map("path" -> path))
+  }
+
+  def cannotOverwriteTableThatIsBeingReadFromError(tableIdent: TableIdentifier): Throwable = {
+    new AnalysisException(
+      errorClass = "UNSUPPORTED_OVERWRITE.TABLE",
+      messageParameters = Map("table" -> toSQLId(tableIdent.nameParts)))
   }
 
   def cannotDropBuiltinFuncError(functionName: String): Throwable = {
@@ -2956,12 +2962,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
     new TableAlreadyExistsException(tableIdent.nameParts)
   }
 
-  def cannotOverwriteTableThatIsBeingReadFromError(tableName: String): Throwable = {
-    new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1315",
-      messageParameters = Map("tableName" -> tableName))
-  }
-
   def invalidPartitionTransformationError(expr: Expression): Throwable = {
     new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1316",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index 7414ce2cf51..07445f932b5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -21,6 +21,7 @@ import java.util.Locale
 
 import org.antlr.v4.runtime.ParserRuleContext
 
+import org.apache.spark.SparkException
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
 import org.apache.spark.sql.catalyst.trees.Origin
@@ -36,8 +37,8 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase {
     new ParseException(errorClass = "_LEGACY_ERROR_TEMP_0001", ctx)
   }
 
-  def insertOverwriteDirectoryUnsupportedError(ctx: InsertIntoContext): Throwable = {
-    new ParseException(errorClass = "_LEGACY_ERROR_TEMP_0002", ctx)
+  def insertOverwriteDirectoryUnsupportedError(): Throwable = {
+    SparkException.internalError("INSERT OVERWRITE DIRECTORY is not supported.")
   }
 
  def columnAliasInOperationNotAllowedError(op: String, ctx: TableAliasContext): Throwable = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
index 5e7395d905d..997308c6ef4 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisTest.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.catalyst.analysis
 import java.net.URI
 import java.util.Locale
 
+import org.apache.spark.SparkException
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{QueryPlanningTracker, TableIdentifier}
 import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogStorageFormat, CatalogTable, CatalogTableType, InMemoryCatalog, SessionCatalog, TemporaryViewRelation}
@@ -207,4 +208,8 @@ trait AnalysisTest extends PlanTest {
   protected def parseException(parser: String => Any)(sqlText: String): ParseException = {
     intercept[ParseException](parser(sqlText))
   }
+
+  protected def internalException(parser: String => Any)(sqlText: String): SparkException = {
+    intercept[SparkException](parser(sqlText))
+  }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index c8759bf09d6..08700a84851 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -42,6 +42,10 @@ class DDLParserSuite extends AnalysisTest {
     comparePlans(parsePlan(sql), expected, checkAnalysis = false)
   }
 
+  private def internalException(sqlText: String): SparkThrowable = {
+    super.internalException(parsePlan)(sqlText)
+  }
+
   test("create/replace table using - schema") {
    val createSql = "CREATE TABLE my_tab(a INT COMMENT 'test', b STRING NOT NULL) USING parquet"
    val replaceSql = "REPLACE TABLE my_tab(a INT COMMENT 'test', b STRING NOT NULL) USING parquet"
@@ -2845,4 +2849,20 @@ class DDLParserSuite extends AnalysisTest {
        ExpectedContext(fragment = "b STRING FIRST COMMENT \"abc\" AFTER y", start = 30, stop = 65)
     )
   }
+
+  test("AstBuilder don't support `INSERT OVERWRITE DIRECTORY`") {
+    val insertDirSql =
+      s"""
+         | INSERT OVERWRITE LOCAL DIRECTORY
+         | USING parquet
+         | OPTIONS (
+         |  path 'xxx'
+         | )
+         | SELECT i from t1""".stripMargin
+
+    checkError(
+      exception = internalException(insertDirSql),
+      errorClass = "INTERNAL_ERROR",
+      parameters = Map("message" -> "INSERT OVERWRITE DIRECTORY is not supported."))
+  }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
index 3f97533199a..da93fdf58e9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriter.scala
@@ -658,11 +658,13 @@ final class DataFrameWriter[T] private[sql](ds: Dataset[T]) {
         EliminateSubqueryAliases(tableRelation) match {
          // check if the table is a data source table (the relation is a BaseRelation).
          case LogicalRelation(dest: BaseRelation, _, _, _) if srcRelations.contains(dest) =>
-            throw QueryCompilationErrors.cannotOverwriteTableThatIsBeingReadFromError(tableName)
+            throw QueryCompilationErrors.cannotOverwriteTableThatIsBeingReadFromError(
+              qualifiedIdent)
           // check hive table relation when overwrite mode
           case relation: HiveTableRelation
               if srcRelations.contains(relation.tableMeta.identifier) =>
-            throw QueryCompilationErrors.cannotOverwriteTableThatIsBeingReadFromError(tableName)
+            throw QueryCompilationErrors.cannotOverwriteTableThatIsBeingReadFromError(
+              qualifiedIdent)
           case _ => // OK
         }
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index b2f5c66a35a..b4c98108efa 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -1030,14 +1030,22 @@ object DDLUtils extends Logging {
   /**
    * Throws exception if outputPath tries to overwrite inputpath.
    */
-  def verifyNotReadPath(query: LogicalPlan, outputPath: Path) : Unit = {
+  def verifyNotReadPath(
+      query: LogicalPlan,
+      outputPath: Path,
+      table: Option[CatalogTable] = None) : Unit = {
     val inputPaths = query.collect {
       case LogicalRelation(r: HadoopFsRelation, _, _, _) =>
         r.location.rootPaths
     }.flatten
 
     if (inputPaths.contains(outputPath)) {
-      throw QueryCompilationErrors.cannotOverwritePathBeingReadFromError()
+      table match {
+        case Some(v) =>
+          throw QueryCompilationErrors.cannotOverwriteTableThatIsBeingReadFromError(v.identifier)
+        case _ =>
+          throw QueryCompilationErrors.cannotOverwritePathBeingReadFromError(outputPath.toString)
+      }
     }
   }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
index dd79e9b26d4..818dc4eb31c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
@@ -224,7 +224,7 @@ object DataSourceAnalysis extends Rule[LogicalPlan] {
      // We write to staging directories and move to final partition directories after writing
      // job is done. So it is ok to have outputPath try to overwrite inputpath.
       if (overwrite && !insertCommand.dynamicPartitionOverwrite) {
-        DDLUtils.verifyNotReadPath(actualQuery, outputPath)
+        DDLUtils.verifyNotReadPath(actualQuery, outputPath, table)
       }
       insertCommand
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index d8e0a05f262..1df860ef9c4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -252,15 +252,12 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
   }
 
   test("it is not allowed to write to a table while querying it.") {
-    val message = intercept[AnalysisException] {
-      sql(
-        s"""
-        |INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jsonTable
-      """.stripMargin)
-    }.getMessage
-    assert(
-      message.contains("Cannot overwrite a path that is also being read from."),
-      "INSERT OVERWRITE to a table while querying it should not be allowed.")
+    checkErrorMatchPVals(
+      exception = intercept[AnalysisException] {
+        sql("INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jsonTable")
+      },
+      errorClass = "UNSUPPORTED_OVERWRITE.PATH",
+      parameters = Map("path" -> ".*"))
   }
 
   test("SPARK-30112: it is allowed to write to a table while querying it for " 
+
@@ -296,16 +293,16 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
             checkAnswer(spark.table("insertTable"),
               Row(2, 1, 1) :: Row(3, 1, 2) :: Row(4, 1, 3) :: Nil)
           } else {
-            val message = intercept[AnalysisException] {
-              sql(
-                """
-                  |INSERT OVERWRITE TABLE insertTable PARTITION(part1=1, part2)
-                  |SELECT i + 1, part2 FROM insertTable
-                """.stripMargin)
-            }.getMessage
-            assert(
-              message.contains("Cannot overwrite a path that is also being read from."),
-              "INSERT OVERWRITE to a table while querying it should not be allowed.")
+            checkError(
+              exception = intercept[AnalysisException] {
+                sql(
+                  """
+                    |INSERT OVERWRITE TABLE insertTable PARTITION(part1=1, part2)
+                    |SELECT i + 1, part2 FROM insertTable
+                  """.stripMargin)
+              },
+              errorClass = "UNSUPPORTED_OVERWRITE.TABLE",
+              parameters = Map("table" -> "`spark_catalog`.`default`.`inserttable`"))
           }
         }
       }
@@ -2375,6 +2372,41 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
       checkAnswer(spark.table("t2"), Row(-1))
     }
   }
+
+  test("UNSUPPORTED_OVERWRITE.TABLE: Can't overwrite a table that is also being read from") {
+    val tableName = "t1"
+    withTable(tableName) {
+      sql(s"CREATE TABLE $tableName (a STRING, b INT) USING parquet")
+      checkError(
+        exception = intercept[AnalysisException] {
+          spark.table(tableName).write.mode(SaveMode.Overwrite).saveAsTable(tableName)
+        },
+        errorClass = "UNSUPPORTED_OVERWRITE.TABLE",
+        parameters = Map("table" -> s"`spark_catalog`.`default`.`$tableName`")
+      )
+    }
+  }
+
+  test("UNSUPPORTED_OVERWRITE.PATH: Can't overwrite a path that is also being read from") {
+    val tableName = "t1"
+    withTable(tableName) {
+      withTempDir { dir =>
+        val path = dir.getCanonicalPath
+        sql(s"CREATE TABLE $tableName(i int) USING parquet LOCATION '$path'")
+        val insertDirSql =
+          s"""
+             |INSERT OVERWRITE LOCAL DIRECTORY '$path'
+             |USING parquet
+             |SELECT i from $tableName""".stripMargin
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql(insertDirSql)
+          },
+          errorClass = "UNSUPPORTED_OVERWRITE.PATH",
+          parameters = Map("path" -> ("file:" + path)))
+      }
+    }
+  }
 }
 
 class FileExistingTestFileSystem extends RawLocalFileSystem {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 706dfa82abb..6fd0d971a5e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -1251,17 +1251,21 @@ class MetastoreDataSourcesSuite extends QueryTest
       checkAnswer(table(tableName),
         Seq(Row(1, 2), Row(1, 2)))
 
-      var e = intercept[AnalysisException] {
-        table(tableName).write.mode(SaveMode.Overwrite).saveAsTable(tableName)
-      }
-      assert(e.getMessage.contains(
-        s"Cannot overwrite table $SESSION_CATALOG_NAME.default.$tableName " +
-        "that is also being read from"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          table(tableName).write.mode(SaveMode.Overwrite).saveAsTable(tableName)
+        },
+        errorClass = "UNSUPPORTED_OVERWRITE.TABLE",
+        parameters = Map("table" -> s"`$SESSION_CATALOG_NAME`.`default`.`tab1`")
+      )
 
-      e = intercept[AnalysisException] {
-        table(tableName).write.mode(SaveMode.ErrorIfExists).saveAsTable(tableName)
-      }
-      checkErrorTableAlreadyExists(e, s"`$SESSION_CATALOG_NAME`.`default`.`$tableName`")
+      checkError(
+        exception = intercept[AnalysisException] {
+          table(tableName).write.mode(SaveMode.ErrorIfExists).saveAsTable(tableName)
+        },
+        errorClass = "TABLE_OR_VIEW_ALREADY_EXISTS",
+        parameters = Map("relationName" -> s"`$SESSION_CATALOG_NAME`.`default`.`tab1`")
+      )
     }
   }
 
@@ -1285,10 +1289,13 @@ class MetastoreDataSourcesSuite extends QueryTest
         table(tableName),
        Seq(Row(1, 2), Row(1, 2), Row(1, 2), Row(1, 2), Row(1, 2), Row(1, 2), Row(1, 2), Row(1, 2)))
 
-      val e = intercept[AnalysisException] {
-        table(tableName).write.mode(SaveMode.Overwrite).insertInto(tableName)
-      }.getMessage
-      assert(e.contains(s"Cannot overwrite a path that is also being read from"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          table(tableName).write.mode(SaveMode.Overwrite).insertInto(tableName)
+        },
+        errorClass = "UNSUPPORTED_OVERWRITE.TABLE",
+        parameters = Map("table" -> s"`$SESSION_CATALOG_NAME`.`default`.`tab1`")
+      )
     }
   }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index f64ffee7a66..6e4094bea8c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -1855,11 +1855,12 @@ class HiveDDLSuite
       }
      assert(e2.message.contains("Creating bucketed Hive serde table is not supported yet"))
 
-      val e3 = intercept[AnalysisException] {
-        spark.table("t").write.format("hive").mode("overwrite").saveAsTable("t")
-      }
-      assert(e3.message.contains(s"Cannot overwrite table $SESSION_CATALOG_NAME.default.t " +
-        "that is also being read from"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          spark.table("t").write.format("hive").mode("overwrite").saveAsTable("t")
+        },
+        errorClass = "UNSUPPORTED_OVERWRITE.TABLE",
+        parameters = Map("table" -> s"`$SESSION_CATALOG_NAME`.`default`.`t`"))
     }
   }
 

