This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new c773aed87d64 [SPARK-47189][SQL] Tweak column error names and text
c773aed87d64 is described below

commit c773aed87d649f24ca896ffc2a1b09534588e088
Author: Nicholas Chammas <nicholas.cham...@gmail.com>
AuthorDate: Tue Feb 27 18:04:00 2024 +0300

    [SPARK-47189][SQL] Tweak column error names and text
    
    ### What changes were proposed in this pull request?
    
    Tweak the names and text for a few errors so they read more naturally (and correctly).
    
    ### Why are the changes needed?
    
    Just minor English improvements.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, these are user-facing error messages.
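    
    As a rough sketch of the user-visible effect (the statement and the `testcat` catalog are borrowed from `DDLParserSuite`; the exact rendering is not verified here), a query hitting the renamed error class now reports something along these lines:
    
    ```sql
    DELETE FROM testcat.ns1.ns2.tbl AS t(a, b, c, d) WHERE d = 2;
    -- [COLUMN_ALIASES_NOT_ALLOWED] Column aliases are not allowed in DELETE. SQLSTATE: 42601
    ```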
    
    ### How was this patch tested?
    
    No testing apart from CI.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #45276 from nchammas/column-error-tweak.
    
    Authored-by: Nicholas Chammas <nicholas.cham...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 common/utils/src/main/resources/error/error-classes.json     |  6 +++---
 docs/sql-error-conditions.md                                 |  6 +++---
 .../org/apache/spark/sql/errors/QueryParsingErrors.scala     |  2 +-
 .../apache/spark/sql/catalyst/parser/DDLParserSuite.scala    | 12 ++++++------
 4 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 17ef8e5fe469..57ee72c7637f 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -492,15 +492,15 @@
     },
     "sqlState" : "54000"
   },
-  "COLUMN_ALIASES_IS_NOT_ALLOWED" : {
+  "COLUMN_ALIASES_NOT_ALLOWED" : {
     "message" : [
-      "Columns aliases are not allowed in <op>."
+      "Column aliases are not allowed in <op>."
     ],
     "sqlState" : "42601"
   },
   "COLUMN_ALREADY_EXISTS" : {
     "message" : [
-      "The column <columnName> already exists. Consider to choose another name 
or rename the existing column."
+      "The column <columnName> already exists. Choose another name or rename 
the existing column."
     ],
     "sqlState" : "42711"
   },
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index bb982a77fca0..f47a67b3d31f 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -400,17 +400,17 @@ Can't create array with `<numberOfElements>` elements which exceeding the array
 
 For more details see [COLLECTION_SIZE_LIMIT_EXCEEDED](sql-error-conditions-collection-size-limit-exceeded-error-class.html)
 
-### COLUMN_ALIASES_IS_NOT_ALLOWED
+### COLUMN_ALIASES_NOT_ALLOWED
 
 [SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
 
-Columns aliases are not allowed in `<op>`.
+Column aliases are not allowed in `<op>`.
 
 ### COLUMN_ALREADY_EXISTS
 
 [SQLSTATE: 42711](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
 
-The column `<columnName>` already exists. Consider to choose another name or rename the existing column.
+The column `<columnName>` already exists. Choose another name or rename the existing column.
 
 ### COLUMN_NOT_DEFINED_IN_TABLE
 
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index daab055608f5..37793d97ccc3 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -42,7 +42,7 @@ private[sql] object QueryParsingErrors extends DataTypeErrorsBase {
 
   def columnAliasInOperationNotAllowedError(op: String, ctx: TableAliasContext): Throwable = {
     new ParseException(
-      errorClass = "COLUMN_ALIASES_IS_NOT_ALLOWED",
+      errorClass = "COLUMN_ALIASES_NOT_ALLOWED",
       messageParameters = Map("op" -> toSQLStmt(op)),
       ctx.identifierList())
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 7704469b2986..f80cad7dd200 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -1794,11 +1794,11 @@ class DDLParserSuite extends AnalysisTest {
         EqualTo(UnresolvedAttribute("t.a"), Literal(2))))
   }
 
-  test("delete from table: columns aliases is not allowed") {
+  test("delete from table: column aliases are not allowed") {
     val sql = "DELETE FROM testcat.ns1.ns2.tbl AS t(a,b,c,d) WHERE d = 2"
     checkError(
       exception = parseException(sql),
-      errorClass = "COLUMN_ALIASES_IS_NOT_ALLOWED",
+      errorClass = "COLUMN_ALIASES_NOT_ALLOWED",
       parameters = Map("op" -> "DELETE"),
       context = ExpectedContext(
         fragment = sql,
@@ -1833,14 +1833,14 @@ class DDLParserSuite extends AnalysisTest {
         Some(EqualTo(UnresolvedAttribute("t.c"), Literal(2)))))
   }
 
-  test("update table: columns aliases is not allowed") {
+  test("update table: column aliases are not allowed") {
     val sql =
       """UPDATE testcat.ns1.ns2.tbl AS t(a,b,c,d)
         |SET b='Robert', c=32
         |WHERE d=2""".stripMargin
     checkError(
       exception = parseException(sql),
-      errorClass = "COLUMN_ALIASES_IS_NOT_ALLOWED",
+      errorClass = "COLUMN_ALIASES_NOT_ALLOWED",
       parameters = Map("op" -> "UPDATE"),
       context = ExpectedContext(
         fragment = sql,
@@ -2027,7 +2027,7 @@ class DDLParserSuite extends AnalysisTest {
         Seq.empty))
   }
 
-  test("merge into table: columns aliases are not allowed") {
+  test("merge into table: column aliases are not allowed") {
     Seq("target(c1, c2)" -> "source", "target" -> "source(c1, c2)").foreach {
       case (targetAlias, sourceAlias) =>
         val sql = s"""MERGE INTO testcat1.ns1.ns2.tbl AS $targetAlias
@@ -2040,7 +2040,7 @@ class DDLParserSuite extends AnalysisTest {
           .stripMargin
         checkError(
           exception = parseException(sql),
-          errorClass = "COLUMN_ALIASES_IS_NOT_ALLOWED",
+          errorClass = "COLUMN_ALIASES_NOT_ALLOWED",
           parameters = Map("op" -> "MERGE"),
           context = ExpectedContext(
             fragment = sql,

