This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 7e0679be8aa [SPARK-44338][SQL] Fix view schema mismatch error message
7e0679be8aa is described below

commit 7e0679be8aa1aea531417907da7c66d86b74302a
Author: Wenchen Fan <wenc...@databricks.com>
AuthorDate: Thu Jul 13 10:32:43 2023 -0700

    [SPARK-44338][SQL] Fix view schema mismatch error message
    
    ### What changes were proposed in this pull request?
    
    This fixes a minor issue where the view schema mismatch error message may
    contain an extra space between the view name and `AS SELECT`. It also takes
    the chance to simplify the view test suite a bit, always using
    `tableIdentifier` to produce the view name.
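
    A minimal sketch (illustrative only; `buildSuggestion` and its parameters are
    hypothetical names, not the actual Spark source) of why the old template
    produced a double space when a view has no user-specified column list, and how
    moving the separators into the conditional avoids it:

    ```scala
    // Hypothetical helper mirroring how the suggestion string is assembled.
    // Old template: s"CREATE OR REPLACE VIEW $viewName $userSpecifiedColumns AS $viewText"
    // With an empty column clause, the two literal spaces around it collided
    // into "...$viewName  AS ...".
    def buildSuggestion(viewName: String, cols: Seq[String], viewText: String): String = {
      val userSpecifiedColumns =
        if (cols.isEmpty) " "                    // just the separating space
        else s" (${cols.mkString(", ")}) "       // column list padded on both sides
      s"CREATE OR REPLACE VIEW $viewName${userSpecifiedColumns}AS $viewText"
    }

    buildSuggestion("spark_catalog.mydb1.v1", Nil, "SELECT * FROM t1")
    // CREATE OR REPLACE VIEW spark_catalog.mydb1.v1 AS SELECT * FROM t1
    buildSuggestion("spark_catalog.mydb1.v1", Seq("i1"), "SELECT * FROM t1")
    // CREATE OR REPLACE VIEW spark_catalog.mydb1.v1 (i1) AS SELECT * FROM t1
    ```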
    
    ### Why are the changes needed?
    
    A small fix to the error message.
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    existing tests
    
    Closes #41898 from cloud-fan/view.
    
    Authored-by: Wenchen Fan <wenc...@databricks.com>
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
---
 .../sql/catalyst/catalog/SessionCatalog.scala      |  6 +--
 .../columnresolution-negative.sql.out              |  2 +-
 .../results/columnresolution-negative.sql.out      |  2 +-
 .../spark/sql/execution/SQLViewTestSuite.scala     | 53 +++++++++-------------
 4 files changed, 27 insertions(+), 36 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 135dc17ad95..392c911ddb8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -909,11 +909,11 @@ class SessionCatalog(
       val viewText = metadata.viewText.get
       val userSpecifiedColumns =
         if (metadata.schema.fieldNames.toSeq == metadata.viewQueryColumnNames) {
-          ""
+          " "
         } else {
-          s"(${metadata.schema.fieldNames.mkString(", ")})"
+          s" (${metadata.schema.fieldNames.mkString(", ")}) "
         }
-      Some(s"CREATE OR REPLACE VIEW $viewName $userSpecifiedColumns AS 
$viewText")
+      Some(s"CREATE OR REPLACE VIEW $viewName${userSpecifiedColumns}AS 
$viewText")
     }
   }
 
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/columnresolution-negative.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/columnresolution-negative.sql.out
index 95f3e53ff60..c80b9e82ab0 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/columnresolution-negative.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/columnresolution-negative.sql.out
@@ -417,7 +417,7 @@ org.apache.spark.sql.AnalysisException
     "actualCols" : "[]",
     "colName" : "i1",
     "expectedNum" : "1",
-    "suggestion" : "CREATE OR REPLACE VIEW spark_catalog.mydb1.v1  AS SELECT * 
FROM t1",
+    "suggestion" : "CREATE OR REPLACE VIEW spark_catalog.mydb1.v1 AS SELECT * 
FROM t1",
     "viewName" : "`spark_catalog`.`mydb1`.`v1`"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out
index 4d700b0a142..b3e62faa3b3 100644
--- a/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/columnresolution-negative.sql.out
@@ -460,7 +460,7 @@ org.apache.spark.sql.AnalysisException
     "actualCols" : "[]",
     "colName" : "i1",
     "expectedNum" : "1",
-    "suggestion" : "CREATE OR REPLACE VIEW spark_catalog.mydb1.v1  AS SELECT * 
FROM t1",
+    "suggestion" : "CREATE OR REPLACE VIEW spark_catalog.mydb1.v1 AS SELECT * 
FROM t1",
     "viewName" : "`spark_catalog`.`mydb1`.`v1`"
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
index 2956a6345bf..1bee93fa429 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
@@ -41,10 +41,7 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
 
   protected def viewTypeString: String
   protected def formattedViewName(viewName: String): String
-  protected def fullyQualifiedViewName(viewName: String): String
   protected def tableIdentifier(viewName: String): TableIdentifier
-  protected def tableIdentQuotedString(viewName: String): String =
-    tableIdentifier(viewName).quotedString
 
   def createView(
       viewName: String,
@@ -185,10 +182,10 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
           },
           errorClass = "RECURSIVE_VIEW",
           parameters = Map(
-            "viewIdent" -> tableIdentQuotedString("v1"),
-            "newPath" -> (s"${tableIdentQuotedString("v1")} " +
-              s"-> ${tableIdentQuotedString("v2")} " +
-              s"-> ${tableIdentQuotedString("v1")}"))
+            "viewIdent" -> tableIdentifier("v1").quotedString,
+            "newPath" -> (s"${tableIdentifier("v1").quotedString} " +
+              s"-> ${tableIdentifier("v2").quotedString} " +
+              s"-> ${tableIdentifier("v1").quotedString}"))
         )
       }
     }
@@ -206,10 +203,10 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
           },
           errorClass = "RECURSIVE_VIEW",
           parameters = Map(
-            "viewIdent" -> tableIdentQuotedString("v1"),
-            "newPath" -> (s"${tableIdentQuotedString("v1")} " +
-              s"-> ${tableIdentQuotedString("v2")} " +
-              s"-> ${tableIdentQuotedString("v1")}"))
+            "viewIdent" -> tableIdentifier("v1").quotedString,
+            "newPath" -> (s"${tableIdentifier("v1").quotedString} " +
+              s"-> ${tableIdentifier("v2").quotedString} " +
+              s"-> ${tableIdentifier("v1").quotedString}"))
         )
       }
     }
@@ -230,10 +227,10 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
           },
           errorClass = "_LEGACY_ERROR_TEMP_1009",
           parameters = Map(
-            "identifier" -> tableIdentQuotedString("view0"),
+            "identifier" -> tableIdentifier("view0").quotedString,
             "maxNestedViewDepth" -> "10",
             "config" -> s"${MAX_NESTED_VIEW_DEPTH.key}"),
-          context = ExpectedContext("VIEW", fullyQualifiedViewName("view1"),
+          context = ExpectedContext("VIEW", tableIdentifier("view1").unquotedString,
             14, 13 + formattedViewName("view0").length, formattedViewName("view0"))
         )
       }
@@ -315,7 +312,7 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
             sql(s"SELECT * FROM $viewName").collect()
           }
           checkErrorTableNotFound(e, "`t`",
-            ExpectedContext("VIEW", fullyQualifiedViewName("testview"), 14, 
14, "t"))
+            ExpectedContext("VIEW", 
tableIdentifier("testview").unquotedString, 14, 14, "t"))
         }
       }
     }
@@ -364,7 +361,7 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
           parameters = Map("fieldName" -> "`i`", "fields" -> "`j`"),
           context = ExpectedContext(
             fragment = "s.i",
-            objectName = fullyQualifiedViewName("v"),
+            objectName = tableIdentifier("v").unquotedString,
             objectType = "VIEW",
             startIndex = 7,
             stopIndex = 9))
@@ -524,7 +521,6 @@ abstract class TempViewTestSuite extends SQLViewTestSuite {
 class LocalTempViewTestSuite extends TempViewTestSuite with SharedSparkSession {
   override protected def viewTypeString: String = "TEMPORARY VIEW"
   override protected def formattedViewName(viewName: String): String = viewName
-  override protected def fullyQualifiedViewName(viewName: String): String = viewName
   override protected def tableIdentifier(viewName: String): TableIdentifier = {
     TableIdentifier(viewName)
   }
@@ -539,9 +535,6 @@ class GlobalTempViewTestSuite extends TempViewTestSuite with SharedSparkSession
   override protected def formattedViewName(viewName: String): String = {
     s"$db.$viewName"
   }
-  override protected def fullyQualifiedViewName(viewName: String): String = {
-    s"$db.$viewName"
-  }
   override protected def tableIdentifier(viewName: String): TableIdentifier = {
     TableIdentifier(viewName, Some(db))
   }
@@ -568,9 +561,6 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
   private def db: String = "default"
   override protected def viewTypeString: String = "VIEW"
   override protected def formattedViewName(viewName: String): String = s"$db.$viewName"
-  override protected def fullyQualifiedViewName(viewName: String): String = {
-    s"spark_catalog.$db.$viewName"
-  }
   override protected def tableIdentifier(viewName: String): TableIdentifier = {
     TableIdentifier(viewName, Some(db), Some(SESSION_CATALOG_NAME))
   }
@@ -582,7 +572,7 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
           sql("CREATE VIEW v AS SELECT count(*) FROM VALUES (1), (2), (3) 
t(a)")
         },
         errorClass = "CREATE_PERMANENT_VIEW_WITHOUT_ALIAS",
-        parameters = Map("name" -> "`spark_catalog`.`default`.`v`", "attr" -> 
"\"count(1)\"")
+        parameters = Map("name" -> tableIdentifier("v").quotedString, "attr" 
-> "\"count(1)\"")
       )
       sql("CREATE VIEW v AS SELECT count(*) AS cnt FROM VALUES (1), (2), (3) 
t(a)")
       checkAnswer(sql("SELECT * FROM v"), Seq(Row(3)))
@@ -596,7 +586,7 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
           sql("CREATE VIEW v AS SELECT * FROM (SELECT a + b FROM VALUES (1, 2) 
t(a, b))")
         },
         errorClass = "CREATE_PERMANENT_VIEW_WITHOUT_ALIAS",
-        parameters = Map("name" -> "`spark_catalog`.`default`.`v`", "attr" -> 
"\"(a + b)\"")
+        parameters = Map("name" -> tableIdentifier("v").quotedString, "attr" 
-> "\"(a + b)\"")
       )
       sql("CREATE VIEW v AS SELECT * FROM (SELECT a + b AS col FROM VALUES (1, 
2) t(a, b))")
       checkAnswer(sql("SELECT * FROM v"), Seq(Row(3)))
@@ -611,7 +601,7 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
           sql("ALTER VIEW v AS SELECT count(*) FROM VALUES (1), (2), (3) t(a)")
         },
         errorClass = "CREATE_PERMANENT_VIEW_WITHOUT_ALIAS",
-        parameters = Map("name" -> "`spark_catalog`.`default`.`v`", "attr" -> 
"\"count(1)\"")
+        parameters = Map("name" -> tableIdentifier("v").quotedString, "attr" 
-> "\"count(1)\"")
       )
     }
   }
@@ -640,13 +630,13 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
         val e = intercept[AnalysisException] {
           sql(s"SELECT * FROM test_view")
         }
+        val unquotedViewName = tableIdentifier("test_view").unquotedString
         checkError(
           exception = e,
           errorClass = "INCOMPATIBLE_VIEW_SCHEMA_CHANGE",
           parameters = Map(
-            "viewName" -> "`spark_catalog`.`default`.`test_view`",
-            "suggestion" ->
-              "CREATE OR REPLACE VIEW spark_catalog.default.test_view  AS 
SELECT * FROM t",
+            "viewName" -> tableIdentifier("test_view").quotedString,
+            "suggestion" -> s"CREATE OR REPLACE VIEW $unquotedViewName AS 
SELECT * FROM t",
             "actualCols" -> "[]", "colName" -> "col_j",
             "expectedNum" -> "1")
         )
@@ -672,7 +662,7 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
             errorClass = "INVALID_TEMP_OBJ_REFERENCE",
             parameters = Map(
               "obj" -> "VIEW",
-              "objName" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`",
+              "objName" -> tableIdentifier("v1").quotedString,
               "tempObj" -> "VIEW",
               "tempObjName" -> "`v2`"))
           val tempFunctionName = "temp_udf"
@@ -686,7 +676,7 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
               errorClass = "INVALID_TEMP_OBJ_REFERENCE",
               parameters = Map(
                 "obj" -> "VIEW",
-                "objName" -> s"`$SESSION_CATALOG_NAME`.`default`.`v1`",
+                "objName" -> tableIdentifier("v1").quotedString,
                 "tempObj" -> "FUNCTION",
                 "tempObjName" -> s"`$tempFunctionName`"))
           }
@@ -729,7 +719,8 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
           sql("SELECT * FROM v")
         },
         errorClass = "INVALID_VIEW_TEXT",
-        parameters = Map("viewText" -> "DROP VIEW v", "viewName" -> 
"`spark_catalog`.`default`.`v`")
+        parameters = Map(
+          "viewText" -> "DROP VIEW v", "viewName" -> 
tableIdentifier("v").quotedString)
       )
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
