This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new ac035c2ca2e [SPARK-41538][FOLLOWUP][TESTS] Move a metadata column test case to MetadataColumnSuite
ac035c2ca2e is described below

commit ac035c2ca2e578aa5ceb56d50d4326cb2576a369
Author: Gengliang Wang <gengli...@apache.org>
AuthorDate: Fri Jan 6 00:15:31 2023 -0800

    [SPARK-41538][FOLLOWUP][TESTS] Move a metadata column test case to MetadataColumnSuite
    
    ### What changes were proposed in this pull request?
    
    Move the new metadata column test case from https://github.com/apache/spark/pull/39081 to `MetadataColumnSuite`
    
    ### Why are the changes needed?
    
    All metadata-column-related test cases should go into `MetadataColumnSuite`. For example:
    
    - https://github.com/apache/spark/pull/37758/
    - https://github.com/apache/spark/pull/39152
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    GA tests
    
    Closes #39425 from gengliangwang/moveTest.
    
    Authored-by: Gengliang Wang <gengli...@apache.org>
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
---
 .../scala/org/apache/spark/sql/SQLQuerySuite.scala | 39 ----------------------
 .../spark/sql/connector/MetadataColumnSuite.scala  | 39 ++++++++++++++++++++++
 2 files changed, 39 insertions(+), 39 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index b1b3ec97d1e..3d171a04caf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -4596,45 +4596,6 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
       sql("SELECT /*+ hash(t2) */ * FROM t1 join t2 on c1 = c2")
     }
   }
-
-  test("SPARK-41538: Metadata column should be appended at the end of 
project") {
-    val tableName = "table_1"
-    val viewName = "view_1"
-    withTable(tableName) {
-      withView(viewName) {
-        sql(s"CREATE TABLE $tableName (a ARRAY<STRING>, s STRUCT<id: STRING>) 
USING parquet")
-        val id = "id1"
-        sql(s"INSERT INTO $tableName values(ARRAY('a'), named_struct('id', 
'$id'))")
-        sql(
-          s"""
-             |CREATE VIEW $viewName (id)
-             |AS WITH source AS (
-             |    SELECT * FROM $tableName
-             |),
-             |renamed AS (
-             |    SELECT s.id FROM source
-             |)
-             |SELECT id FROM renamed
-             |""".stripMargin)
-        val query =
-          s"""
-             |with foo AS (
-             |  SELECT '$id' as id
-             |),
-             |bar AS (
-             |  SELECT '$id' as id
-             |)
-             |SELECT
-             |  1
-             |FROM foo
-             |FULL OUTER JOIN bar USING(id)
-             |FULL OUTER JOIN $viewName USING(id)
-             |WHERE foo.id IS NOT NULL
-             |""".stripMargin
-        checkAnswer(sql(query), Row(1))
-      }
-    }
-  }
 }
 
 case class Foo(bar: Option[String])
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/MetadataColumnSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/MetadataColumnSuite.scala
index 2fd3c2b105d..4f617bc707b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/MetadataColumnSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/MetadataColumnSuite.scala
@@ -348,6 +348,45 @@ class MetadataColumnSuite extends DatasourceV2SQLBase {
       assert(df.union(fieldNameMismatchDf).queryExecution.analyzed.metadataOutput.isEmpty)
     }
   }
+
+  test("SPARK-41538: Metadata column should be appended at the end of 
project") {
+    val tableName = "table_1"
+    val viewName = "view_1"
+    withTable(tableName) {
+      withView(viewName) {
+        sql(s"CREATE TABLE $tableName (a ARRAY<STRING>, s STRUCT<id: STRING>) 
USING parquet")
+        val id = "id1"
+        sql(s"INSERT INTO $tableName values(ARRAY('a'), named_struct('id', 
'$id'))")
+        sql(
+          s"""
+             |CREATE VIEW $viewName (id)
+             |AS WITH source AS (
+             |    SELECT * FROM $tableName
+             |),
+             |renamed AS (
+             |    SELECT s.id FROM source
+             |)
+             |SELECT id FROM renamed
+             |""".stripMargin)
+        val query =
+          s"""
+             |with foo AS (
+             |  SELECT '$id' as id
+             |),
+             |bar AS (
+             |  SELECT '$id' as id
+             |)
+             |SELECT
+             |  1
+             |FROM foo
+             |FULL OUTER JOIN bar USING(id)
+             |FULL OUTER JOIN $viewName USING(id)
+             |WHERE foo.id IS NOT NULL
+             |""".stripMargin
+        checkAnswer(sql(query), Row(1))
+      }
+    }
+  }
 }
 
 class MetadataTestTable(

