This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new a3432428e760 [SPARK-46953][TEST] Wrap withTable for a test in ResolveDefaultColumnsSuite
a3432428e760 is described below

commit a3432428e760fc16610cfe3380d3bdea7654f75d
Author: Kent Yao <y...@apache.org>
AuthorDate: Fri Feb 2 15:17:43 2024 +0800

    [SPARK-46953][TEST] Wrap withTable for a test in ResolveDefaultColumnsSuite
    
    ### What changes were proposed in this pull request?
    
    The table is not cleaned up after this test; if the test is retried, or an upcoming test reuses 't' as its table name, it will fail with a TableAlreadyExistsException (TAEE).
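
    For context, a minimal sketch of the cleanup guarantee that `withTable` provides. This is a simplified stand-in modeled on Spark's `SQLTestUtils` helper, not the actual implementation; the real helper uses the suite's own SparkSession rather than taking one as a parameter:

    ```scala
    import org.apache.spark.sql.SparkSession

    // Simplified stand-in for SQLTestUtils.withTable: run the test body, then
    // always drop the named tables afterwards, even when the body throws.
    def withTable(spark: SparkSession)(tableNames: String*)(f: => Unit): Unit = {
      try f
      finally tableNames.foreach(name => spark.sql(s"DROP TABLE IF EXISTS $name"))
    }
    ```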
    
    ### Why are the changes needed?
    
    Fix the test as a follow-up of SPARK-43742.
    
    ### Does this PR introduce _any_ user-facing change?
    
    no

    ### How was this patch tested?
    
    this test itself

    ### Was this patch authored or co-authored using generative AI tooling?
    
    no
    
    Closes #44993 from yaooqinn/SPARK-43742.
    
    Authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Kent Yao <y...@apache.org>
---
 .../spark/sql/ResolveDefaultColumnsSuite.scala     | 104 +++++++++++----------
 1 file changed, 53 insertions(+), 51 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ResolveDefaultColumnsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ResolveDefaultColumnsSuite.scala
index 29b2796d25aa..00529559a485 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ResolveDefaultColumnsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ResolveDefaultColumnsSuite.scala
@@ -76,57 +76,59 @@ class ResolveDefaultColumnsSuite extends QueryTest with SharedSparkSession {
   }
 
   test("INSERT into partitioned tables") {
-    sql("create table t(c1 int, c2 int, c3 int, c4 int) using parquet 
partitioned by (c3, c4)")
-
-    // INSERT without static partitions
-    checkError(
-      exception = intercept[AnalysisException] {
-        sql("insert into t values (1, 2, 3)")
-      },
-      errorClass = "INSERT_COLUMN_ARITY_MISMATCH.NOT_ENOUGH_DATA_COLUMNS",
-      parameters = Map(
-        "tableName" -> "`spark_catalog`.`default`.`t`",
-        "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
-        "dataColumns" -> "`col1`, `col2`, `col3`"))
-
-    // INSERT without static partitions but with column list
-    sql("truncate table t")
-    sql("insert into t (c2, c1, c4) values (1, 2, 3)")
-    checkAnswer(spark.table("t"), Row(2, 1, null, 3))
-
-    // INSERT with static partitions
-    sql("truncate table t")
-    checkError(
-      exception = intercept[AnalysisException] {
-        sql("insert into t partition(c3=3, c4=4) values (1)")
-      },
-      errorClass = "INSERT_PARTITION_COLUMN_ARITY_MISMATCH",
-      parameters = Map(
-        "tableName" -> "`spark_catalog`.`default`.`t`",
-        "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
-        "dataColumns" -> "`col1`",
-        "staticPartCols" -> "`c3`, `c4`"))
-
-    // INSERT with static partitions and with column list
-    sql("truncate table t")
-    sql("insert into t partition(c3=3, c4=4) (c2) values (1)")
-    checkAnswer(spark.table("t"), Row(null, 1, 3, 4))
-
-    // INSERT with partial static partitions
-    sql("truncate table t")
-    checkError(
-      exception = intercept[AnalysisException] {
-        sql("insert into t partition(c3=3, c4) values (1, 2)")
-      },
-      errorClass = "INSERT_PARTITION_COLUMN_ARITY_MISMATCH",
-      parameters = Map(
-        "tableName" -> "`spark_catalog`.`default`.`t`",
-        "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
-        "dataColumns" -> "`col1`, `col2`",
-        "staticPartCols" -> "`c3`"))
-
-    // INSERT with partial static partitions and with column list is not allowed
-    intercept[AnalysisException](sql("insert into t partition(c3=3, c4) (c1) values (1, 4)"))
+    withTable("t") {
+      sql("create table t(c1 int, c2 int, c3 int, c4 int) using parquet 
partitioned by (c3, c4)")
+
+      // INSERT without static partitions
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("insert into t values (1, 2, 3)")
+        },
+        errorClass = "INSERT_COLUMN_ARITY_MISMATCH.NOT_ENOUGH_DATA_COLUMNS",
+        parameters = Map(
+          "tableName" -> "`spark_catalog`.`default`.`t`",
+          "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
+          "dataColumns" -> "`col1`, `col2`, `col3`"))
+
+      // INSERT without static partitions but with column list
+      sql("truncate table t")
+      sql("insert into t (c2, c1, c4) values (1, 2, 3)")
+      checkAnswer(spark.table("t"), Row(2, 1, null, 3))
+
+      // INSERT with static partitions
+      sql("truncate table t")
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("insert into t partition(c3=3, c4=4) values (1)")
+        },
+        errorClass = "INSERT_PARTITION_COLUMN_ARITY_MISMATCH",
+        parameters = Map(
+          "tableName" -> "`spark_catalog`.`default`.`t`",
+          "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
+          "dataColumns" -> "`col1`",
+          "staticPartCols" -> "`c3`, `c4`"))
+
+      // INSERT with static partitions and with column list
+      sql("truncate table t")
+      sql("insert into t partition(c3=3, c4=4) (c2) values (1)")
+      checkAnswer(spark.table("t"), Row(null, 1, 3, 4))
+
+      // INSERT with partial static partitions
+      sql("truncate table t")
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql("insert into t partition(c3=3, c4) values (1, 2)")
+        },
+        errorClass = "INSERT_PARTITION_COLUMN_ARITY_MISMATCH",
+        parameters = Map(
+          "tableName" -> "`spark_catalog`.`default`.`t`",
+          "tableColumns" -> "`c1`, `c2`, `c3`, `c4`",
+          "dataColumns" -> "`col1`, `col2`",
+          "staticPartCols" -> "`c3`"))
+
+      // INSERT with partial static partitions and with column list is not allowed
+      intercept[AnalysisException](sql("insert into t partition(c3=3, c4) (c1) values (1, 4)"))
+    }
   }
 
   test("SPARK-43085: Column DEFAULT assignment for target tables with 
multi-part names") {

