This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new ef545d6ce57 [SPARK-40798][SQL][TESTS][FOLLOW-UP] Improve test coverage
ef545d6ce57 is described below

commit ef545d6ce579db1070d260426ab8cbf6e2853c28
Author: ulysses-you <ulyssesyo...@gmail.com>
AuthorDate: Wed Nov 9 18:07:40 2022 +0300

    [SPARK-40798][SQL][TESTS][FOLLOW-UP] Improve test coverage
    
    ### What changes were proposed in this pull request?
    
    Add an ANSI test in `org.apache.spark.sql.execution.command.v2.AlterTableAddPartitionSuite`
    
    ### Why are the changes needed?
    
    Improve test coverage with ANSI mode both on and off
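    
    As a simplified illustration of the pattern the added test follows (not the
    suite's exact code; the table name `t` and the surrounding test scaffolding
    are assumed here), the type-validation skip stays on for the whole block
    while a nested `withSQLConf` toggles the ANSI flag, so the same DDL is
    exercised under both settings:
    
    ```scala
    // Sketch only: assumes a suite mixing in SQLTestUtils (providing sql() and
    // withSQLConf), ScalaTest's intercept, imports of SQLConf and
    // SparkNumberFormatException, and a table `t` partitioned by an INT column `p`.
    withSQLConf(SQLConf.SKIP_TYPE_VALIDATION_ON_ALTER_PARTITION.key -> "true") {
      // ANSI on: casting the string 'aaa' to INT fails with CAST_INVALID_INPUT.
      withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
        intercept[SparkNumberFormatException] {
          sql(s"ALTER TABLE $t ADD PARTITION (p='aaa')")
        }
      }
      // ANSI off: the invalid value is tolerated and lands in the default
      // (null) partition, which is then dropped again.
      withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
        sql(s"ALTER TABLE $t ADD PARTITION (p='aaa')")
        sql(s"ALTER TABLE $t DROP PARTITION (p=null)")
      }
    }
    ```
    
    Keeping the type-validation flag in the outer `withSQLConf` avoids repeating
    it in both the ANSI-on and ANSI-off branches.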
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    Pass CI
    
    Closes #38580 from ulysses-you/test.
    
    Authored-by: ulysses-you <ulyssesyo...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../command/v2/AlterTableAddPartitionSuite.scala   | 30 +++++++++++++++++-----
 1 file changed, 24 insertions(+), 6 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
index c33d9b0101a..09ebd4af4ec 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.sql.execution.command.v2
 
+import org.apache.spark.SparkNumberFormatException
 import org.apache.spark.sql.{AnalysisException, Row}
 import org.apache.spark.sql.catalyst.analysis.PartitionsAlreadyExistException
 import org.apache.spark.sql.execution.command
@@ -129,12 +130,29 @@ class AlterTableAddPartitionSuite
     withNamespaceAndTable("ns", "tbl") { t =>
       sql(s"CREATE TABLE $t (c int) $defaultUsing PARTITIONED BY (p int)")
 
-      withSQLConf(
-          SQLConf.SKIP_TYPE_VALIDATION_ON_ALTER_PARTITION.key -> "true",
-          SQLConf.ANSI_ENABLED.key -> "false") {
-        sql(s"ALTER TABLE $t ADD PARTITION (p='aaa')")
-        checkPartitions(t, Map("p" -> defaultPartitionName))
-        sql(s"ALTER TABLE $t DROP PARTITION (p=null)")
+      withSQLConf(SQLConf.SKIP_TYPE_VALIDATION_ON_ALTER_PARTITION.key -> "true") {
+        withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
+          checkError(
+            exception = intercept[SparkNumberFormatException] {
+              sql(s"ALTER TABLE $t ADD PARTITION (p='aaa')")
+            },
+            errorClass = "CAST_INVALID_INPUT",
+            parameters = Map(
+              "ansiConfig" -> "\"spark.sql.ansi.enabled\"",
+              "expression" -> "'aaa'",
+              "sourceType" -> "\"STRING\"",
+              "targetType" -> "\"INT\""),
+            context = ExpectedContext(
+              fragment = s"ALTER TABLE $t ADD PARTITION (p='aaa')",
+              start = 0,
+              stop = 35 + t.length))
+        }
+
+        withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
+          sql(s"ALTER TABLE $t ADD PARTITION (p='aaa')")
+          checkPartitions(t, Map("p" -> defaultPartitionName))
+          sql(s"ALTER TABLE $t DROP PARTITION (p=null)")
+        }
       }
     }
   }

