This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.1 by this push:
     new e551817  [SPARK-33970][SQL][TEST] Add test default partition in metastoredirectsql
e551817 is described below

commit e551817fefeaaff70426661bfe780645cbc8b619
Author: Yuming Wang <yumw...@ebay.com>
AuthorDate: Mon Jan 11 14:19:53 2021 +0900

    [SPARK-33970][SQL][TEST] Add test default partition in metastoredirectsql
    
    ### What changes were proposed in this pull request?
    
    This PR adds tests for the default partition (a NULL partition value) when the Hive metastore evaluates partition filters via direct SQL (MetaStoreDirectSql).
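    
    For context (not part of this patch): Hive cannot store a literal NULL as a partition directory name, so a NULL partition value is written under a sentinel name, exposed in Spark as `ExternalCatalogUtils.DEFAULT_PARTITION_NAME`. A minimal sketch, assuming spark-catalyst is on the classpath (the demo object name is illustrative):
    
    ```scala
    import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils.DEFAULT_PARTITION_NAME
    
    // Prints "__HIVE_DEFAULT_PARTITION__", the sentinel value this suite mixes
    // into the "d" partition values for Hive 2.0.0+ clients.
    object DefaultPartitionDemo extends App {
      println(DEFAULT_PARTITION_NAME)
    }
    ```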
    
    ### Why are the changes needed?
    
    Improve test coverage for partition filtering.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    N/A
    
    Closes #31109 from wangyum/SPARK-33970.
    
    Authored-by: Yuming Wang <yumw...@ebay.com>
    Signed-off-by: HyukjinKwon <gurwls...@apache.org>
    (cherry picked from commit f77eeb0451e60b8c4db377538d381e05f7771cf4)
    Signed-off-by: HyukjinKwon <gurwls...@apache.org>
---
 .../hive/client/HivePartitionFilteringSuite.scala  | 71 +++++++++++++++++++++-
 1 file changed, 70 insertions(+), 1 deletion(-)

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala
index dc56e6b..fd83395 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HivePartitionFilteringSuite.scala
@@ -28,6 +28,7 @@ import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog._
+import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils.DEFAULT_PARTITION_NAME
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.internal.SQLConf
@@ -39,10 +40,13 @@ class HivePartitionFilteringSuite(version: String)
 
   private val tryDirectSqlKey = HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL.varname
 
+  // The default partition is supported in MetaStoreDirectSql since HIVE-11898 (Hive 2.0.0).
+  private val defaultPartition = if (version.toDouble >= 2) Some(DEFAULT_PARTITION_NAME) else None
+
   private val dsValue = 20170101 to 20170103
   private val hValue = 0 to 4
   private val chunkValue = Seq("aa", "ab", "ba", "bb")
-  private val dateValue = Seq("2019-01-01", "2019-01-02", "2019-01-03")
+  private val dateValue = Seq("2019-01-01", "2019-01-02", "2019-01-03") ++ defaultPartition
   private val dateStrValue = Seq("2020-01-01", "2020-01-02", "2020-01-03")
   private val testPartitionCount =
     dsValue.size * hValue.size * chunkValue.size * dateValue.size * dateStrValue.size
@@ -424,6 +428,71 @@ class HivePartitionFilteringSuite(version: String)
       dateStrValue)
   }
 
+  test("getPartitionsByFilter: IS NULL / IS NOT NULL") {
+    // returns all partitions
+    Seq(attr("d").isNull, attr("d").isNotNull).foreach { filterExpr =>
+      testMetastorePartitionFiltering(
+        filterExpr,
+        dsValue,
+        hValue,
+        chunkValue,
+        dateValue,
+        dateStrValue)
+    }
+  }
+
+  test("getPartitionsByFilter: IS NULL / IS NOT NULL with other filter") {
+    Seq(attr("d").isNull, attr("d").isNotNull).foreach { filterExpr =>
+      testMetastorePartitionFiltering(
+        filterExpr && attr("d") === Date.valueOf("2019-01-01"),
+        dsValue,
+        hValue,
+        chunkValue,
+        Seq("2019-01-01"),
+        dateStrValue)
+    }
+  }
+
+  test("getPartitionsByFilter: d =!= 2019-01-01") {
+    testMetastorePartitionFiltering(
+      attr("d") =!= Date.valueOf("2019-01-01"),
+      dsValue,
+      hValue,
+      chunkValue,
+      Seq("2019-01-02", "2019-01-03"),
+      dateStrValue)
+  }
+
+  test("getPartitionsByFilter: d =!= 2019-01-01 || IS NULL") {
+    testMetastorePartitionFiltering(
+      attr("d") =!= Date.valueOf("2019-01-01") || attr("d").isNull,
+      dsValue,
+      hValue,
+      chunkValue,
+      dateValue,
+      dateStrValue)
+  }
+
+  test("getPartitionsByFilter: d <=> 2019-01-01") {
+    testMetastorePartitionFiltering(
+      attr("d") <=> Date.valueOf("2019-01-01"),
+      dsValue,
+      hValue,
+      chunkValue,
+      dateValue,
+      dateStrValue)
+  }
+
+  test("getPartitionsByFilter: d <=> null") {
+    testMetastorePartitionFiltering(
+      attr("d") <=> Literal(null, DateType),
+      dsValue,
+      hValue,
+      chunkValue,
+      dateValue,
+      dateStrValue)
+  }
+
   private def testMetastorePartitionFiltering(
       filterExpr: Expression,
       expectedDs: Seq[Int],

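Editor's note on the expectations above (a sketch, not part of the patch): the expected result sets suggest that predicates Hive's metastore filter language cannot express, such as IS NULL, IS NOT NULL, and the null-safe `<=>`, are not pushed down, so `getPartitionsByFilter` returns every partition and Spark filters on the client side; a pushed-down `<>` drops the default (NULL) partition under SQL three-valued logic, which is why `d =!= 2019-01-01` expects only the two remaining dates. A self-contained Scala sketch of that logic (all names here are illustrative):

```scala
object ThreeValuedLogicDemo extends App {
  // Partition values for column "d"; None models __HIVE_DEFAULT_PARTITION__.
  val dValues: Seq[Option[String]] =
    Seq(Some("2019-01-01"), Some("2019-01-02"), Some("2019-01-03"), None)

  // Pushed-down d <> '2019-01-01': NULL <> x is unknown, so the default
  // partition is excluded, matching the "d =!= 2019-01-01" expectation.
  val notEqual = dValues.filter(_.exists(_ != "2019-01-01"))
  println(notEqual) // List(Some(2019-01-02), Some(2019-01-03))

  // IS NULL / IS NOT NULL / <=> are not pushed to the metastore, so the
  // client receives all partitions, matching the full dateValue expectation.
  val notPushed = dValues
  println(notPushed.size) // 4
}
```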
