This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new 6879519cafa [SPARK-37377][SQL][FOLLOWUP] Fix the partitioned join of 
one side test case not match
6879519cafa is described below

commit 6879519cafa913f61a5f6029125e41cf79e80168
Author: Jia Fan <fanjiaemi...@qq.com>
AuthorDate: Fri Jul 28 19:14:56 2023 +0800

    [SPARK-37377][SQL][FOLLOWUP] Fix the partitioned join of one side test case 
not match
    
    ### What changes were proposed in this pull request?
    The "partitioned join: only one side reports partitioning" test case did not match its
    description: previously, both sides reported partitioning rather than only one.
    
    ### Why are the changes needed?
    Fix the test case so it exercises the scenario its name describes.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    add new test.
    
    Closes #42144 from Hisoka-X/SPARK-37377_one_side_test_case.
    
    Authored-by: Jia Fan <fanjiaemi...@qq.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
    (cherry picked from commit 20bb6c0c5a84345bb09ac3cab6267a5747b6be05)
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../org/apache/spark/sql/connector/KeyGroupedPartitioningSuite.scala   | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/connector/KeyGroupedPartitioningSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/connector/KeyGroupedPartitioningSuite.scala
index 8be3c6d9e13..880c30ba9f9 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/connector/KeyGroupedPartitioningSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/connector/KeyGroupedPartitioningSuite.scala
@@ -313,9 +313,8 @@ class KeyGroupedPartitioningSuite extends 
DistributionAndOrderingSuiteBase {
 
   test("partitioned join: only one side reports partitioning") {
     val customers_partitions = Array(bucket(4, "customer_id"))
-    val orders_partitions = Array(bucket(2, "customer_id"))
 
-    testWithCustomersAndOrders(customers_partitions, orders_partitions, 2)
+    testWithCustomersAndOrders(customers_partitions, Array.empty, 2)
   }
 
   private val items: String = "items"


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to