This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 7429223cfd6 [SPARK-38959][SQL][FOLLOW-UP] Address feedback for RowLevelOperationRuntimeGroupFiltering
7429223cfd6 is described below

commit 7429223cfd6c53f9d847d58e43190d2a0311f6c4
Author: aokolnychyi <aokolnyc...@apple.com>
AuthorDate: Tue Nov 8 16:24:59 2022 +0800

    [SPARK-38959][SQL][FOLLOW-UP] Address feedback for RowLevelOperationRuntimeGroupFiltering
    
    ### What changes were proposed in this pull request?
    
    This PR addresses the feedback left on PR #36304 after that change was merged.
    
    ### Why are the changes needed?
    
    These changes are needed for better code quality: the manual cloning of the relation's output attributes can be replaced with a single call to newInstance() on the relation.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Existing tests.
    
    Closes #38526 from aokolnychyi/spark-38959-follow-up.
    
    Authored-by: aokolnychyi <aokolnyc...@apple.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../dynamicpruning/RowLevelOperationRuntimeGroupFiltering.scala      | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/dynamicpruning/RowLevelOperationRuntimeGroupFiltering.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/dynamicpruning/RowLevelOperationRuntimeGroupFiltering.scala
index 232c320bcd4..d9dad43532e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/dynamicpruning/RowLevelOperationRuntimeGroupFiltering.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/dynamicpruning/RowLevelOperationRuntimeGroupFiltering.scala
@@ -78,9 +78,8 @@ case class RowLevelOperationRuntimeGroupFiltering(optimizeSubqueries: Rule[Logic
     // clone the relation and assign new expr IDs to avoid conflicts
     matchingRowsPlan transformUpWithNewOutput {
       case r: DataSourceV2Relation if r eq relation =>
-        val oldOutput = r.output
-        val newOutput = oldOutput.map(_.newInstance())
-        r.copy(output = newOutput) -> oldOutput.zip(newOutput)
+        val newRelation = r.newInstance()
+        newRelation -> r.output.zip(newRelation.output)
     }
   }
 


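For readers skimming the archive, the following is a minimal, self-contained sketch of why the new two-line form is equivalent to the removed three-line form. It is plain Scala, not Spark code: Attr, MiniRelation, and the explicit nextId counter are illustrative stand-ins for AttributeReference, DataSourceV2Relation, and Spark's expression-ID generator, and they only mirror the parts of the API that the diff relies on.

```scala
// Illustrative sketch only (assumed names, not Spark internals).
object NewInstanceSketch {
  final case class Attr(name: String, exprId: Long) {
    // Mirrors Attribute#newInstance: same name, fresh expression ID.
    def newInstance(nextId: () => Long): Attr = copy(exprId = nextId())
  }

  final case class MiniRelation(output: Seq[Attr]) {
    // Mirrors DataSourceV2Relation#newInstance: copy of the relation whose
    // output attributes are re-instantiated with fresh IDs.
    def newInstance(nextId: () => Long): MiniRelation =
      copy(output = output.map(_.newInstance(nextId)))
  }

  def main(args: Array[String]): Unit = {
    var id = 0L
    val nextId = () => { id += 1; id }

    val r = MiniRelation(Seq(Attr("pk", nextId()), Attr("data", nextId())))

    // Old form in the diff: clone the output manually, then copy the relation.
    val oldOutput = r.output
    val newOutput = oldOutput.map(_.newInstance(nextId))
    val manual = (r.copy(output = newOutput), oldOutput.zip(newOutput))

    // New form in the diff: let the relation re-instantiate itself.
    val newRelation = r.newInstance(nextId)
    val viaNewInstance = (newRelation, r.output.zip(newRelation.output))

    // Both yield a cloned relation with fresh attribute IDs plus an
    // old-to-new attribute mapping, which is exactly what
    // transformUpWithNewOutput expects from the rewrite rule.
    assert(manual._1.output.map(_.name) == viaNewInstance._1.output.map(_.name))
    assert(manual._2.map(_._1) == r.output)
    assert(viaNewInstance._2.map(_._1) == r.output)
    println(s"old->new mapping via newInstance: ${viaNewInstance._2}")
  }
}
```

The design point of the follow-up is simply that the relation already knows how to clone itself with fresh expression IDs, so the rule no longer duplicates that logic.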
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
