This is an automated email from the ASF dual-hosted git repository.

alamb pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion.git


The following commit(s) were added to refs/heads/main by this push:
     new 7fd04a3ed1 fix: rewrite fetch, skip of the Limit node in correct order (#14496)
7fd04a3ed1 is described below

commit 7fd04a3ed158f49258f400ec73f46360893b1a9a
Author: Yingwen <[email protected]>
AuthorDate: Fri Feb 7 03:29:04 2025 +0800

    fix: rewrite fetch, skip of the Limit node in correct order (#14496)
    
    * fix: rewrite fetch, skip of the Limit node in correct order
    
    * style: fix clippy
---
 datafusion/expr/src/logical_plan/plan.rs | 64 +++++++++++++++++++++++---------
 1 file changed, 46 insertions(+), 18 deletions(-)

diff --git a/datafusion/expr/src/logical_plan/plan.rs b/datafusion/expr/src/logical_plan/plan.rs
index c0a580d89f..daf1a1375e 100644
--- a/datafusion/expr/src/logical_plan/plan.rs
+++ b/datafusion/expr/src/logical_plan/plan.rs
@@ -959,8 +959,9 @@ impl LogicalPlan {
                         expr.len()
                     );
                 }
-                let new_skip = skip.as_ref().and_then(|_| expr.pop());
+                // `LogicalPlan::expressions()` returns in [skip, fetch] order, so we can pop from the end.
                 let new_fetch = fetch.as_ref().and_then(|_| expr.pop());
+                let new_skip = skip.as_ref().and_then(|_| expr.pop());
                 let input = self.only_input(inputs)?;
                 Ok(LogicalPlan::Limit(Limit {
                     skip: new_skip.map(Box::new),
@@ -4293,23 +4294,50 @@ digraph {
 
     #[test]
     fn test_limit_with_new_children() {
-        let limit = LogicalPlan::Limit(Limit {
-            skip: None,
-            fetch: Some(Box::new(Expr::Literal(
-                ScalarValue::new_ten(&DataType::UInt32).unwrap(),
-            ))),
-            input: Arc::new(LogicalPlan::Values(Values {
-                schema: Arc::new(DFSchema::empty()),
-                values: vec![vec![]],
-            })),
-        });
-        let new_limit = limit
-            .with_new_exprs(
-                limit.expressions(),
-                limit.inputs().into_iter().cloned().collect(),
-            )
-            .unwrap();
-        assert_eq!(limit, new_limit);
+        let input = Arc::new(LogicalPlan::Values(Values {
+            schema: Arc::new(DFSchema::empty()),
+            values: vec![vec![]],
+        }));
+        let cases = [
+            LogicalPlan::Limit(Limit {
+                skip: None,
+                fetch: None,
+                input: Arc::clone(&input),
+            }),
+            LogicalPlan::Limit(Limit {
+                skip: None,
+                fetch: Some(Box::new(Expr::Literal(
+                    ScalarValue::new_ten(&DataType::UInt32).unwrap(),
+                ))),
+                input: Arc::clone(&input),
+            }),
+            LogicalPlan::Limit(Limit {
+                skip: Some(Box::new(Expr::Literal(
+                    ScalarValue::new_ten(&DataType::UInt32).unwrap(),
+                ))),
+                fetch: None,
+                input: Arc::clone(&input),
+            }),
+            LogicalPlan::Limit(Limit {
+                skip: Some(Box::new(Expr::Literal(
+                    ScalarValue::new_one(&DataType::UInt32).unwrap(),
+                ))),
+                fetch: Some(Box::new(Expr::Literal(
+                    ScalarValue::new_ten(&DataType::UInt32).unwrap(),
+                ))),
+                input,
+            }),
+        ];
+
+        for limit in cases {
+            let new_limit = limit
+                .with_new_exprs(
+                    limit.expressions(),
+                    limit.inputs().into_iter().cloned().collect(),
+                )
+                .unwrap();
+            assert_eq!(limit, new_limit);
+        }
     }
 
     #[test]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to