allisonwang-db commented on code in PR #42915:
URL: https://github.com/apache/spark/pull/42915#discussion_r1327519954


##########
python/pyspark/worker.py:
##########
@@ -810,28 +847,26 @@ def check_return_value(res):
                         },
                     )
 
-            def evaluate(*args: pd.Series, **kwargs: pd.Series):
-                if len(args) == 0 and len(kwargs) == 0:
+            def evaluate(*args: pd.Series):
+                if len(args) == 0:
                     res = func()
                     check_return_value(res)
                     yield verify_result(pd.DataFrame(res)), arrow_return_type
                 else:
                     # Create tuples from the input pandas Series, each tuple
                     # represents a row across all Series.
-                    keys = list(kwargs.keys())
-                    len_args = len(args)
-                    row_tuples = zip(*args, *[kwargs[key] for key in keys])
+                    row_tuples = zip(*args)
                     for row in row_tuples:
-                        res = func(
-                            *row[:len_args],
-                            **{key: row[len_args + i] for i, key in enumerate(keys)},
-                        )
+                        res = func(*row)
                         check_return_value(res)
                         yield verify_result(pd.DataFrame(res)), arrow_return_type
 
             return evaluate
 
-        eval = wrap_arrow_udtf(getattr(udtf, "eval"), return_type)
+        eval_func_kwargs_support, args_kwargs_offsets = wrap_kwargs_support(

Review Comment:
   It would be really good to add a comment here explaining why we need to wrap the kwargs separately. That context will be valuable for whoever works on this code in the future. :)
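
   For future readers, the mechanics here seem to be: keyword arguments are normalized up front so that the Arrow evaluation loop only ever deals with positional pandas Series (the new `evaluate(*args: pd.Series)` above). Below is a minimal sketch of what such a kwargs-normalizing wrapper could look like. The real `wrap_kwargs_support` is not shown in this hunk, so the helper name, signature, and offset handling here are assumptions for illustration only, not the actual implementation.

       def wrap_kwargs_as_positional(func, arg_names, kwarg_names):
           """Hypothetical sketch: return a wrapper that accepts only positional
           values. The first len(arg_names) values pass through as positional
           arguments; the remaining values are re-bound to the keyword names in
           kwarg_names before calling func. The per-row loop can then call the
           wrapper with a flat tuple of values from zip(*args)."""
           n_args = len(arg_names)

           def wrapped(*values):
               args = values[:n_args]
               kwargs = dict(zip(kwarg_names, values[n_args:]))
               return func(*args, **kwargs)

           return wrapped

       # Usage: eval(a, b, c=...) can then be driven purely positionally.
       def eval_fn(a, b, c=None):
           return a + b + (c or 0)

       wrapped_eval = wrap_kwargs_as_positional(eval_fn, ["a", "b"], ["c"])
       assert wrapped_eval(1, 2, 3) == eval_fn(1, 2, c=3)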



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

