This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new 5d12625021ac [SPARK-45016][PYTHON][CONNECT] Add missing `try_remote_functions` annotations
5d12625021ac is described below

commit 5d12625021ac78bc5a11c18fa8fa8de0fdb43ab4
Author: Ruifeng Zheng <ruife...@apache.org>
AuthorDate: Wed Aug 30 11:19:56 2023 -0700

    [SPARK-45016][PYTHON][CONNECT] Add missing `try_remote_functions` annotations
    
    ### What changes were proposed in this pull request?
    Add missing `try_remote_functions` annotations
    
    ### Why are the changes needed?
    To enable these functions when using Spark Connect.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, the annotated functions now work with Spark Connect.
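
    As an illustrative sketch (not part of this patch), the Connect endpoint and
    sample data below are hypothetical; the point is that the annotated functions
    now resolve against a Spark Connect session as well as a classic one:

    ```python
    from pyspark.sql import SparkSession
    from pyspark.sql import functions as F

    # Hypothetical Spark Connect endpoint; any reachable Connect server works.
    spark = SparkSession.builder.remote("sc://localhost:15002").getOrCreate()

    df = spark.createDataFrame([("2023-08-30 11:19:56",)], ["ts"])
    df.select(F.try_to_timestamp(df.ts)).show()

    df2 = spark.createDataFrame([("a:1,b:2",)], ["s"])
    df2.select(F.str_to_map(df2.s, F.lit(","), F.lit(":"))).show()
    ```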
    
    ### How was this patch tested?
    Reused the existing doctests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #42734 from zhengruifeng/add_missing_annotation.
    
    Authored-by: Ruifeng Zheng <ruife...@apache.org>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
    (cherry picked from commit caceb888510a34b9684259914470448fab29493b)
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 python/pyspark/sql/functions.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/python/pyspark/sql/functions.py b/python/pyspark/sql/functions.py
index aaf4823b3e1d..4aafcf90d8f4 100644
--- a/python/pyspark/sql/functions.py
+++ b/python/pyspark/sql/functions.py
@@ -3415,6 +3415,7 @@ def pmod(dividend: Union["ColumnOrName", float], divisor: Union["ColumnOrName",
     return _invoke_binary_math_function("pmod", dividend, divisor)
 
 
+@try_remote_functions
 def width_bucket(
     v: "ColumnOrName",
     min: "ColumnOrName",
@@ -6939,6 +6940,7 @@ def to_timestamp(col: "ColumnOrName", format: Optional[str] = None) -> Column:
         return _invoke_function("to_timestamp", _to_java_column(col), format)
 
 
+@try_remote_functions
 def try_to_timestamp(col: "ColumnOrName", format: Optional["ColumnOrName"] = None) -> Column:
     """
     Parses the `col` with the `format` to a timestamp. The function always
@@ -10329,6 +10331,7 @@ def character_length(str: "ColumnOrName") -> Column:
     return _invoke_function_over_columns("character_length", str)
 
 
+@try_remote_functions
 def try_to_binary(col: "ColumnOrName", format: Optional["ColumnOrName"] = None) -> Column:
     """
     This is a special version of `to_binary` that performs the same operation, but returns a NULL
@@ -13637,6 +13640,7 @@ def map_zip_with(
     return _invoke_higher_order_function("MapZipWith", [col1, col2], [f])
 
 
+@try_remote_functions
 def str_to_map(
     text: "ColumnOrName",
     pairDelim: Optional["ColumnOrName"] = None,
@@ -14062,6 +14066,7 @@ def make_interval(
     )
 
 
+@try_remote_functions
 def make_timestamp(
     years: "ColumnOrName",
     months: "ColumnOrName",
@@ -14133,6 +14138,7 @@ def make_timestamp(
         )
 
 
+@try_remote_functions
 def make_timestamp_ltz(
     years: "ColumnOrName",
     months: "ColumnOrName",
@@ -14203,6 +14209,7 @@ def make_timestamp_ltz(
         )
 
 
+@try_remote_functions
 def make_timestamp_ntz(
     years: "ColumnOrName",
     months: "ColumnOrName",
@@ -14256,6 +14263,7 @@ def make_timestamp_ntz(
     )
 
 
+@try_remote_functions
 def make_ym_interval(
     years: Optional["ColumnOrName"] = None,
     months: Optional["ColumnOrName"] = None,

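For context on the decorator this diff adds: below is a minimal, hypothetical sketch of
the dispatch pattern a decorator like `try_remote_functions` follows. When a Spark
Connect session is active, the call is forwarded to the same-named function in
`pyspark.sql.connect.functions`; otherwise the classic implementation runs. This sketch
is not the decorator as defined in PySpark, and the `is_remote` check is an assumption
about the mechanism.

```python
import functools
from typing import Any, Callable


def try_remote_functions(f: Callable) -> Callable:
    """Sketch: route a functions-API call to Spark Connect when a remote session is active."""

    @functools.wraps(f)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        from pyspark.sql.utils import is_remote

        if is_remote():
            # Look up the same-named function in the Connect implementation.
            from pyspark.sql.connect import functions as connect_functions

            return getattr(connect_functions, f.__name__)(*args, **kwargs)
        # Fall back to the classic (JVM-backed) implementation.
        return f(*args, **kwargs)

    return wrapper
```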

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
