This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new dceb8bdc72ef [SPARK-46767][PYTHON][DOCS] Refine docstring of `abs/acos/acosh`
dceb8bdc72ef is described below

commit dceb8bdc72ef24ffa1eb5c1820e6350207f042f5
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Sat Jan 20 17:39:01 2024 +0800

    [SPARK-46767][PYTHON][DOCS] Refine docstring of `abs/acos/acosh`
    
    ### What changes were proposed in this pull request?
    This PR refines the docstrings of `abs/acos/acosh` and adds some new examples.
    
    ### Why are the changes needed?
    To improve the PySpark documentation.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #44794 from LuciferYang/math-functions-1.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: yangjie01 <yangji...@baidu.com>
---
 python/pyspark/sql/functions/builtin.py | 169 ++++++++++++++++++++++++++------
 1 file changed, 137 insertions(+), 32 deletions(-)

diff --git a/python/pyspark/sql/functions/builtin.py b/python/pyspark/sql/functions/builtin.py
index 1f6d86de28dc..62400accba10 100644
--- a/python/pyspark/sql/functions/builtin.py
+++ b/python/pyspark/sql/functions/builtin.py
@@ -734,7 +734,7 @@ def try_sum(col: "ColumnOrName") -> Column:
 @_try_remote_functions
 def abs(col: "ColumnOrName") -> Column:
     """
-    Computes the absolute value.
+    Mathematical Function: Computes the absolute value of the given column or expression.
 
     .. versionadded:: 1.3.0
 
@@ -744,22 +744,66 @@ def abs(col: "ColumnOrName") -> Column:
     Parameters
     ----------
     col : :class:`~pyspark.sql.Column` or str
-        target column to compute on.
+        The target column or expression to compute the absolute value on.
 
     Returns
     -------
     :class:`~pyspark.sql.Column`
-        column for computed results.
+        A new column object representing the absolute value of the input.
 
     Examples
     --------
-    >>> df = spark.range(1)
-    >>> df.select(abs(lit(-1))).show()
-    +-------+
-    |abs(-1)|
-    +-------+
-    |      1|
-    +-------+
+    Example 1: Compute the absolute value of a negative number
+
+    >>> from pyspark.sql import functions as sf
+    >>> df = spark.createDataFrame([(1, -1), (2, -2), (3, -3)], ["id", "value"])
+    >>> df.select(sf.abs(df.value)).show()
+    +----------+
+    |abs(value)|
+    +----------+
+    |         1|
+    |         2|
+    |         3|
+    +----------+
+
+    Example 2: Compute the absolute value of an expression
+
+    >>> from pyspark.sql import functions as sf
+    >>> df = spark.createDataFrame([(1, 1), (2, -2), (3, 3)], ["id", "value"])
+    >>> df.select(sf.abs(df.id - df.value)).show()
+    +-----------------+
+    |abs((id - value))|
+    +-----------------+
+    |                0|
+    |                4|
+    |                0|
+    +-----------------+
+
+    Example 3: Compute the absolute value of a column with null values
+
+    >>> from pyspark.sql import functions as sf
+    >>> df = spark.createDataFrame([(1, None), (2, -2), (3, None)], ["id", "value"])
+    >>> df.select(sf.abs(df.value)).show()
+    +----------+
+    |abs(value)|
+    +----------+
+    |      NULL|
+    |         2|
+    |      NULL|
+    +----------+
+
+    Example 4: Compute the absolute value of a column with double values
+
+    >>> from pyspark.sql import functions as sf
+    >>> df = spark.createDataFrame([(1, -1.5), (2, -2.5), (3, -3.5)], ["id", "value"])
+    >>> df.select(sf.abs(df.value)).show()
+    +----------+
+    |abs(value)|
+    +----------+
+    |       1.5|
+    |       2.5|
+    |       3.5|
+    +----------+
     """
     return _invoke_function_over_columns("abs", col)
 
@@ -1478,7 +1522,8 @@ def product(col: "ColumnOrName") -> Column:
 @_try_remote_functions
 def acos(col: "ColumnOrName") -> Column:
     """
-    Computes inverse cosine of the input column.
+    Mathematical Function: Computes the inverse cosine (also known as arccosine)
+    of the given column or expression.
 
     .. versionadded:: 1.4.0
 
@@ -1488,23 +1533,54 @@ def acos(col: "ColumnOrName") -> Column:
     Parameters
     ----------
     col : :class:`~pyspark.sql.Column` or str
-        target column to compute on.
+        The target column or expression to compute the inverse cosine on.
 
     Returns
     -------
     :class:`~pyspark.sql.Column`
-        inverse cosine of `col`, as if computed by `java.lang.Math.acos()`
+        A new column object representing the inverse cosine of the input.
 
     Examples
     --------
-    >>> df = spark.range(1, 3)
-    >>> df.select(acos(df.id)).show()
-    +--------+
-    |ACOS(id)|
-    +--------+
-    |     0.0|
-    |     NaN|
-    +--------+
+    Example 1: Compute the inverse cosine of a column of numbers
+
+    >>> from pyspark.sql import functions as sf
+    >>> df = spark.createDataFrame([(-1.0,), (-0.5,), (0.0,), (0.5,), (1.0,)], ["value"])
+    >>> df.select("value", sf.acos("value")).show()
+    +-----+------------------+
+    |value|       ACOS(value)|
+    +-----+------------------+
+    | -1.0| 3.141592653589...|
+    | -0.5|2.0943951023931...|
+    |  0.0|1.5707963267948...|
+    |  0.5|1.0471975511965...|
+    |  1.0|               0.0|
+    +-----+------------------+
+
+    Example 2: Compute the inverse cosine of a column with null values
+
+    >>> from pyspark.sql import functions as sf
+    >>> from pyspark.sql.types import StructType, StructField, IntegerType
+    >>> schema = StructType([StructField("value", IntegerType(), True)])
+    >>> df = spark.createDataFrame([(None,)], schema=schema)
+    >>> df.select(sf.acos(df.value)).show()
+    +-----------+
+    |ACOS(value)|
+    +-----------+
+    |       NULL|
+    +-----------+
+
+    Example 3: Compute the inverse cosine of a column with values outside the valid range
+
+    >>> from pyspark.sql import functions as sf
+    >>> df = spark.createDataFrame([(2,), (-2,)], ["value"])
+    >>> df.select(sf.acos(df.value)).show()
+    +-----------+
+    |ACOS(value)|
+    +-----------+
+    |        NaN|
+    |        NaN|
+    +-----------+
     """
     return _invoke_function_over_columns("acos", col)
 
@@ -1512,7 +1588,8 @@ def acos(col: "ColumnOrName") -> Column:
 @_try_remote_functions
 def acosh(col: "ColumnOrName") -> Column:
     """
-    Computes inverse hyperbolic cosine of the input column.
+    Mathematical Function: Computes the inverse hyperbolic cosine (also known as arcosh)
+    of the given column or expression.
 
     .. versionadded:: 3.1.0
 
@@ -1522,23 +1599,51 @@ def acosh(col: "ColumnOrName") -> Column:
     Parameters
     ----------
     col : :class:`~pyspark.sql.Column` or str
-        target column to compute on.
+        The target column or expression to compute the inverse hyperbolic cosine on.
 
     Returns
     -------
     :class:`~pyspark.sql.Column`
-        the column for computed results.
+        A new column object representing the inverse hyperbolic cosine of the input.
 
     Examples
     --------
-    >>> df = spark.range(2)
-    >>> df.select(acosh(col("id"))).show()
-    +---------+
-    |ACOSH(id)|
-    +---------+
-    |      NaN|
-    |      0.0|
-    +---------+
+    Example 1: Compute the inverse hyperbolic cosine of a column of numbers
+
+    >>> from pyspark.sql import functions as sf
+    >>> df = spark.createDataFrame([(1,), (2,)], ["value"])
+    >>> df.select("value", sf.acosh(df.value)).show()
+    +-----+------------------+
+    |value|      ACOSH(value)|
+    +-----+------------------+
+    |    1|               0.0|
+    |    2|1.3169578969248...|
+    +-----+------------------+
+
+    Example 2: Compute the inverse hyperbolic cosine of a column with null values
+
+    >>> from pyspark.sql import functions as sf
+    >>> from pyspark.sql.types import StructType, StructField, IntegerType
+    >>> schema = StructType([StructField("value", IntegerType(), True)])
+    >>> df = spark.createDataFrame([(None,)], schema=schema)
+    >>> df.select(sf.acosh(df.value)).show()
+    +------------+
+    |ACOSH(value)|
+    +------------+
+    |        NULL|
+    +------------+
+
+    Example 3: Compute the inverse hyperbolic cosine of a column with values less than 1
+
+    >>> from pyspark.sql import functions as sf
+    >>> df = spark.createDataFrame([(0.5,), (-0.5,)], ["value"])
+    >>> df.select(sf.acosh(df.value)).show()
+    +------------+
+    |ACOSH(value)|
+    +------------+
+    |         NaN|
+    |         NaN|
+    +------------+
     """
     return _invoke_function_over_columns("acosh", col)
 


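For readers who want to try the refined examples outside the doctest runner, the following is a minimal sketch (not part of the commit) that exercises all three functions in one local PySpark session. It assumes PySpark 3.1 or later, since `acosh` was added in 3.1.0 per the docstrings above; the `local[1]` master and the app name are arbitrary choices for illustration.

    from pyspark.sql import SparkSession
    from pyspark.sql import functions as sf

    # Local session purely for illustration; master and app name are arbitrary.
    spark = (
        SparkSession.builder.master("local[1]")
        .appName("abs-acos-acosh-demo")
        .getOrCreate()
    )

    # One column covering the interesting domains: abs is defined everywhere,
    # acos returns NaN outside [-1, 1], and acosh returns NaN below 1.
    df = spark.createDataFrame([(-1.0,), (0.5,), (2.0,)], ["value"])
    df.select(
        "value",
        sf.abs("value"),    # 1.0, 0.5, 2.0
        sf.acos("value"),   # pi, 1.0471..., NaN
        sf.acosh("value"),  # NaN, NaN, 1.3169...
    ).show()

    spark.stop()

As in the new docstring examples above, the NaN rows make the valid input ranges of `acos` ([-1, 1]) and `acosh` ([1, inf)) visible at a glance.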