This is an automated email from the ASF dual-hosted git repository.

ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 15c98e0e5e07 [SPARK-48954] try_mod() replaces try_remainder()
15c98e0e5e07 is described below

commit 15c98e0e5e070d61e32a8eec935488efd9605480
Author: Serge Rielau <se...@rielau.com>
AuthorDate: Sun Jul 21 16:47:46 2024 +0800

    [SPARK-48954] try_mod() replaces try_remainder()
    
    ### What changes were proposed in this pull request?
    
    For consistency, try_remainder() is renamed to try_mod(). The function
    exists only in Spark 4.0.0, so no legacy config is needed.
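    
    A minimal sketch of the renamed Scala API (hedged: assumes an active
    SparkSession named `spark`; inputs and output mirror the PySpark doctest
    updated in this PR):
    
    ```scala
    import org.apache.spark.sql.functions.{col, try_mod}
    import spark.implicits._
    
    // try_mod(left, right) returns left % right, and NULL (rather than an
    // ANSI-mode error) when the divisor is 0.
    val df = Seq((6000, 15), (3, 2), (1234, 0)).toDF("a", "b")
    df.select(try_mod(col("a"), col("b"))).show()
    // +-------------+
    // |try_mod(a, b)|
    // +-------------+
    // |            0|
    // |            1|
    // |         NULL|
    // +-------------+
    ```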
    
    ### Why are the changes needed?
    
    To keep naming consistent across the try_* function family.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, try_remainder() is replaced by try_mod(); see the sketch below.
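    
    At the SQL layer only the registered function name changes; a hedged
    before/after sketch (the result schema comes from the updated
    sql-expression-schema.md in this diff):
    
    ```scala
    // Before this commit, the expression was registered as try_remainder:
    spark.sql("SELECT try_remainder(3, 2)")
    // After this commit, the same expression resolves as try_mod:
    spark.sql("SELECT try_mod(3, 2)")  // struct<try_mod(3, 2):int>, value 1
    ```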
    
    ### How was this patch tested?
    
    The existing try_remainder() tests, renamed to try_mod.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #47427 from srielau/SPARK-48954-try-mod.
    
    Authored-by: Serge Rielau <se...@rielau.com>
    Signed-off-by: Ruifeng Zheng <ruife...@apache.org>
---
 .../scala/org/apache/spark/sql/functions.scala     |  2 +-
 docs/sql-ref-ansi-compliance.md                    |  2 +-
 .../source/reference/pyspark.sql/functions.rst     |  2 +-
 python/pyspark/sql/connect/functions/builtin.py    |  6 ++--
 python/pyspark/sql/functions/builtin.py            | 32 +++++++++++-----------
 .../sql/tests/connect/test_connect_column.py       |  8 ++----
 .../sql/catalyst/analysis/FunctionRegistry.scala   |  2 +-
 .../spark/sql/catalyst/expressions/TryEval.scala   |  4 +--
 .../sql/catalyst/expressions/arithmetic.scala      |  2 +-
 .../sql/catalyst/expressions/TryEvalSuite.scala    |  2 +-
 .../scala/org/apache/spark/sql/functions.scala     |  2 +-
 .../sql-functions/sql-expression-schema.md         |  2 +-
 .../org/apache/spark/sql/MathFunctionsSuite.scala  |  6 ++--
 13 files changed, 34 insertions(+), 38 deletions(-)

diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/functions.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/functions.scala
index 02b25dd6cbb5..c0bf9c9d013c 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/functions.scala
@@ -1947,7 +1947,7 @@ object functions {
    * @group math_funcs
    * @since 4.0.0
    */
-  def try_remainder(left: Column, right: Column): Column = Column.fn("try_remainder", left, right)
+  def try_mod(left: Column, right: Column): Column = Column.fn("try_mod", left, right)
 
   /**
    * Returns `left``*``right` and the result is null on overflow. The acceptable input types are
diff --git a/docs/sql-ref-ansi-compliance.md b/docs/sql-ref-ansi-compliance.md
index 54f9fd439548..443bc8409efc 100644
--- a/docs/sql-ref-ansi-compliance.md
+++ b/docs/sql-ref-ansi-compliance.md
@@ -374,7 +374,7 @@ When ANSI mode is on, it throws exceptions for invalid operations. You can use t
  - `try_subtract`: identical to the subtraction operator `-`, except that it returns `NULL` result instead of throwing an exception on integral value overflow.
  - `try_multiply`: identical to the multiplication operator `*`, except that it returns `NULL` result instead of throwing an exception on integral value overflow.
  - `try_divide`: identical to the division operator `/`, except that it returns `NULL` result instead of throwing an exception on dividing 0.
-  - `try_remainder`: identical to the remainder operator `%`, except that it returns `NULL` result instead of throwing an exception on dividing 0.
+  - `try_mod`: identical to the remainder operator `%`, except that it returns `NULL` result instead of throwing an exception on dividing 0.
  - `try_sum`: identical to the function `sum`, except that it returns `NULL` result instead of throwing an exception on integral/decimal/interval value overflow.
  - `try_avg`: identical to the function `avg`, except that it returns `NULL` result instead of throwing an exception on decimal/interval value overflow.
  - `try_element_at`: identical to the function `element_at`, except that it returns `NULL` result instead of throwing an exception on array's index out of bound.
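
The entry above describes try_mod's NULL-instead-of-error contract. A hedged sketch of that contract under ANSI mode (it mirrors Example 2 of the PySpark doctest later in this diff and assumes an active SparkSession `spark`):

```scala
// With spark.sql.ansi.enabled=true, the `%` operator raises a divide-by-zero
// error, while try_mod returns NULL for a zero divisor.
spark.conf.set("spark.sql.ansi.enabled", "true")
spark.sql("SELECT try_mod(id, 0) FROM range(1)").show()
// +--------------+
// |try_mod(id, 0)|
// +--------------+
// |          NULL|
// +--------------+
```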
diff --git a/python/docs/source/reference/pyspark.sql/functions.rst b/python/docs/source/reference/pyspark.sql/functions.rst
index c7ae525429ca..7585448204f6 100644
--- a/python/docs/source/reference/pyspark.sql/functions.rst
+++ b/python/docs/source/reference/pyspark.sql/functions.rst
@@ -142,8 +142,8 @@ Mathematical Functions
     tanh
     try_add
     try_divide
+    try_mod
     try_multiply
-    try_remainder
     try_subtract
     unhex
     width_bucket
diff --git a/python/pyspark/sql/connect/functions/builtin.py b/python/pyspark/sql/connect/functions/builtin.py
index 7f7ea3c6f45d..37398ca9ccf3 100644
--- a/python/pyspark/sql/connect/functions/builtin.py
+++ b/python/pyspark/sql/connect/functions/builtin.py
@@ -934,11 +934,11 @@ def try_divide(left: "ColumnOrName", right: "ColumnOrName") -> Column:
 try_divide.__doc__ = pysparkfuncs.try_divide.__doc__
 
 
-def try_remainder(left: "ColumnOrName", right: "ColumnOrName") -> Column:
-    return _invoke_function_over_columns("try_remainder", left, right)
+def try_mod(left: "ColumnOrName", right: "ColumnOrName") -> Column:
+    return _invoke_function_over_columns("try_mod", left, right)
 
 
-try_remainder.__doc__ = pysparkfuncs.try_remainder.__doc__
+try_mod.__doc__ = pysparkfuncs.try_mod.__doc__
 
 
 def try_multiply(left: "ColumnOrName", right: "ColumnOrName") -> Column:
diff --git a/python/pyspark/sql/functions/builtin.py b/python/pyspark/sql/functions/builtin.py
index 87b4fbc49518..6ee311780520 100644
--- a/python/pyspark/sql/functions/builtin.py
+++ b/python/pyspark/sql/functions/builtin.py
@@ -658,7 +658,7 @@ def try_divide(left: "ColumnOrName", right: "ColumnOrName") -> Column:
 
 
 @_try_remote_functions
-def try_remainder(left: "ColumnOrName", right: "ColumnOrName") -> Column:
+def try_mod(left: "ColumnOrName", right: "ColumnOrName") -> Column:
     """
     Returns the remainder after `dividend`/`divisor`.  Its result is
     always null if `divisor` is 0.
@@ -679,14 +679,14 @@ def try_remainder(left: "ColumnOrName", right: "ColumnOrName") -> Column:
     >>> import pyspark.sql.functions as sf
     >>> spark.createDataFrame(
     ...     [(6000, 15), (3, 2), (1234, 0)], ["a", "b"]
-    ... ).select(sf.try_remainder("a", "b")).show()
-    +-------------------+
-    |try_remainder(a, b)|
-    +-------------------+
-    |                  0|
-    |                  1|
-    |               NULL|
-    +-------------------+
+    ... ).select(sf.try_mod("a", "b")).show()
+    +-------------+
+    |try_mod(a, b)|
+    +-------------+
+    |            0|
+    |            1|
+    |         NULL|
+    +-------------+
 
     Example 2: Exception during division, resulting in NULL when ANSI mode is on
 
@@ -695,16 +695,16 @@ def try_remainder(left: "ColumnOrName", right: "ColumnOrName") -> Column:
     >>> spark.conf.set("spark.sql.ansi.enabled", "true")
     >>> try:
     ...     df = spark.range(1)
-    ...     df.select(sf.try_remainder(df.id, sf.lit(0))).show()
+    ...     df.select(sf.try_mod(df.id, sf.lit(0))).show()
     ... finally:
     ...     spark.conf.set("spark.sql.ansi.enabled", origin)
-    +--------------------+
-    |try_remainder(id, 0)|
-    +--------------------+
-    |                NULL|
-    +--------------------+
+    +--------------+
+    |try_mod(id, 0)|
+    +--------------+
+    |          NULL|
+    +--------------+
     """
-    return _invoke_function_over_columns("try_remainder", left, right)
+    return _invoke_function_over_columns("try_mod", left, right)
 
 
 @_try_remote_functions
diff --git a/python/pyspark/sql/tests/connect/test_connect_column.py b/python/pyspark/sql/tests/connect/test_connect_column.py
index c797087aef0a..39b46153d4b1 100644
--- a/python/pyspark/sql/tests/connect/test_connect_column.py
+++ b/python/pyspark/sql/tests/connect/test_connect_column.py
@@ -824,12 +824,8 @@ class SparkConnectColumnTests(SparkConnectSQLTestCase):
         )
 
         self.assert_eq(
-            cdf.select(
-                cdf.a % cdf["b"], cdf["a"] % 2, CF.try_remainder(CF.lit(12), cdf.c)
-            ).toPandas(),
-            sdf.select(
-                sdf.a % sdf["b"], sdf["a"] % 2, SF.try_remainder(SF.lit(12), sdf.c)
-            ).toPandas(),
+            cdf.select(cdf.a % cdf["b"], cdf["a"] % 2, CF.try_mod(CF.lit(12), cdf.c)).toPandas(),
+            sdf.select(sdf.a % sdf["b"], sdf["a"] % 2, SF.try_mod(SF.lit(12), sdf.c)).toPandas(),
         )
 
         self.assert_eq(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index d47a34c3626c..739fac1f33fd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -451,7 +451,7 @@ object FunctionRegistry {
     // "try_*" function which always return Null instead of runtime error.
     expression[TryAdd]("try_add"),
     expression[TryDivide]("try_divide"),
-    expression[TryRemainder]("try_remainder"),
+    expression[TryMod]("try_mod"),
     expression[TrySubtract]("try_subtract"),
     expression[TryMultiply]("try_multiply"),
     expression[TryElementAt]("try_element_at"),
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TryEval.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TryEval.scala
index 05eafe01906a..b7d0ffdb75fb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TryEval.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/TryEval.scala
@@ -150,7 +150,7 @@ case class TryDivide(left: Expression, right: Expression, replacement: Expressio
   since = "4.0.0",
   group = "math_funcs")
 // scalastyle:on line.size.limit
-case class TryRemainder(left: Expression, right: Expression, replacement: Expression)
+case class TryMod(left: Expression, right: Expression, replacement: Expression)
   extends RuntimeReplaceable with InheritAnalysisRules {
   def this(left: Expression, right: Expression) = this(left, right,
     (left.dataType, right.dataType) match {
@@ -160,7 +160,7 @@ case class TryRemainder(left: Expression, right: Expression, replacement: Expres
     }
   )
 
-  override def prettyName: String = "try_remainder"
+  override def prettyName: String = "try_mod"
 
   override def parameters: Seq[Expression] = Seq(left, right)
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index f1b192a3e21f..f889c3ebc4d9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -904,7 +904,7 @@ case class Remainder(
 
   override def inputType: AbstractDataType = NumericType
 
-  // `try_remainder` has exactly the same behavior as the legacy divide, so here it only executes
+  // `try_mod` has exactly the same behavior as the legacy divide, so here it only executes
   // the error code path when `evalMode` is `ANSI`.
   protected override def failOnError: Boolean = evalMode == EvalMode.ANSI
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TryEvalSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TryEvalSuite.scala
index e082f2e3accc..cb5d02edfed9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TryEvalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/TryEvalSuite.scala
@@ -46,7 +46,7 @@ class TryEvalSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
   }
 
-  test("try_remainder") {
+  test("try_mod") {
     Seq(
       (3.0, 2.0, 1.0),
       (1.0, 0.0, null),
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
index dd9b8cd26ad2..88303b1979a7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
@@ -1950,7 +1950,7 @@ object functions {
    * @group math_funcs
    * @since 4.0.0
    */
-  def try_remainder(left: Column, right: Column): Column = Column.fn("try_remainder", left, right)
+  def try_mod(left: Column, right: Column): Column = Column.fn("try_mod", left, right)
 
   /**
    * Returns `left``*``right` and the result is null on overflow. The acceptable input types are
diff --git a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
index 228c8f5df692..7f0e20ba345e 100644
--- a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
+++ b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
@@ -352,9 +352,9 @@
 | org.apache.spark.sql.catalyst.expressions.TryAesDecrypt | try_aes_decrypt | SELECT try_aes_decrypt(unhex('6E7CA17BBB468D3084B5744BCA729FB7B2B7BCB8E4472847D02670489D95FA97DBBA7D3210'), '0000111122223333', 'GCM') | struct<try_aes_decrypt(unhex(6E7CA17BBB468D3084B5744BCA729FB7B2B7BCB8E4472847D02670489D95FA97DBBA7D3210), 0000111122223333, GCM, DEFAULT, ):binary> |
 | org.apache.spark.sql.catalyst.expressions.TryDivide | try_divide | SELECT try_divide(3, 2) | struct<try_divide(3, 2):double> |
 | org.apache.spark.sql.catalyst.expressions.TryElementAt | try_element_at | SELECT try_element_at(array(1, 2, 3), 2) | struct<try_element_at(array(1, 2, 3), 2):int> |
+| org.apache.spark.sql.catalyst.expressions.TryMod | try_mod | SELECT try_mod(3, 2) | struct<try_mod(3, 2):int> |
 | org.apache.spark.sql.catalyst.expressions.TryMultiply | try_multiply | SELECT try_multiply(2, 3) | struct<try_multiply(2, 3):int> |
 | org.apache.spark.sql.catalyst.expressions.TryReflect | try_reflect | SELECT try_reflect('java.util.UUID', 'randomUUID') | struct<try_reflect(java.util.UUID, randomUUID):string> |
-| org.apache.spark.sql.catalyst.expressions.TryRemainder | try_remainder | SELECT try_remainder(3, 2) | struct<try_remainder(3, 2):int> |
 | org.apache.spark.sql.catalyst.expressions.TrySubtract | try_subtract | SELECT try_subtract(2, 1) | struct<try_subtract(2, 1):int> |
 | org.apache.spark.sql.catalyst.expressions.TryToBinary | try_to_binary | SELECT try_to_binary('abc', 'utf-8') | struct<try_to_binary(abc, utf-8):binary> |
 | org.apache.spark.sql.catalyst.expressions.TryToNumber | try_to_number | SELECT try_to_number('454', '999') | struct<try_to_number(454, 999):decimal(3,0)> |
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/MathFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/MathFunctionsSuite.scala
index ac14b345a762..f1d0815c181b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/MathFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/MathFunctionsSuite.scala
@@ -707,15 +707,15 @@ class MathFunctionsSuite extends QueryTest with SharedSparkSession {
       df1.select(try_divide(make_interval(col("year"), col("month")), lit(0))))
   }
 
-  test("try_remainder") {
+  test("try_mod") {
     val df = Seq((10, 3), (5, 5), (5, 0)).toDF("birth", "age")
-    checkAnswer(df.selectExpr("try_remainder(birth, age)"), Seq(Row(1), Row(0), Row(null)))
+    checkAnswer(df.selectExpr("try_mod(birth, age)"), Seq(Row(1), Row(0), Row(null)))
 
     val dfDecimal = Seq(
       (BigDecimal(10), BigDecimal(3)),
       (BigDecimal(5), BigDecimal(5)),
       (BigDecimal(5), BigDecimal(0))).toDF("birth", "age")
-    checkAnswer(dfDecimal.selectExpr("try_remainder(birth, age)"), Seq(Row(1), Row(0), Row(null)))
+    checkAnswer(dfDecimal.selectExpr("try_mod(birth, age)"), Seq(Row(1), Row(0), Row(null)))
   }
 
   test("try_element_at") {

