This is an automated email from the ASF dual-hosted git repository.

ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 4a44a9ec4a44 [SPARK-48884][PYTHON] Remove unused helper function `PythonSQLUtils.makeInterval`
4a44a9ec4a44 is described below

commit 4a44a9ec4a442e49220a1a4ca19858c2babd33bf
Author: Ruifeng Zheng <ruife...@apache.org>
AuthorDate: Tue Jul 16 10:31:50 2024 +0800

    [SPARK-48884][PYTHON] Remove unused helper function `PythonSQLUtils.makeInterval`
    
    ### What changes were proposed in this pull request?
    Remove unused helper function `PythonSQLUtils.makeInterval`
    
    ### Why are the changes needed?
    As a followup cleanup of 
https://github.com/apache/spark/commit/bd14d6412a3124eecce1493fcad436280915ba71
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    CI
    
    ### Was this patch authored or co-authored using generative AI tooling?
    NO
    
    Closes #47330 from zhengruifeng/py_sql_utils_cleanup.
    
    Authored-by: Ruifeng Zheng <ruife...@apache.org>
    Signed-off-by: Ruifeng Zheng <ruife...@apache.org>
---
 .../apache/spark/sql/api/python/PythonSQLUtils.scala   | 18 ------------------
 1 file changed, 18 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/python/PythonSQLUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/python/PythonSQLUtils.scala
index eb8c1d65a8b5..79c5249b3669 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/api/python/PythonSQLUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/api/python/PythonSQLUtils.scala
@@ -20,11 +20,9 @@ package org.apache.spark.sql.api.python
 import java.io.InputStream
 import java.net.Socket
 import java.nio.channels.Channels
-import java.util.Locale
 
 import net.razorvine.pickle.{Pickler, Unpickler}
 
-import org.apache.spark.SparkException
 import org.apache.spark.api.python.DechunkedInputStream
 import org.apache.spark.internal.{Logging, MDC}
 import org.apache.spark.internal.LogKeys.CLASS_LOADER
@@ -149,22 +147,6 @@ private[sql] object PythonSQLUtils extends Logging {
 
   def nullIndex(e: Column): Column = Column(NullIndex(e.expr))
 
-  def makeInterval(unit: String, e: Column): Column = {
-    val zero = MakeInterval(years = Literal(0), months = Literal(0), weeks = Literal(0),
-      days = Literal(0), hours = Literal(0), mins = Literal(0), secs = Literal(0))
-
-    unit.toUpperCase(Locale.ROOT) match {
-      case "YEAR" => Column(zero.copy(years = e.expr))
-      case "MONTH" => Column(zero.copy(months = e.expr))
-      case "WEEK" => Column(zero.copy(weeks = e.expr))
-      case "DAY" => Column(zero.copy(days = e.expr))
-      case "HOUR" => Column(zero.copy(hours = e.expr))
-      case "MINUTE" => Column(zero.copy(mins = e.expr))
-      case "SECOND" => Column(zero.copy(secs = e.expr))
-      case _ => throw SparkException.internalError(s"Got the unexpected unit '$unit'.")
-    }
-  }
-
   def pandasProduct(e: Column, ignoreNA: Boolean): Column = {
     Column(PandasProduct(e.expr, ignoreNA).toAggregateExpression(false))
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to