This is an automated email from the ASF dual-hosted git repository. maxgekk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 7668226 Revert "[SPARK-33995][SQL] Expose make_interval as a Scala function" 7668226 is described below commit 76682268d746e72f0e8aa4cc64860e0bfd90f1ed Author: Max Gekk <max.g...@gmail.com> AuthorDate: Wed Jun 30 09:26:35 2021 +0300 Revert "[SPARK-33995][SQL] Expose make_interval as a Scala function" ### What changes were proposed in this pull request? This reverts commit e6753c9402b5c40d9e2af662f28bd4f07a0bae17. ### Why are the changes needed? The `make_interval` function aims to construct values of the legacy interval type `CalendarIntervalType` which will be substituted by ANSI interval types (see SPARK-27790). Since the function has not been released yet, it would be better not to expose it via the public API at all. ### Does this PR introduce _any_ user-facing change? Should not, since the `make_interval` function has not been released yet. ### How was this patch tested? By existing test suites, and GA/jenkins builds. Closes #33143 from MaxGekk/revert-make_interval. 
Authored-by: Max Gekk <max.g...@gmail.com> Signed-off-by: Max Gekk <max.g...@gmail.com> --- .../scala/org/apache/spark/sql/functions.scala | 25 -------- .../apache/spark/sql/JavaDateFunctionsSuite.java | 68 ---------------------- .../org/apache/spark/sql/DateFunctionsSuite.scala | 40 ------------- 3 files changed, 133 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala index c446d6b..ecd60ff 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala @@ -2929,31 +2929,6 @@ object functions { ////////////////////////////////////////////////////////////////////////////////////////////// /** - * (Scala-specific) Creates a datetime interval - * - * @param years Number of years - * @param months Number of months - * @param weeks Number of weeks - * @param days Number of days - * @param hours Number of hours - * @param mins Number of mins - * @param secs Number of secs - * @return A datetime interval - * @group datetime_funcs - * @since 3.2.0 - */ - def make_interval( - years: Column = lit(0), - months: Column = lit(0), - weeks: Column = lit(0), - days: Column = lit(0), - hours: Column = lit(0), - mins: Column = lit(0), - secs: Column = lit(0)): Column = withExpr { - MakeInterval(years.expr, months.expr, weeks.expr, days.expr, hours.expr, mins.expr, secs.expr) - } - - /** * Returns the date that is `numMonths` after `startDate`. * * @param startDate A date, timestamp or string. 
If a string, the data must be in a format that diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDateFunctionsSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDateFunctionsSuite.java deleted file mode 100644 index 2d1de77..0000000 --- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDateFunctionsSuite.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package test.org.apache.spark.sql; - -import org.apache.spark.sql.Column; -import org.apache.spark.sql.Dataset; -import org.apache.spark.sql.Row; -import org.apache.spark.sql.RowFactory; -import org.apache.spark.sql.test.TestSparkSession; -import org.apache.spark.sql.types.StructType; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import java.sql.Date; -import java.util.*; - -import static org.apache.spark.sql.types.DataTypes.*; -import static org.apache.spark.sql.functions.*; - -public class JavaDateFunctionsSuite { - private transient TestSparkSession spark; - - @Before - public void setUp() { - spark = new TestSparkSession(); - } - - @After - public void tearDown() { - spark.stop(); - spark = null; - } - - @Test - public void makeIntervalWorksWithJava() { - Column twoYears = make_interval(lit(2), lit(0), lit(0), lit(0), lit(0), lit(0), lit(0)); - List<Row> rows = Arrays.asList( - RowFactory.create(Date.valueOf("2014-06-30"), Date.valueOf("2016-06-30")), - RowFactory.create(Date.valueOf("2015-05-01"), Date.valueOf("2017-05-01")), - RowFactory.create(Date.valueOf("2018-12-30"), Date.valueOf("2020-12-30"))); - StructType schema = createStructType(Arrays.asList( - createStructField("some_date", DateType, false), - createStructField("expected", DateType, false))); - Dataset<Row> df = spark.createDataFrame(rows, schema) - .withColumn("plus_two_years", col("some_date").plus(twoYears)); - Assert.assertTrue(Arrays.equals( - (Row[]) df.select(df.col("plus_two_years")).collect(), - (Row[]) df.select(df.col("expected")).collect())); - } - -} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala index d927953..34b1654 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala @@ -323,46 +323,6 @@ class 
DateFunctionsSuite extends QueryTest with SharedSparkSession { Row(Timestamp.valueOf("2015-12-27 00:00:00")))) } - test("function make_interval") { - val t1 = Timestamp.valueOf("2015-10-01 00:00:01") - val t2 = Timestamp.valueOf("2016-02-29 00:00:02") - val df = Seq((t1), (t2)).toDF("t") - // adds two hours to times - checkAnswer( - df.select(col("t") + make_interval(hours = lit(2))), - Seq(Row(Timestamp.valueOf("2015-10-01 02:00:01")), - Row(Timestamp.valueOf("2016-02-29 02:00:02")))) - // adds four days and two hours to times - checkAnswer( - df.select(col("t") + make_interval(hours = lit(2), days = lit(4))), - Seq(Row(Timestamp.valueOf("2015-10-05 02:00:01")), - Row(Timestamp.valueOf("2016-03-04 02:00:02")))) - // subtracts two hours from times - checkAnswer( - df.select(col("t") + make_interval(hours = lit(-2))), - Seq(Row(Timestamp.valueOf("2015-09-30 22:00:01")), - Row(Timestamp.valueOf("2016-02-28 22:00:02")))) - - val d1 = Date.valueOf("2015-08-31") - val d2 = Date.valueOf("2015-02-28") - val df2 = Seq((d1), (d2)).toDF("d") - // adding an hour to a date does nothing - checkAnswer( - df2.select(col("d") + make_interval(hours = lit(1))), - Seq(Row(Date.valueOf("2015-08-31")), - Row(Date.valueOf("2015-02-28")))) - // adds three years to date - checkAnswer( - df2.select(col("d") + make_interval(years = lit(3))), - Seq(Row(Date.valueOf("2018-08-31")), - Row(Date.valueOf("2018-02-28")))) - // subtracts 1 week, one day from date - checkAnswer( - df2.select(col("d") - make_interval(weeks = lit(1), days = lit(1))), - Seq(Row(Date.valueOf("2015-08-23")), - Row(Date.valueOf("2015-02-20")))) - } - test("function add_months") { val d1 = Date.valueOf("2015-08-31") val d2 = Date.valueOf("2015-02-28") --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org