Repository: spark
Updated Branches:
  refs/heads/master c740bed17 -> 9bbe0171c


[SPARK-8196][SQL] Fix null handling & documentation for next_day.

The original patch didn't handle nulls correctly for next_day.
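For reference, the fixed semantics, sketched from the new test cases in DateExpressionsSuite below (expression-level API; eval() yields the result as days since the epoch, or null):

    import java.sql.Date
    import org.apache.spark.sql.catalyst.expressions.{Literal, NextDay}
    import org.apache.spark.sql.types.{DateType, StringType}

    NextDay(Literal(Date.valueOf("2015-07-23")), Literal("Mon")).eval()  // 2015-07-27, as days since epoch
    NextDay(Literal(Date.valueOf("2015-07-23")), Literal("xx")).eval()   // null: unrecognized day name
    NextDay(Literal.create(null, DateType), Literal("Mon")).eval()       // null: null start date
    NextDay(Literal(Date.valueOf("2015-07-23")), Literal.create(null, StringType)).eval()  // null: null day name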

Author: Reynold Xin <r...@databricks.com>

Closes #7718 from rxin/next_day and squashes the following commits:

616a425 [Reynold Xin] Merged DatetimeExpressionsSuite into DateFunctionsSuite.
faa78cf [Reynold Xin] Merged DatetimeFunctionsSuite into DateExpressionsSuite.
6c4fb6a [Reynold Xin] [SPARK-8196][SQL] Fix null handling & documentation for next_day.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9bbe0171
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9bbe0171
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9bbe0171

Branch: refs/heads/master
Commit: 9bbe0171cb434edb160fad30ea2d4221f525c919
Parents: c740bed
Author: Reynold Xin <r...@databricks.com>
Authored: Tue Jul 28 09:43:39 2015 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Tue Jul 28 09:43:39 2015 -0700

----------------------------------------------------------------------
 .../sql/catalyst/expressions/Expression.scala   | 12 ++---
 .../expressions/datetimeFunctions.scala         | 46 +++++++++++-------
 .../sql/catalyst/expressions/literals.scala     |  2 +-
 .../spark/sql/catalyst/util/DateTimeUtils.scala |  2 +-
 .../expressions/DateExpressionsSuite.scala      | 43 +++++++++++++----
 .../expressions/DatetimeFunctionsSuite.scala    | 37 ---------------
 .../expressions/ExpressionEvalHelper.scala      |  1 +
 .../expressions/NonFoldableLiteral.scala        | 50 ++++++++++++++++++++
 .../scala/org/apache/spark/sql/functions.scala  | 20 +++++---
 .../apache/spark/sql/DateFunctionsSuite.scala   | 21 ++++++++
 .../spark/sql/DatetimeExpressionsSuite.scala    | 48 -------------------
 11 files changed, 158 insertions(+), 124 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/9bbe0171/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index cb4c3f2..03e36c7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -355,9 +355,9 @@ abstract class BinaryExpression extends Expression {
   * @param f accepts two variable names and returns Java code to compute the output.
    */
   protected def defineCodeGen(
-    ctx: CodeGenContext,
-    ev: GeneratedExpressionCode,
-    f: (String, String) => String): String = {
+      ctx: CodeGenContext,
+      ev: GeneratedExpressionCode,
+      f: (String, String) => String): String = {
     nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
       s"${ev.primitive} = ${f(eval1, eval2)};"
     })
@@ -372,9 +372,9 @@ abstract class BinaryExpression extends Expression {
    *          and returns Java code to compute the output.
    */
   protected def nullSafeCodeGen(
-    ctx: CodeGenContext,
-    ev: GeneratedExpressionCode,
-    f: (String, String) => String): String = {
+      ctx: CodeGenContext,
+      ev: GeneratedExpressionCode,
+      f: (String, String) => String): String = {
     val eval1 = left.gen(ctx)
     val eval2 = right.gen(ctx)
     val resultCode = f(eval1.primitive, eval2.primitive)

http://git-wip-us.apache.org/repos/asf/spark/blob/9bbe0171/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
index b00a1b2..c37afc1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeFunctions.scala
@@ -276,8 +276,6 @@ case class LastDay(startDate: Expression) extends UnaryExpression with ImplicitC
 
   override def dataType: DataType = DateType
 
-  override def prettyName: String = "last_day"
-
   override def nullSafeEval(date: Any): Any = {
     val days = date.asInstanceOf[Int]
     DateTimeUtils.getLastDayOfMonth(days)
@@ -289,12 +287,16 @@ case class LastDay(startDate: Expression) extends UnaryExpression with ImplicitC
       s"$dtu.getLastDayOfMonth($sd)"
     })
   }
+
+  override def prettyName: String = "last_day"
 }
 
 /**
  * Returns the first date which is later than startDate and named as dayOfWeek.
 * For example, NextDay(2015-07-27, Sunday) would return 2015-08-02, which is the first
- * sunday later than 2015-07-27.
+ * Sunday later than 2015-07-27.
+ *
+ * Allowed "dayOfWeek" is defined in [[DateTimeUtils.getDayOfWeekFromString]].
  */
 case class NextDay(startDate: Expression, dayOfWeek: Expression)
   extends BinaryExpression with ImplicitCastInputTypes {
@@ -318,22 +320,32 @@ case class NextDay(startDate: Expression, dayOfWeek: Expression)
 
   override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
     nullSafeCodeGen(ctx, ev, (sd, dowS) => {
-      val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
-      val dow = ctx.freshName("dow")
-      val genDow = if (right.foldable) {
-        val dowVal = DateTimeUtils.getDayOfWeekFromString(
-          dayOfWeek.eval(InternalRow.empty).asInstanceOf[UTF8String])
-        s"int $dow = $dowVal;"
-      } else {
-        s"int $dow = $dtu.getDayOfWeekFromString($dowS);"
-      }
-      genDow + s"""
-        if ($dow == -1) {
-          ${ev.isNull} = true;
+      val dateTimeUtilClass = DateTimeUtils.getClass.getName.stripSuffix("$")
+      val dayOfWeekTerm = ctx.freshName("dayOfWeek")
+      if (dayOfWeek.foldable) {
+        val input = dayOfWeek.eval().asInstanceOf[UTF8String]
+        if ((input eq null) || DateTimeUtils.getDayOfWeekFromString(input) == -1) {
+          s"""
+             |${ev.isNull} = true;
+           """.stripMargin
         } else {
-          ${ev.primitive} = $dtu.getNextDateForDayOfWeek($sd, $dow);
+          val dayOfWeekValue = DateTimeUtils.getDayOfWeekFromString(input)
+          s"""
+             |${ev.primitive} = $dateTimeUtilClass.getNextDateForDayOfWeek($sd, $dayOfWeekValue);
+           """.stripMargin
         }
-       """
+      } else {
+        s"""
+           |int $dayOfWeekTerm = $dateTimeUtilClass.getDayOfWeekFromString($dowS);
+           |if ($dayOfWeekTerm == -1) {
+           |  ${ev.isNull} = true;
+           |} else {
+           |  ${ev.primitive} = $dateTimeUtilClass.getNextDateForDayOfWeek($sd, $dayOfWeekTerm);
+           |}
+         """.stripMargin
+      }
     })
   }
+
+  override def prettyName: String = "next_day"
 }
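
The shape of the new genCode: a foldable dayOfWeek is resolved once at code-generation time, so the generated Java carries either a baked-in constant or an unconditional null assignment, while a non-foldable dayOfWeek gets a per-row lookup plus a runtime validity check. A minimal, self-contained sketch of this specialize-on-foldable pattern (simplified stand-ins, not the Spark codegen API):

    // Simplified model of an argument that is either a compile-time constant or per-row.
    case class Arg(foldable: Boolean, value: String)

    // Thursday = 0 (1970-01-01 was a Thursday); -1 for unrecognized names.
    // The real DateTimeUtils version also accepts 2-letter and full day names.
    def dayIndex(s: String): Int =
      Seq("THU", "FRI", "SAT", "SUN", "MON", "TUE", "WED").indexOf(s.toUpperCase.take(3))

    def genDayOfWeekCode(dow: Arg): String = {
      if (dow.foldable) {
        // Constant argument: fold the lookup now and bake the outcome into the code.
        val idx = if (dow.value == null) -1 else dayIndex(dow.value)
        if (idx == -1) "isNull = true;"
        else s"result = nextDateForDayOfWeek(start, $idx);"
      } else {
        // Per-row argument: emit a runtime lookup and a runtime validity check.
        """int dow = dayIndex(dowStr);
          |if (dow == -1) { isNull = true; }
          |else { result = nextDateForDayOfWeek(start, dow); }""".stripMargin
      }
    }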

http://git-wip-us.apache.org/repos/asf/spark/blob/9bbe0171/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 85060b7..064a172 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -118,7 +118,7 @@ case class Literal protected (value: Any, dataType: DataType)
             super.genCode(ctx, ev)
           } else {
             ev.isNull = "false"
-            ev.primitive = s"${value}"
+            ev.primitive = s"${value}D"
             ""
           }
         case ByteType | ShortType =>

http://git-wip-us.apache.org/repos/asf/spark/blob/9bbe0171/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 2e28fb9..8b0b80c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -575,7 +575,7 @@ object DateTimeUtils {
   }
 
   /**
-   * Returns Day of week from String. Starting from Thursday, marked as 0.
+   * Returns day of week from String. Starting from Thursday, marked as 0.
    * (Because 1970-01-01 is Thursday).
    */
   def getDayOfWeekFromString(string: UTF8String): Int = {
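
Concretely, under this numbering (a sketch; UTF8String lives in org.apache.spark.unsafe.types in this era):

    import org.apache.spark.sql.catalyst.util.DateTimeUtils
    import org.apache.spark.unsafe.types.UTF8String

    DateTimeUtils.getDayOfWeekFromString(UTF8String.fromString("THU"))  // 0: day 0 (1970-01-01) was a Thursday
    DateTimeUtils.getDayOfWeekFromString(UTF8String.fromString("fr"))   // 1: case insensitive, short names accepted
    DateTimeUtils.getDayOfWeekFromString(UTF8String.fromString("xx"))   // -1: unrecognized; NextDay turns this into null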

http://git-wip-us.apache.org/repos/asf/spark/blob/9bbe0171/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 4d2d337..30c5769 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -32,6 +32,19 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
   val d = new Date(sdf.parse("2015-04-08 13:10:15").getTime)
   val ts = new Timestamp(sdf.parse("2013-11-08 13:10:15").getTime)
 
+  test("datetime function current_date") {
+    val d0 = DateTimeUtils.millisToDays(System.currentTimeMillis())
+    val cd = CurrentDate().eval(EmptyRow).asInstanceOf[Int]
+    val d1 = DateTimeUtils.millisToDays(System.currentTimeMillis())
+    assert(d0 <= cd && cd <= d1 && d1 - d0 <= 1)
+  }
+
+  test("datetime function current_timestamp") {
+    val ct = DateTimeUtils.toJavaTimestamp(CurrentTimestamp().eval(EmptyRow).asInstanceOf[Long])
+    val t1 = System.currentTimeMillis()
+    assert(math.abs(t1 - ct.getTime) < 5000)
+  }
+
   test("DayOfYear") {
     val sdfDay = new SimpleDateFormat("D")
     (2002 to 2004).foreach { y =>
@@ -264,14 +277,28 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
   }
 
   test("next_day") {
+    def testNextDay(input: String, dayOfWeek: String, output: String): Unit = {
+      checkEvaluation(
+        NextDay(Literal(Date.valueOf(input)), NonFoldableLiteral(dayOfWeek)),
+        DateTimeUtils.fromJavaDate(Date.valueOf(output)))
+      checkEvaluation(
+        NextDay(Literal(Date.valueOf(input)), Literal(dayOfWeek)),
+        DateTimeUtils.fromJavaDate(Date.valueOf(output)))
+    }
+    testNextDay("2015-07-23", "Mon", "2015-07-27")
+    testNextDay("2015-07-23", "mo", "2015-07-27")
+    testNextDay("2015-07-23", "Tue", "2015-07-28")
+    testNextDay("2015-07-23", "tu", "2015-07-28")
+    testNextDay("2015-07-23", "we", "2015-07-29")
+    testNextDay("2015-07-23", "wed", "2015-07-29")
+    testNextDay("2015-07-23", "Thu", "2015-07-30")
+    testNextDay("2015-07-23", "TH", "2015-07-30")
+    testNextDay("2015-07-23", "Fri", "2015-07-24")
+    testNextDay("2015-07-23", "fr", "2015-07-24")
+
+    checkEvaluation(NextDay(Literal(Date.valueOf("2015-07-23")), Literal("xx")), null)
+    checkEvaluation(NextDay(Literal.create(null, DateType), Literal("xx")), null)
     checkEvaluation(
-      NextDay(Literal(Date.valueOf("2015-07-23")), Literal("Thu")),
-      DateTimeUtils.fromJavaDate(Date.valueOf("2015-07-30")))
-    checkEvaluation(
-      NextDay(Literal(Date.valueOf("2015-07-23")), Literal("THURSDAY")),
-      DateTimeUtils.fromJavaDate(Date.valueOf("2015-07-30")))
-    checkEvaluation(
-      NextDay(Literal(Date.valueOf("2015-07-23")), Literal("th")),
-      DateTimeUtils.fromJavaDate(Date.valueOf("2015-07-30")))
+      NextDay(Literal(Date.valueOf("2015-07-23")), Literal.create(null, StringType)), null)
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/9bbe0171/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
deleted file mode 100644
index 1618c24..0000000
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DatetimeFunctionsSuite.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql.catalyst.expressions
-
-import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.catalyst.util.DateTimeUtils
-
-class DatetimeFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper {
-  test("datetime function current_date") {
-    val d0 = DateTimeUtils.millisToDays(System.currentTimeMillis())
-    val cd = CurrentDate().eval(EmptyRow).asInstanceOf[Int]
-    val d1 = DateTimeUtils.millisToDays(System.currentTimeMillis())
-    assert(d0 <= cd && cd <= d1 && d1 - d0 <= 1)
-  }
-
-  test("datetime function current_timestamp") {
-    val ct = DateTimeUtils.toJavaTimestamp(CurrentTimestamp().eval(EmptyRow).asInstanceOf[Long])
-    val t1 = System.currentTimeMillis()
-    assert(math.abs(t1 - ct.getTime) < 5000)
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/spark/blob/9bbe0171/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
index 136368b..0c8611d 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala
@@ -82,6 +82,7 @@ trait ExpressionEvalHelper {
           s"""
             |Code generation of $expression failed:
             |$e
+            |${e.getStackTraceString}
           """.stripMargin)
     }
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/9bbe0171/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/NonFoldableLiteral.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/NonFoldableLiteral.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/NonFoldableLiteral.scala
new file mode 100644
index 0000000..0559fb8
--- /dev/null
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/NonFoldableLiteral.scala
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.expressions
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.codegen._
+import org.apache.spark.sql.types._
+
+
+/**
+ * A literal value that is not foldable. Used in expression codegen testing to test code paths
+ * that behave differently based on foldable values.
+ */
+case class NonFoldableLiteral(value: Any, dataType: DataType)
+  extends LeafExpression with CodegenFallback {
+
+  override def foldable: Boolean = false
+  override def nullable: Boolean = true
+
+  override def toString: String = if (value != null) value.toString else "null"
+
+  override def eval(input: InternalRow): Any = value
+
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
+    Literal.create(value, dataType).genCode(ctx, ev)
+  }
+}
+
+
+object NonFoldableLiteral {
+  def apply(value: Any): NonFoldableLiteral = {
+    val lit = Literal(value)
+    NonFoldableLiteral(lit.value, lit.dataType)
+  }
+}
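
Typical use, mirroring the next_day tests above: evaluate the same expression through both codegen branches (a sketch):

    import java.sql.Date

    // Foldable argument: exercises the constant-folding branch of NextDay's codegen.
    NextDay(Literal(Date.valueOf("2015-07-23")), Literal("Mon"))
    // Non-foldable argument: forces the runtime-lookup branch of the generated code.
    NextDay(Literal(Date.valueOf("2015-07-23")), NonFoldableLiteral("Mon"))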

http://git-wip-us.apache.org/repos/asf/spark/blob/9bbe0171/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
index d18558b..cec61b6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
@@ -2033,7 +2033,10 @@ object functions {
   def hour(columnName: String): Column = hour(Column(columnName))
 
   /**
-   * Returns the last day of the month which the given date belongs to.
+   * Given a date column, returns the last day of the month which the given date belongs to.
+   * For example, input "2015-07-27" returns "2015-07-31" since July 31 is the last day of the
+   * month in July 2015.
+   *
    * @group datetime_funcs
    * @since 1.5.0
    */
@@ -2054,14 +2057,19 @@ object functions {
   def minute(columnName: String): Column = minute(Column(columnName))
 
   /**
-   * Returns the first date which is later than given date sd and named as dow.
-   * For example, `next_day('2015-07-27', "Sunday")` would return 2015-08-02, which is the
-   * first Sunday later than 2015-07-27. The parameter dayOfWeek could be 2-letter, 3-letter,
-   * or full name of the day of the week (e.g. Mo, tue, FRIDAY).
+   * Given a date column, returns the first date which is later than the value of the date column
+   * that is on the specified day of the week.
+   *
+   * For example, `next_day('2015-07-27', "Sunday")` returns 2015-08-02 because that is the first
+   * Sunday after 2015-07-27.
+   *
+   * Day of the week parameter is case insensitive, and accepts: "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun".
+   *
    * @group datetime_funcs
    * @since 1.5.0
    */
-  def next_day(sd: Column, dayOfWeek: String): Column = NextDay(sd.expr, lit(dayOfWeek).expr)
+  def next_day(date: Column, dayOfWeek: String): Column = NextDay(date.expr, lit(dayOfWeek).expr)
 
   /**
    * Extracts the seconds as an integer from a given date/timestamp/string.
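
DataFrame-level usage, assuming a DataFrame df with a date column d and the usual sqlContext implicits in scope (a sketch):

    import org.apache.spark.sql.functions._

    df.select(next_day($"d", "Sun"))  // first Sunday strictly after each date in d
    df.select(next_day($"d", "xx"))   // an unrecognized day name yields null under this fix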

http://git-wip-us.apache.org/repos/asf/spark/blob/9bbe0171/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
index 001fcd0..36820cb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql
 import java.sql.{Timestamp, Date}
 import java.text.SimpleDateFormat
 
+import org.apache.spark.sql.catalyst.util.DateTimeUtils
 import org.apache.spark.sql.functions._
 
 class DateFunctionsSuite extends QueryTest {
@@ -27,6 +28,26 @@ class DateFunctionsSuite extends QueryTest {
 
   import ctx.implicits._
 
+  test("function current_date") {
+    val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
+    val d0 = DateTimeUtils.millisToDays(System.currentTimeMillis())
+    val d1 = DateTimeUtils.fromJavaDate(df1.select(current_date()).collect().head.getDate(0))
+    val d2 = DateTimeUtils.fromJavaDate(
+      ctx.sql("""SELECT CURRENT_DATE()""").collect().head.getDate(0))
+    val d3 = DateTimeUtils.millisToDays(System.currentTimeMillis())
+    assert(d0 <= d1 && d1 <= d2 && d2 <= d3 && d3 - d0 <= 1)
+  }
+
+  test("function current_timestamp") {
+    val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
+    checkAnswer(df1.select(countDistinct(current_timestamp())), Row(1))
+    // Execution in one query should return the same value
+    checkAnswer(ctx.sql("""SELECT CURRENT_TIMESTAMP() = CURRENT_TIMESTAMP()"""),
+      Row(true))
+    assert(math.abs(ctx.sql("""SELECT CURRENT_TIMESTAMP()""").collect().head.getTimestamp(
+      0).getTime - System.currentTimeMillis()) < 5000)
+  }
+
   val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
   val sdfDate = new SimpleDateFormat("yyyy-MM-dd")
   val d = new Date(sdf.parse("2015-04-08 13:10:15").getTime)

http://git-wip-us.apache.org/repos/asf/spark/blob/9bbe0171/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
deleted file mode 100644
index 44b9153..0000000
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatetimeExpressionsSuite.scala
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-
-import org.apache.spark.sql.catalyst.util.DateTimeUtils
-import org.apache.spark.sql.functions._
-
-class DatetimeExpressionsSuite extends QueryTest {
-  private lazy val ctx = org.apache.spark.sql.test.TestSQLContext
-
-  import ctx.implicits._
-
-  lazy val df1 = Seq((1, 2), (3, 1)).toDF("a", "b")
-
-  test("function current_date") {
-    val d0 = DateTimeUtils.millisToDays(System.currentTimeMillis())
-    val d1 = DateTimeUtils.fromJavaDate(df1.select(current_date()).collect().head.getDate(0))
-    val d2 = DateTimeUtils.fromJavaDate(
-      ctx.sql("""SELECT CURRENT_DATE()""").collect().head.getDate(0))
-    val d3 = DateTimeUtils.millisToDays(System.currentTimeMillis())
-    assert(d0 <= d1 && d1 <= d2 && d2 <= d3 && d3 - d0 <= 1)
-  }
-
-  test("function current_timestamp") {
-    checkAnswer(df1.select(countDistinct(current_timestamp())), Row(1))
-    // Execution in one query should return the same value
-    checkAnswer(ctx.sql("""SELECT CURRENT_TIMESTAMP() = CURRENT_TIMESTAMP()"""),
-      Row(true))
-    assert(math.abs(ctx.sql("""SELECT CURRENT_TIMESTAMP()""").collect().head.getTimestamp(
-      0).getTime - System.currentTimeMillis()) < 5000)
-  }
-
-}

