Repository: spark
Updated Branches:
  refs/heads/master 96a4d1d08 -> d797ed0ef


[SPARK-20909][SQL] Add built-in SQL function - DAYOFWEEK

## What changes were proposed in this pull request?

Add built-in SQL function - DAYOFWEEK

## How was this patch tested?

unit tests

Author: Yuming Wang <wgy...@gmail.com>

Closes #18134 from wangyum/SPARK-20909.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/d797ed0e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/d797ed0e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/d797ed0e

Branch: refs/heads/master
Commit: d797ed0ef10f3e2e4cade3fc47071839ae8c5fd4
Parents: 96a4d1d
Author: Yuming Wang <wgy...@gmail.com>
Authored: Tue May 30 15:40:50 2017 +0900
Committer: Takuya UESHIN <ues...@databricks.com>
Committed: Tue May 30 15:40:50 2017 +0900

----------------------------------------------------------------------
 .../catalyst/analysis/FunctionRegistry.scala    |  1 +
 .../expressions/datetimeExpressions.scala       | 38 ++++++++++++++++++++
 .../expressions/DateExpressionsSuite.scala      | 14 ++++++++
 .../resources/sql-tests/inputs/datetime.sql     |  2 ++
 .../sql-tests/results/datetime.sql.out          | 10 +++++-
 5 files changed, 64 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/d797ed0e/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 549fa0d..8081036 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -360,6 +360,7 @@ object FunctionRegistry {
     expression[ToUTCTimestamp]("to_utc_timestamp"),
     expression[TruncDate]("trunc"),
     expression[UnixTimestamp]("unix_timestamp"),
+    expression[DayOfWeek]("dayofweek"),
     expression[WeekOfYear]("weekofyear"),
     expression[Year]("year"),
     expression[TimeWindow]("window"),

http://git-wip-us.apache.org/repos/asf/spark/blob/d797ed0e/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 4098300..505ed94 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -404,6 +404,44 @@ case class DayOfMonth(child: Expression) extends 
UnaryExpression with ImplicitCa
 
 // scalastyle:off line.size.limit
 @ExpressionDescription(
+  usage = "_FUNC_(date) - Returns the day of the week for date/timestamp (1 = 
Sunday, 2 = Monday, ..., 7 = Saturday).",
+  extended = """
+    Examples:
+      > SELECT _FUNC_('2009-07-30');
+       5
+  """)
+// scalastyle:on line.size.limit
+case class DayOfWeek(child: Expression) extends UnaryExpression with 
ImplicitCastInputTypes {
+
+  override def inputTypes: Seq[AbstractDataType] = Seq(DateType)
+
+  override def dataType: DataType = IntegerType
+
+  @transient private lazy val c = {
+    Calendar.getInstance(DateTimeUtils.getTimeZone("UTC"))
+  }
+
+  override protected def nullSafeEval(date: Any): Any = {
+    c.setTimeInMillis(date.asInstanceOf[Int] * 1000L * 3600L * 24L)
+    c.get(Calendar.DAY_OF_WEEK)
+  }
+
+  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): 
ExprCode = {
+    nullSafeCodeGen(ctx, ev, time => {
+      val cal = classOf[Calendar].getName
+      val c = ctx.freshName("cal")
+      val dtu = DateTimeUtils.getClass.getName.stripSuffix("$")
+      ctx.addMutableState(cal, c, s"""$c = 
$cal.getInstance($dtu.getTimeZone("UTC"));""")
+      s"""
+        $c.setTimeInMillis($time * 1000L * 3600L * 24L);
+        ${ev.value} = $c.get($cal.DAY_OF_WEEK);
+      """
+    })
+  }
+}
+
+// scalastyle:off line.size.limit
+@ExpressionDescription(
   usage = "_FUNC_(date) - Returns the week of the year of the given date. A 
week is considered to start on a Monday and week 1 is the first week with >3 
days.",
   extended = """
     Examples:

http://git-wip-us.apache.org/repos/asf/spark/blob/d797ed0e/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 4ce6853..89d99f9 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -196,6 +196,20 @@ class DateExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
     }
   }
 
+  test("DayOfWeek") {
+    checkEvaluation(DayOfWeek(Literal.create(null, DateType)), null)
+    checkEvaluation(DayOfWeek(Literal(d)), Calendar.WEDNESDAY)
+    checkEvaluation(DayOfWeek(Cast(Literal(sdfDate.format(d)), DateType, 
gmtId)),
+      Calendar.WEDNESDAY)
+    checkEvaluation(DayOfWeek(Cast(Literal(ts), DateType, gmtId)), 
Calendar.FRIDAY)
+    checkEvaluation(DayOfWeek(Cast(Literal("2011-05-06"), DateType, gmtId)), 
Calendar.FRIDAY)
+    checkEvaluation(DayOfWeek(Literal(new Date(sdf.parse("2017-05-27 
13:10:15").getTime))),
+      Calendar.SATURDAY)
+    checkEvaluation(DayOfWeek(Literal(new Date(sdf.parse("1582-10-15 
13:10:15").getTime))),
+      Calendar.FRIDAY)
+    checkConsistencyBetweenInterpretedAndCodegen(DayOfWeek, DateType)
+  }
+
   test("WeekOfYear") {
     checkEvaluation(WeekOfYear(Literal.create(null, DateType)), null)
     checkEvaluation(WeekOfYear(Literal(d)), 15)

http://git-wip-us.apache.org/repos/asf/spark/blob/d797ed0e/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/datetime.sql 
b/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
index e957f69..616b6ca 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/datetime.sql
@@ -6,3 +6,5 @@ select current_date = current_date(), current_timestamp = 
current_timestamp();
 select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 
'yyyy-MM-dd');
 
 select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), 
to_timestamp('2016-12-31', 'yyyy-MM-dd');
+
+select dayofweek('2007-02-03'), dayofweek('2009-07-30'), 
dayofweek('2017-05-27'), dayofweek(null), dayofweek('1582-10-15 13:10:15');

http://git-wip-us.apache.org/repos/asf/spark/blob/d797ed0e/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out 
b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
index 13e1e48..a28b91c 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 3
+-- Number of queries: 4
 
 
 -- !query 0
@@ -24,3 +24,11 @@ select to_timestamp(null), to_timestamp('2016-12-31 
00:12:00'), to_timestamp('20
 struct<to_timestamp(NULL):timestamp,to_timestamp('2016-12-31 
00:12:00'):timestamp,to_timestamp('2016-12-31', 'yyyy-MM-dd'):timestamp>
 -- !query 2 output
 NULL   2016-12-31 00:12:00     2016-12-31 00:00:00
+
+
+-- !query 3
+select dayofweek('2007-02-03'), dayofweek('2009-07-30'), 
dayofweek('2017-05-27'), dayofweek(null), dayofweek('1582-10-15 13:10:15')
+-- !query 3 schema
+struct<dayofweek(CAST(2007-02-03 AS DATE)):int,dayofweek(CAST(2009-07-30 AS 
DATE)):int,dayofweek(CAST(2017-05-27 AS DATE)):int,dayofweek(CAST(NULL AS 
DATE)):int,dayofweek(CAST(1582-10-15 13:10:15 AS DATE)):int>
+-- !query 3 output
+7      5       7       NULL    6


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to