Repository: spark
Updated Branches:
  refs/heads/master 4131ad03f -> 3927bb9b4


[SPARK-22473][FOLLOWUP][TEST] Remove deprecated Date functions

## What changes were proposed in this pull request?

#19696 replaced the deprecated usages of `Date` and `Waiter`, but a few 
methods were missed. This PR fixes the remaining deprecated usages.
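
For context, the constructor being removed is the deprecated `java.sql.Date(int year, int month, int day)`, whose year argument is an offset from 1900 and whose month is 0-based; the non-deprecated replacement, `Date.valueOf`, parses a `yyyy-[m]m-[d]d` string. Below is a minimal, self-contained sketch (not part of this patch) contrasting the two APIs:

```scala
import java.sql.Date

object DateApiSketch {
  def main(args: Array[String]): Unit = {
    // Deprecated constructor: year is an offset from 1900 and month is 0-based,
    // so 2000-01-01 has to be spelled (100, 0, 1). Compiles with a deprecation warning.
    val viaConstructor: Date = new Date(100, 0, 1)

    // Non-deprecated factory method: takes the date in yyyy-[m]m-[d]d form.
    val viaValueOf: Date = Date.valueOf("2000-01-01")

    println(viaConstructor) // 2000-01-01
    println(viaValueOf)     // 2000-01-01
  }
}
```

Incidentally, the old test literal `new Date(2000, 1, 1)` therefore denoted 3900-02-01 rather than 2000-01-01; since the suite only relies on the relative ordering of `smallValues`, `largeValues`, and the equal pairs, switching to `Date.valueOf` keeps that behavior intact.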

## How was this patch tested?

Existing UTs.

Author: Marco Gaido <mga...@hortonworks.com>

Closes #19875 from mgaido91/SPARK-22473_FOLLOWUP.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3927bb9b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3927bb9b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3927bb9b

Branch: refs/heads/master
Commit: 3927bb9b460d2d944ecf3c8552d71e8a25d29655
Parents: 4131ad0
Author: Marco Gaido <mga...@hortonworks.com>
Authored: Mon Dec 4 11:07:27 2017 -0600
Committer: Sean Owen <so...@cloudera.com>
Committed: Mon Dec 4 11:07:27 2017 -0600

----------------------------------------------------------------------
 .../spark/sql/catalyst/expressions/PredicateSuite.scala      | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/3927bb9b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
index 865092a..0079e4e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
@@ -291,26 +291,26 @@ class PredicateSuite extends SparkFunSuite with ExpressionEvalHelper {
   private val udt = new ExamplePointUDT
 
   private val smallValues =
-    Seq(1.toByte, 1.toShort, 1, 1L, Decimal(1), Array(1.toByte), new Date(2000, 1, 1),
+    Seq(1.toByte, 1.toShort, 1, 1L, Decimal(1), Array(1.toByte), Date.valueOf("2000-01-01"),
       new Timestamp(1), "a", 1f, 1d, 0f, 0d, false, Array(1L, 2L))
       .map(Literal(_)) ++ Seq(Literal.create(MyStruct(1L, "b")),
       Literal.create(MyStruct2(MyStruct(1L, "a"), Array(1, 1))),
       Literal.create(ArrayData.toArrayData(Array(1.0, 2.0)), udt))
   private val largeValues =
-    Seq(2.toByte, 2.toShort, 2, 2L, Decimal(2), Array(2.toByte), new Date(2000, 1, 2),
+    Seq(2.toByte, 2.toShort, 2, 2L, Decimal(2), Array(2.toByte), Date.valueOf("2000-01-02"),
       new Timestamp(2), "b", 2f, 2d, Float.NaN, Double.NaN, true, Array(2L, 1L))
       .map(Literal(_)) ++ Seq(Literal.create(MyStruct(2L, "b")),
       Literal.create(MyStruct2(MyStruct(1L, "a"), Array(1, 2))),
       Literal.create(ArrayData.toArrayData(Array(1.0, 3.0)), udt))
 
   private val equalValues1 =
-    Seq(1.toByte, 1.toShort, 1, 1L, Decimal(1), Array(1.toByte), new Date(2000, 1, 1),
+    Seq(1.toByte, 1.toShort, 1, 1L, Decimal(1), Array(1.toByte), Date.valueOf("2000-01-01"),
       new Timestamp(1), "a", 1f, 1d, Float.NaN, Double.NaN, true, Array(1L, 2L))
       .map(Literal(_)) ++ Seq(Literal.create(MyStruct(1L, "b")),
       Literal.create(MyStruct2(MyStruct(1L, "a"), Array(1, 1))),
       Literal.create(ArrayData.toArrayData(Array(1.0, 2.0)), udt))
   private val equalValues2 =
-    Seq(1.toByte, 1.toShort, 1, 1L, Decimal(1), Array(1.toByte), new Date(2000, 1, 1),
+    Seq(1.toByte, 1.toShort, 1, 1L, Decimal(1), Array(1.toByte), Date.valueOf("2000-01-01"),
       new Timestamp(1), "a", 1f, 1d, Float.NaN, Double.NaN, true, Array(1L, 2L))
       .map(Literal(_)) ++ Seq(Literal.create(MyStruct(1L, "b")),
       Literal.create(MyStruct2(MyStruct(1L, "a"), Array(1, 1))),
