Repository: phoenix
Updated Branches:
  refs/heads/master 108b78dee -> bd2acd540


PHOENIX-3540 Fix Time data type in Phoenix Spark integration


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/bd2acd54
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/bd2acd54
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/bd2acd54

Branch: refs/heads/master
Commit: bd2acd5404ce03fb330a72bbf346546b7f4fbd2b
Parents: 108b78d
Author: Ankit Singhal <ankitsingha...@gmail.com>
Authored: Thu Dec 22 13:17:20 2016 +0530
Committer: Ankit Singhal <ankitsingha...@gmail.com>
Committed: Thu Dec 22 13:17:20 2016 +0530

----------------------------------------------------------------------
 phoenix-spark/src/it/resources/globalSetup.sql       |  2 ++
 .../org/apache/phoenix/spark/PhoenixSparkIT.scala    | 12 ++++++++++++
 .../scala/org/apache/phoenix/spark/PhoenixRDD.scala  | 15 ++++++++-------
 3 files changed, 22 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/bd2acd54/phoenix-spark/src/it/resources/globalSetup.sql
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/it/resources/globalSetup.sql b/phoenix-spark/src/it/resources/globalSetup.sql
index 852687e..72f8620 100644
--- a/phoenix-spark/src/it/resources/globalSetup.sql
+++ b/phoenix-spark/src/it/resources/globalSetup.sql
@@ -48,6 +48,8 @@ CREATE TABLE TEST_SMALL_TINY (ID BIGINT NOT NULL PRIMARY KEY, COL1 SMALLINT, COL
 UPSERT INTO TEST_SMALL_TINY VALUES (1, 32767, 127)
 CREATE TABLE DATE_TEST(ID BIGINT NOT NULL PRIMARY KEY, COL1 DATE)
 UPSERT INTO DATE_TEST VALUES(1, CURRENT_DATE())
+CREATE TABLE TIME_TEST(ID BIGINT NOT NULL PRIMARY KEY, COL1 TIME)
+UPSERT INTO TIME_TEST VALUES(1, CURRENT_TIME())
 CREATE TABLE "space" ("key" VARCHAR PRIMARY KEY, "first name" VARCHAR)
 UPSERT INTO "space" VALUES ('key1', 'xyz')
 CREATE TABLE "small" ("key" VARCHAR PRIMARY KEY, "first name" VARCHAR, 
"salary" INTEGER )

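The new TIME_TEST fixture stores a Phoenix TIME column. Over plain JDBC, Phoenix hands such a column back as java.sql.Time, and Spark SQL has no TIME type, which is what the PhoenixRDD change further down addresses. The following Scala sketch (not part of this commit; the ZooKeeper quorum in the JDBC URL is an assumed local setup) illustrates that round trip:

import java.sql.DriverManager

object TimeJdbcSketch {
  def main(args: Array[String]): Unit = {
    // Assumed local Phoenix/ZooKeeper quorum; adjust to your cluster.
    val conn = DriverManager.getConnection("jdbc:phoenix:localhost:2181")
    try {
      val rs = conn.createStatement().executeQuery(
        "SELECT COL1 FROM TIME_TEST WHERE ID = 1")
      if (rs.next()) {
        // Phoenix returns the TIME column as java.sql.Time (JDBC type code 92)...
        val time: java.sql.Time = rs.getTime(1)
        // ...which the Spark integration re-wraps as java.sql.Timestamp,
        // since Spark SQL has no TimeType.
        val ts = new java.sql.Timestamp(time.getTime)
        println(s"TIME=$time  as TIMESTAMP=$ts")
      }
    } finally {
      conn.close()
    }
  }
}
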
http://git-wip-us.apache.org/repos/asf/phoenix/blob/bd2acd54/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
index 8aeba09..dbcc4f1 100644
--- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
+++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
@@ -621,4 +621,16 @@ class PhoenixSparkIT extends AbstractPhoenixSparkIT {
 
     assert(Math.abs(epoch - ts) < 300000)
   }
+
+  test("Can load Phoenix Time columns through DataFrame API") {
+    val sqlContext = new SQLContext(sc)
+    val df = sqlContext.read
+      .format("org.apache.phoenix.spark")
+      .options(Map("table" -> "TIME_TEST", "zkUrl" -> quorumAddress))
+      .load
+    val time = df.select("COL1").first().getTimestamp(0).getTime
+    val epoch = new Date().getTime
+    assert(Math.abs(epoch - time) < 86400000)
+  }
+
 }

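For readers wanting to try the new behaviour outside the IT harness, the following standalone sketch (not part of this commit) reads the TIME_TEST table through the same DataFrame API; the zkUrl value is an assumed local quorum standing in for the quorumAddress the test gets from its base class. With this fix the TIME column surfaces as a Spark timestamp:

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

object ReadTimeTestSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setAppName("phoenix-time-sketch").setMaster("local[*]"))
    val sqlContext = new SQLContext(sc)

    // "zkUrl" points at an assumed local ZooKeeper quorum; the integration
    // test uses the quorum provided by its mini-cluster instead.
    val df = sqlContext.read
      .format("org.apache.phoenix.spark")
      .options(Map("table" -> "TIME_TEST", "zkUrl" -> "localhost:2181"))
      .load()

    df.printSchema()               // COL1 should now appear as timestamp
    df.select("COL1").show(false)  // and carry the upserted CURRENT_TIME() value

    sc.stop()
  }
}
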
http://git-wip-us.apache.org/repos/asf/phoenix/blob/bd2acd54/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
index 505de1b..204a7ef 100644
--- a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
+++ b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
@@ -130,13 +130,14 @@ class PhoenixRDD(sc: SparkContext, table: String, columns: Seq[String],
       val rowSeq = columns.map { case (name, sqlType) =>
         val res = pr.resultMap(name)
 
-        // Special handling for data types
-        if(dateAsTimestamp && sqlType == 91) { // 91 is the defined type for Date
-          new java.sql.Timestamp(res.asInstanceOf[java.sql.Date].getTime)
-        }
-        else {
-          res
-        }
+          // Special handling for data types
+          if (dateAsTimestamp && sqlType == 91) { // 91 is the defined type for Date
+            new java.sql.Timestamp(res.asInstanceOf[java.sql.Date].getTime)
+          } else if (sqlType == 92) { // 92 is the defined type for Time
+            new java.sql.Timestamp(res.asInstanceOf[java.sql.Time].getTime)
+          } else {
+            res
+          }
       }
 
       // Create a Spark Row from the sequence

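The literals 91 and 92 in the hunk above are the JDBC type codes java.sql.Types.DATE and java.sql.Types.TIME. The following self-contained sketch (not the committed code) restates the added branch with those constants to make the mapping explicit:

import java.sql.{Date, Time, Timestamp, Types}

object TypeMappingSketch {
  // Restates the branch added above in PhoenixRDD, using the JDBC constants
  // the literals stand for: Types.DATE == 91, Types.TIME == 92.
  def mapValue(res: Any, sqlType: Int, dateAsTimestamp: Boolean): Any =
    if (dateAsTimestamp && sqlType == Types.DATE) {
      new Timestamp(res.asInstanceOf[Date].getTime)
    } else if (sqlType == Types.TIME) {
      new Timestamp(res.asInstanceOf[Time].getTime)
    } else {
      res
    }

  def main(args: Array[String]): Unit = {
    val now = System.currentTimeMillis()
    println(mapValue(new Time(now), Types.TIME, dateAsTimestamp = false)) // -> Timestamp
    println(mapValue(new Date(now), Types.DATE, dateAsTimestamp = true))  // -> Timestamp
    println(mapValue("unchanged", Types.VARCHAR, dateAsTimestamp = true)) // -> passed through
  }
}
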