Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-1.1 5286236f9 -> 3378ee857


PHOENIX-3540 Fix Time data type in Phoenix Spark integration


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/3378ee85
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/3378ee85
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/3378ee85

Branch: refs/heads/4.x-HBase-1.1
Commit: 3378ee857136eee1d7e7c2f09e3a1e42e340dcf4
Parents: 5286236
Author: Ankit Singhal <ankitsingha...@gmail.com>
Authored: Thu Dec 22 13:18:05 2016 +0530
Committer: Ankit Singhal <ankitsingha...@gmail.com>
Committed: Thu Dec 22 13:18:05 2016 +0530

----------------------------------------------------------------------
 phoenix-spark/src/it/resources/globalSetup.sql       |  2 ++
 .../org/apache/phoenix/spark/PhoenixSparkIT.scala    | 12 ++++++++++++
 .../scala/org/apache/phoenix/spark/PhoenixRDD.scala  | 15 ++++++++-------
 3 files changed, 22 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/3378ee85/phoenix-spark/src/it/resources/globalSetup.sql
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/it/resources/globalSetup.sql b/phoenix-spark/src/it/resources/globalSetup.sql
index 30605d3..1a2d162 100644
--- a/phoenix-spark/src/it/resources/globalSetup.sql
+++ b/phoenix-spark/src/it/resources/globalSetup.sql
@@ -48,6 +48,8 @@ CREATE TABLE TEST_SMALL_TINY (ID BIGINT NOT NULL PRIMARY KEY, COL1 SMALLINT, COL
 UPSERT INTO TEST_SMALL_TINY VALUES (1, 32767, 127)
 CREATE TABLE DATE_TEST(ID BIGINT NOT NULL PRIMARY KEY, COL1 DATE)
 UPSERT INTO DATE_TEST VALUES(1, CURRENT_DATE())
+CREATE TABLE TIME_TEST(ID BIGINT NOT NULL PRIMARY KEY, COL1 TIME)
+UPSERT INTO TIME_TEST VALUES(1, CURRENT_TIME())
 CREATE TABLE "space" ("key" VARCHAR PRIMARY KEY, "first name" VARCHAR)
 UPSERT INTO "space" VALUES ('key1', 'xyz')
 CREATE TABLE "small" ("key" VARCHAR PRIMARY KEY, "first name" VARCHAR, 
"salary" INTEGER )

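For anyone wanting to reproduce the new setup rows outside the IT harness, here is a
minimal Scala sketch that runs the two added statements over plain Phoenix JDBC. The
connection string "jdbc:phoenix:localhost:2181" and the object name are assumptions
for a local test cluster; the SQL mirrors the lines added to globalSetup.sql above.

  import java.sql.DriverManager

  object TimeTestSetupSketch {
    def main(args: Array[String]): Unit = {
      // Assumed local ZooKeeper quorum; adjust to the cluster under test
      val conn = DriverManager.getConnection("jdbc:phoenix:localhost:2181")
      try {
        val stmt = conn.createStatement()
        stmt.execute("CREATE TABLE IF NOT EXISTS TIME_TEST(ID BIGINT NOT NULL PRIMARY KEY, COL1 TIME)")
        stmt.execute("UPSERT INTO TIME_TEST VALUES(1, CURRENT_TIME())")
        // Phoenix connections default to autoCommit=false, so commit the upsert explicitly
        conn.commit()
      } finally {
        conn.close()
      }
    }
  }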
http://git-wip-us.apache.org/repos/asf/phoenix/blob/3378ee85/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
index b4ee538..bbab4e0 100644
--- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
+++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/PhoenixSparkIT.scala
@@ -572,4 +572,16 @@ class PhoenixSparkIT extends AbstractPhoenixSparkIT {
 
     assert(Math.abs(epoch - ts) < 300000)
   }
+
+  test("Can load Phoenix Time columns through DataFrame API") {
+    val sqlContext = new SQLContext(sc)
+    val df = sqlContext.read
+      .format("org.apache.phoenix.spark")
+      .options(Map("table" -> "TIME_TEST", "zkUrl" -> quorumAddress))
+      .load
+    val time = df.select("COL1").first().getTimestamp(0).getTime
+    val epoch = new Date().getTime
+    assert(Math.abs(epoch - time) < 86400000)
+  }
+
 }

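For reference, a standalone sketch of the DataFrame read that the new test exercises,
assuming a local Spark context and a ZooKeeper quorum at "localhost:2181" (the IT
suite supplies both through AbstractPhoenixSparkIT and quorumAddress, so the names
here are placeholders):

  import org.apache.spark.{SparkConf, SparkContext}
  import org.apache.spark.sql.SQLContext

  object TimeColumnReadSketch {
    def main(args: Array[String]): Unit = {
      val sc = new SparkContext(new SparkConf().setMaster("local[*]").setAppName("phoenix-time-read"))
      val sqlContext = new SQLContext(sc)

      // Load the TIME_TEST table through the Phoenix Spark data source
      val df = sqlContext.read
        .format("org.apache.phoenix.spark")
        .options(Map("table" -> "TIME_TEST", "zkUrl" -> "localhost:2181"))
        .load()

      // With this fix, TIME columns surface as java.sql.Timestamp values
      println(df.select("COL1").first().getTimestamp(0))
      sc.stop()
    }
  }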
http://git-wip-us.apache.org/repos/asf/phoenix/blob/3378ee85/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
index 505de1b..204a7ef 100644
--- a/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
+++ b/phoenix-spark/src/main/scala/org/apache/phoenix/spark/PhoenixRDD.scala
@@ -130,13 +130,14 @@ class PhoenixRDD(sc: SparkContext, table: String, columns: Seq[String],
       val rowSeq = columns.map { case (name, sqlType) =>
         val res = pr.resultMap(name)
 
-        // Special handling for data types
-        if(dateAsTimestamp && sqlType == 91) { // 91 is the defined type for Date
-          new java.sql.Timestamp(res.asInstanceOf[java.sql.Date].getTime)
-        }
-        else {
-          res
-        }
+          // Special handling for data types
+          if (dateAsTimestamp && sqlType == 91) { // 91 is the defined type for Date
+            new java.sql.Timestamp(res.asInstanceOf[java.sql.Date].getTime)
+          } else if (sqlType == 92) { // 92 is the defined type for Time
+            new java.sql.Timestamp(res.asInstanceOf[java.sql.Time].getTime)
+          } else {
+            res
+          }
       }
 
       // Create a Spark Row from the sequence

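The literal type codes in the hunk above are the java.sql.Types constants
(Types.DATE == 91, Types.TIME == 92). The sketch below restates the same per-column
conversion against those constants; the helper name is hypothetical and the committed
code keeps the literal values inline:

  import java.sql.{Date, Time, Timestamp, Types}

  // Hypothetical helper mirroring the per-column conversion inside PhoenixRDD
  def toSparkValue(res: AnyRef, sqlType: Int, dateAsTimestamp: Boolean): AnyRef =
    if (dateAsTimestamp && sqlType == Types.DATE) {
      new Timestamp(res.asInstanceOf[Date].getTime)   // DATE -> Timestamp when requested
    } else if (sqlType == Types.TIME) {
      new Timestamp(res.asInstanceOf[Time].getTime)   // TIME always surfaces as Timestamp
    } else {
      res                                             // all other types pass through unchanged
    }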