Repository: spark
Updated Branches:
  refs/heads/branch-1.4 91ce13109 -> 84ee348bc


[SPARK-7330] [SQL] avoid NPE at jdbc rdd

Thanks to nadavoosh for pointing this out in #5590.
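
For context, java.sql.ResultSet#getDate returns null when a DATE column is SQL
NULL, and DateUtils.fromJavaDate dereferences its argument, hence the NPE. Below
is a minimal standalone sketch of the guard pattern this patch applies; it
assumes only the H2 driver on the classpath (as in JDBCSuite), and the getTime
call stands in for Spark's DateUtils.fromJavaDate:

  import java.sql.DriverManager

  object NullDateDemo {
    def main(args: Array[String]): Unit = {
      val conn = DriverManager.getConnection("jdbc:h2:mem:demo")
      conn.prepareStatement("create table t (d DATE)").executeUpdate()
      conn.prepareStatement("insert into t values (null)").executeUpdate()
      val rs = conn.createStatement().executeQuery("select d from t")
      rs.next()
      val dateVal = rs.getDate(1)  // null, because the column is SQL NULL
      // Unguarded, dateVal.getTime would throw a NullPointerException here.
      val converted = if (dateVal != null) dateVal.getTime else null
      println(converted)  // prints "null"
      conn.close()
    }
  }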

Author: Daoyuan Wang <daoyuan.w...@intel.com>

Closes #5877 from adrian-wang/jdbcrdd and squashes the following commits:

cc11900 [Daoyuan Wang] avoid NPE in jdbcrdd

(cherry picked from commit ed9be06a4797bbb678355b361054c8872ac20b75)
Signed-off-by: Yin Huai <yh...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/84ee348b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/84ee348b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/84ee348b

Branch: refs/heads/branch-1.4
Commit: 84ee348bce9f06626d4ca80358ae3eed32b4dd36
Parents: 91ce131
Author: Daoyuan Wang <daoyuan.w...@intel.com>
Authored: Thu May 7 10:05:01 2015 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Thu May 7 10:05:19 2015 -0700

----------------------------------------------------------------------
 .../org/apache/spark/sql/jdbc/JDBCRDD.scala     |  8 ++++++-
 .../org/apache/spark/sql/jdbc/JDBCSuite.scala   | 25 ++++++++++++++++++++
 2 files changed, 32 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/84ee348b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
index 325a326..1a5083d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
@@ -362,7 +362,13 @@ private[sql] class JDBCRDD(
           conversions(i) match {
            case BooleanConversion    => mutableRow.setBoolean(i, rs.getBoolean(pos))
             case DateConversion       =>
-              mutableRow.update(i, DateUtils.fromJavaDate(rs.getDate(pos)))
+              // DateUtils.fromJavaDate does not handle null value, so we need to check it.
+              val dateVal = rs.getDate(pos)
+              if (dateVal != null) {
+                mutableRow.update(i, DateUtils.fromJavaDate(dateVal))
+              } else {
+                mutableRow.update(i, null)
+              }
             case DecimalConversion    =>
               val decimalVal = rs.getBigDecimal(pos)
               if (decimalVal == null) {

http://git-wip-us.apache.org/repos/asf/spark/blob/84ee348b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 3ec17d3..021affa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -104,6 +104,8 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
         ).executeUpdate()
     conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
       + "'1996-01-01', '2002-02-20 11:22:33.543543543')").executeUpdate()
+    conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
+      + "null, '2002-02-20 11:22:33.543543543')").executeUpdate()
     conn.commit()
     sql(
       s"""
@@ -127,6 +129,23 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
         |OPTIONS (url '$url', dbtable 'TEST.FLTTYPES', user 'testUser', password 'testPass')
       """.stripMargin.replaceAll("\n", " "))
 
+    conn.prepareStatement(
+      s"""
+        |create table test.nulltypes (a INT, b BOOLEAN, c TINYINT, d BINARY(20), e VARCHAR(20),
+        |f VARCHAR_IGNORECASE(20), g CHAR(20), h BLOB, i CLOB, j TIME, k DATE, l TIMESTAMP,
+        |m DOUBLE, n REAL, o DECIMAL(40, 20))
+      """.stripMargin.replaceAll("\n", " ")).executeUpdate()
+    conn.prepareStatement("insert into test.nulltypes values ("
+      + "null, null, null, null, null, null, null, null, null, "
+      + "null, null, null, null, null, null)").executeUpdate()
+    conn.commit()
+    sql(
+      s"""
+         |CREATE TEMPORARY TABLE nulltypes
+         |USING org.apache.spark.sql.jdbc
+         |OPTIONS (url '$url', dbtable 'TEST.NULLTYPES', user 'testUser', password 'testPass')
+      """.stripMargin.replaceAll("\n", " "))
+
     // Untested: IDENTITY, OTHER, UUID, ARRAY, and GEOMETRY types.
   }
 
@@ -254,6 +273,7 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
     val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").collect()
     val cachedRows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").cache().collect()
     assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
+    assert(rows(1).getAs[java.sql.Date](1) === null)
     assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
   }
 
@@ -266,6 +286,11 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
     assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
   }
 
+  test("test types for null value") {
+    val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.NULLTYPES").collect()
+    assert((0 to 14).forall(i => rows(0).isNullAt(i)))
+  }
+
   test("H2 floating-point types") {
     val rows = sql("SELECT * FROM flttypes").collect()
     assert(rows(0).getDouble(0) === 1.00000000000000022) // Yes, I meant ==.

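For reference, a quick way to see the fix in action is the TEST.TIMETYPES path
exercised by the suite (a sketch, not self-contained: TestSQLContext,
urlWithUserAndPass, and the table come from JDBCSuite's setup above):

  // Row 1 holds the NULL date inserted in the setup; before this patch,
  // collect() hit the NPE inside the DateConversion branch of JDBCRDD.
  val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").collect()
  assert(rows(1).getAs[java.sql.Date](1) === null)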
