This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new 2dda441bec01 [SPARK-47666][SQL][3.5] Fix NPE when reading mysql bit array as LongType
2dda441bec01 is described below

commit 2dda441bec018b1386248b54ccfef46defa5a07a
Author: Kent Yao <y...@apache.org>
AuthorDate: Tue Apr 2 18:16:35 2024 +0800

    [SPARK-47666][SQL][3.5] Fix NPE when reading mysql bit array as LongType
    
    ### What changes were proposed in this pull request?
    
    This PR fixes an NPE when reading a MySQL bit array as LongType.
    
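    A minimal sketch of the failing conversion, assuming a MySQL `BIT(n)` column read through the `binarylong` path (the helper name below is illustrative, not Spark's actual API):

    ```scala
    import java.sql.ResultSet

    // Illustrative helper, not Spark's actual method name.
    // rs.getBytes returns null for a SQL NULL, which the old loop dereferenced.
    def bitArrayToLong(rs: ResultSet, pos: Int): java.lang.Long = {
      val bytes = rs.getBytes(pos + 1)
      if (bytes == null) {
        null // propagate the NULL instead of throwing an NPE
      } else {
        var ans = 0L
        var j = 0
        while (j < bytes.length) {
          ans = 256 * ans + (255 & bytes(j)) // big-endian byte accumulation
          j += 1
        }
        java.lang.Long.valueOf(ans)
      }
    }
    ```
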
    ### Why are the changes needed?
    
    bugfix
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    new tests

    ### Was this patch authored or co-authored using generative AI tooling?
    
    no
    
    Closes #45792 from yaooqinn/PR_TOOL_PICK_PR_45790_BRANCH-3.5.
    
    Authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Kent Yao <y...@apache.org>
---
 .../apache/spark/sql/jdbc/MySQLIntegrationSuite.scala  | 10 +++++++++-
 .../sql/execution/datasources/jdbc/JdbcUtils.scala     | 18 ++++++++++--------
 2 files changed, 19 insertions(+), 9 deletions(-)

diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
index 68d88fbc552a..bc7302163d9a 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
@@ -62,6 +62,9 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
       + "17, 77777, 123456789, 123456789012345, 123456789012345.123456789012345, "
       + "42.75, 1.0000000000000002, -128, 255)").executeUpdate()
 
+    conn.prepareStatement("INSERT INTO numbers VALUES (null, null, "
+      + "null, null, null, null, null, null, null, null, null)").executeUpdate()
+
     conn.prepareStatement("CREATE TABLE dates (d DATE, t TIME, dt DATETIME, ts 
TIMESTAMP, "
       + "yr YEAR)").executeUpdate()
     conn.prepareStatement("INSERT INTO dates VALUES ('1991-11-09', '13:31:24', 
"
@@ -101,7 +104,7 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
   test("Numeric types") {
     val df = sqlContext.read.jdbc(jdbcUrl, "numbers", new Properties)
     val rows = df.collect()
-    assert(rows.length == 1)
+    assert(rows.length == 2)
     val types = rows(0).toSeq.map(x => x.getClass.toString)
     assert(types.length == 11)
     assert(types(0).equals("class java.lang.Boolean"))
@@ -212,6 +215,11 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
        """.stripMargin.replaceAll("\n", " "))
     assert(sql("select x, y from queryOption").collect.toSet == expectedResult)
   }
+
+  test("SPARK-47666: Check nulls for result set getters") {
+    val nulls = spark.read.jdbc(jdbcUrl, "numbers", new Properties).tail(1).head
+    assert(nulls === Row(null, null, null, null, null, null, null, null, null, null, null))
+  }
 }
 
 /**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 3521a50cd2dd..7b5c4cfc9b6e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -430,14 +430,16 @@ object JdbcUtils extends Logging with SQLConfHelper {
 
     case LongType if metadata.contains("binarylong") =>
       (rs: ResultSet, row: InternalRow, pos: Int) =>
-        val bytes = rs.getBytes(pos + 1)
-        var ans = 0L
-        var j = 0
-        while (j < bytes.length) {
-          ans = 256 * ans + (255 & bytes(j))
-          j = j + 1
-        }
-        row.setLong(pos, ans)
+        val l = nullSafeConvert[Array[Byte]](rs.getBytes(pos + 1), bytes => {
+          var ans = 0L
+          var j = 0
+          while (j < bytes.length) {
+            ans = 256 * ans + (255 & bytes(j))
+            j = j + 1
+          }
+          ans
+        })
+        row.update(pos, l)
 
     case LongType =>
       (rs: ResultSet, row: InternalRow, pos: Int) =>


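The fix above routes the raw bytes through `nullSafeConvert` and stores the result with `row.update`, which accepts a boxed null, unlike the old `row.setLong` path. A hedged sketch of that guard pattern (the actual helper in JdbcUtils may differ in detail):

```scala
// Apply f only when the JDBC getter returned a non-null value; otherwise
// pass the null through so the row ends up holding SQL NULL, not an NPE.
def nullSafeConvert[T](input: T, f: T => Any): Any =
  if (input == null) null else f(input)

// Example: a NULL BIT column yields a null byte array from getBytes.
val bytes: Array[Byte] = null
val l = nullSafeConvert[Array[Byte]](bytes, bs => {
  var ans = 0L
  bs.foreach(b => ans = 256 * ans + (255 & b))
  ans
})
assert(l == null)
```
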
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
