This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch branch-2.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-2.3 by this push:
     new 9446da1  [SPARK-27216][CORE][BACKPORT-2.3] Upgrade RoaringBitmap to 
0.7.45 to fix Kryo unsafe ser/dser issue
9446da1 is described below

commit 9446da10ddfad7afe5f8ca9386d46607e489f742
Author: LantaoJin <jinlan...@gmail.com>
AuthorDate: Thu Apr 4 18:23:52 2019 -0500

    [SPARK-27216][CORE][BACKPORT-2.3] Upgrade RoaringBitmap to 0.7.45 to fix 
Kryo unsafe ser/dser issue
    
    ## What changes were proposed in this pull request?
    
    Back-port of #24264 to branch-2.3.
    
    HighlyCompressedMapStatus uses RoaringBitmap to record the empty blocks, 
but RoaringBitmap couldn't be serialized/deserialized with the unsafe KryoSerializer.
    
    It's a bug in RoaringBitmap 0.5.11 that is fixed in the latest version.
    
    ## How was this patch tested?
    
    Added a unit test.
    
    Closes #24291 from LantaoJin/SPARK-27216_BACKPORT-2.3.
    
    Authored-by: LantaoJin <jinlan...@gmail.com>
    Signed-off-by: Sean Owen <sean.o...@databricks.com>
---
 .../spark/serializer/KryoSerializerSuite.scala     | 35 ++++++----------------
 dev/deps/spark-deps-hadoop-2.6                     |  3 +-
 dev/deps/spark-deps-hadoop-2.7                     |  3 +-
 pom.xml                                            |  2 +-
 4 files changed, 14 insertions(+), 29 deletions(-)

diff --git 
a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala 
b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
index 1234a80..c9a9657 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.serializer
 
-import java.io.{ByteArrayInputStream, ByteArrayOutputStream, FileInputStream, 
FileOutputStream}
+import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
 import java.nio.ByteBuffer
 
 import scala.collection.JavaConverters._
@@ -32,7 +32,6 @@ import org.apache.spark.{SharedSparkContext, SparkConf, 
SparkFunSuite}
 import org.apache.spark.scheduler.HighlyCompressedMapStatus
 import org.apache.spark.serializer.KryoTest._
 import org.apache.spark.storage.BlockManagerId
-import org.apache.spark.util.Utils
 
 class KryoSerializerSuite extends SparkFunSuite with SharedSparkContext {
   conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
@@ -365,30 +364,6 @@ class KryoSerializerSuite extends SparkFunSuite with 
SharedSparkContext {
     assert(thrown.getCause.isInstanceOf[KryoException])
   }
 
-  test("SPARK-12222: deserialize RoaringBitmap throw Buffer underflow 
exception") {
-    val dir = Utils.createTempDir()
-    val tmpfile = dir.toString + "/RoaringBitmap"
-    val outStream = new FileOutputStream(tmpfile)
-    val output = new KryoOutput(outStream)
-    val bitmap = new RoaringBitmap
-    bitmap.add(1)
-    bitmap.add(3)
-    bitmap.add(5)
-    // Ignore Kryo because it doesn't use writeObject
-    bitmap.serialize(new KryoOutputObjectOutputBridge(null, output))
-    output.flush()
-    output.close()
-
-    val inStream = new FileInputStream(tmpfile)
-    val input = new KryoInput(inStream)
-    val ret = new RoaringBitmap
-    // Ignore Kryo because it doesn't use readObject
-    ret.deserialize(new KryoInputObjectInputBridge(null, input))
-    input.close()
-    assert(ret == bitmap)
-    Utils.deleteRecursively(dir)
-  }
-
   test("KryoOutputObjectOutputBridge.writeObject and 
KryoInputObjectInputBridge.readObject") {
     val kryo = new KryoSerializer(conf).newKryo()
 
@@ -442,6 +417,14 @@ class KryoSerializerSuite extends SparkFunSuite with 
SharedSparkContext {
       testSerializerInstanceReuse(autoReset = autoReset, referenceTracking = 
referenceTracking)
     }
   }
+
+  test("SPARK-27216: test RoaringBitmap ser/dser with Kryo") {
+    val expected = new RoaringBitmap()
+    expected.add(1787)
+    val ser = new KryoSerializer(conf).newInstance()
+    val actual: RoaringBitmap = ser.deserialize(ser.serialize(expected))
+    assert(actual === expected)
+  }
 }
 
 class KryoSerializerAutoResetDisabledSuite extends SparkFunSuite with 
SharedSparkContext {
diff --git a/dev/deps/spark-deps-hadoop-2.6 b/dev/deps/spark-deps-hadoop-2.6
index 4f0794d..c9f219d 100644
--- a/dev/deps/spark-deps-hadoop-2.6
+++ b/dev/deps/spark-deps-hadoop-2.6
@@ -1,5 +1,5 @@
 JavaEWAH-0.3.2.jar
-RoaringBitmap-0.5.11.jar
+RoaringBitmap-0.7.45.jar
 ST4-4.0.4.jar
 activation-1.1.1.jar
 aircompressor-0.8.jar
@@ -178,6 +178,7 @@ scala-reflect-2.11.8.jar
 scala-xml_2.11-1.0.5.jar
 scalap-2.11.8.jar
 shapeless_2.11-2.3.2.jar
+shims-0.7.45.jar
 slf4j-api-1.7.16.jar
 slf4j-log4j12-1.7.16.jar
 snakeyaml-1.15.jar
diff --git a/dev/deps/spark-deps-hadoop-2.7 b/dev/deps/spark-deps-hadoop-2.7
index df2be77..8e99ea3 100644
--- a/dev/deps/spark-deps-hadoop-2.7
+++ b/dev/deps/spark-deps-hadoop-2.7
@@ -1,5 +1,5 @@
 JavaEWAH-0.3.2.jar
-RoaringBitmap-0.5.11.jar
+RoaringBitmap-0.7.45.jar
 ST4-4.0.4.jar
 activation-1.1.1.jar
 aircompressor-0.8.jar
@@ -179,6 +179,7 @@ scala-reflect-2.11.8.jar
 scala-xml_2.11-1.0.5.jar
 scalap-2.11.8.jar
 shapeless_2.11-2.3.2.jar
+shims-0.7.45.jar
 slf4j-api-1.7.16.jar
 slf4j-log4j12-1.7.16.jar
 snakeyaml-1.15.jar
diff --git a/pom.xml b/pom.xml
index cee4d14..9cb1a55 100644
--- a/pom.xml
+++ b/pom.xml
@@ -573,7 +573,7 @@
       <dependency>
         <groupId>org.roaringbitmap</groupId>
         <artifactId>RoaringBitmap</artifactId>
-        <version>0.5.11</version>
+        <version>0.7.45</version>
       </dependency>
       <dependency>
         <groupId>commons-net</groupId>


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to