Repository: spark
Updated Branches:
  refs/heads/master 6193a202a -> 381a967a7


[SPARK-25249][CORE][TEST] add a unit test for OpenHashMap

## What changes were proposed in this pull request?

This PR adds a unit test for OpenHashMap. It can help developers
distinguish between the default values 0/0.0/0L and null

## How was this patch tested?

Closes #22241 from 10110346/openhashmap.

Authored-by: liuxian <liu.xi...@zte.com.cn>
Signed-off-by: Sean Owen <sean.o...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/381a967a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/381a967a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/381a967a

Branch: refs/heads/master
Commit: 381a967a76c9e7ea1e100a922cafedc50042b81e
Parents: 6193a20
Author: liuxian <liu.xi...@zte.com.cn>
Authored: Mon Aug 27 12:05:33 2018 -0500
Committer: Sean Owen <sean.o...@databricks.com>
Committed: Mon Aug 27 12:05:33 2018 -0500

----------------------------------------------------------------------
 .../util/collection/OpenHashMapSuite.scala      | 46 ++++++++++++++++++++
 1 file changed, 46 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/381a967a/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
----------------------------------------------------------------------
diff --git 
a/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala 
b/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
index 08a3200..151235d 100644
--- 
a/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
+++ 
b/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
@@ -194,4 +194,50 @@ class OpenHashMapSuite extends SparkFunSuite with Matchers 
{
     val numInvalidValues = map.iterator.count(_._2 == 0)
     assertResult(0)(numInvalidValues)
   }
+
+  test("distinguish between the 0/0.0/0L and null") {
+    val specializedMap1 = new OpenHashMap[String, Long]
+    specializedMap1("a") = null.asInstanceOf[Long]
+    specializedMap1("b") = 0L
+    assert(specializedMap1.contains("a"))
+    assert(!specializedMap1.contains("c"))
+    // null.asInstanceOf[Long] will return 0L
+    assert(specializedMap1("a") === 0L)
+    assert(specializedMap1("b") === 0L)
+    // If the value type is in the @specialized annotation and the
+    // key is not contained, `map(key)` will return 0
+    assert(specializedMap1("c") === 0L)
+
+    val specializedMap2 = new OpenHashMap[String, Double]
+    specializedMap2("a") = null.asInstanceOf[Double]
+    specializedMap2("b") = 0.toDouble
+    assert(specializedMap2.contains("a"))
+    assert(!specializedMap2.contains("c"))
+    // null.asInstanceOf[Double] will return 0.0
+    assert(specializedMap2("a") === 0.0)
+    assert(specializedMap2("b") === 0.0)
+    assert(specializedMap2("c") === 0.0)
+
+    val map1 = new OpenHashMap[String, Short]
+    map1("a") = null.asInstanceOf[Short]
+    map1("b") = 0.toShort
+    assert(map1.contains("a"))
+    assert(!map1.contains("c"))
+    // null.asInstanceOf[Short] will return 0
+    assert(map1("a") === 0)
+    assert(map1("b") === 0)
+    // If the value type is not in the @specialized annotation and the
+    // key is not contained, `map(key)` will return null
+    assert(map1("c") === null)
+
+    val map2 = new OpenHashMap[String, Float]
+    map2("a") = null.asInstanceOf[Float]
+    map2("b") = 0.toFloat
+    assert(map2.contains("a"))
+    assert(!map2.contains("c"))
+    // null.asInstanceOf[Float] will return 0.0
+    assert(map2("a") === 0.0)
+    assert(map2("b") === 0.0)
+    assert(map2("c") === null)
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to