Github user srowen commented on a diff in the pull request: https://github.com/apache/spark/pull/19553#discussion_r147918030 --- Diff: core/src/main/scala/org/apache/spark/api/java/JavaUtils.scala --- @@ -43,10 +43,17 @@ private[spark] object JavaUtils { override def size: Int = underlying.size - override def get(key: AnyRef): B = try { - underlying.getOrElse(key.asInstanceOf[A], null.asInstanceOf[B]) - } catch { - case ex: ClassCastException => null.asInstanceOf[B] + // Delegate to implementation because AbstractMap implementation iterates over whole key set + override def containsKey(key: AnyRef): Boolean = { + underlying.contains(key.asInstanceOf[A]) --- End diff -- No, because Scala Maps do require their key type in contains() and get(). https://www.scala-lang.org/api/current/scala/collection/Map.html . This is indeed the difference between the APIs that we need to bridge.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org