Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22749#discussion_r227867735
  
    --- Diff: 
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
 ---
    @@ -280,59 +281,59 @@ class ScalaReflectionSuite extends SparkFunSuite {
     
       test("serialize and deserialize arbitrary sequence types") {
         import scala.collection.immutable.Queue
    -    val queueSerializer = serializerFor[Queue[Int]](BoundReference(
    -      0, ObjectType(classOf[Queue[Int]]), nullable = false))
    -    assert(queueSerializer.dataType.head.dataType ==
    +    val queueSerializer = 
serializerForType(ScalaReflection.localTypeOf[Queue[Int]])
    +    assert(queueSerializer.dataType ==
           ArrayType(IntegerType, containsNull = false))
         val queueDeserializer = deserializerFor[Queue[Int]]
         assert(queueDeserializer.dataType == ObjectType(classOf[Queue[_]]))
     
         import scala.collection.mutable.ArrayBuffer
    -    val arrayBufferSerializer = 
serializerFor[ArrayBuffer[Int]](BoundReference(
    -      0, ObjectType(classOf[ArrayBuffer[Int]]), nullable = false))
    -    assert(arrayBufferSerializer.dataType.head.dataType ==
    +    val arrayBufferSerializer = 
serializerForType(ScalaReflection.localTypeOf[ArrayBuffer[Int]])
    +    assert(arrayBufferSerializer.dataType ==
           ArrayType(IntegerType, containsNull = false))
         val arrayBufferDeserializer = deserializerFor[ArrayBuffer[Int]]
         assert(arrayBufferDeserializer.dataType == 
ObjectType(classOf[ArrayBuffer[_]]))
       }
     
       test("serialize and deserialize arbitrary map types") {
    -    val mapSerializer = serializerFor[Map[Int, Int]](BoundReference(
    -      0, ObjectType(classOf[Map[Int, Int]]), nullable = false))
    -    assert(mapSerializer.dataType.head.dataType ==
    +    val mapSerializer = 
serializerForType(ScalaReflection.localTypeOf[Map[Int, Int]])
    +    assert(mapSerializer.dataType ==
           MapType(IntegerType, IntegerType, valueContainsNull = false))
         val mapDeserializer = deserializerFor[Map[Int, Int]]
         assert(mapDeserializer.dataType == ObjectType(classOf[Map[_, _]]))
     
         import scala.collection.immutable.HashMap
    -    val hashMapSerializer = serializerFor[HashMap[Int, 
Int]](BoundReference(
    -      0, ObjectType(classOf[HashMap[Int, Int]]), nullable = false))
    -    assert(hashMapSerializer.dataType.head.dataType ==
    +    val hashMapSerializer = 
serializerForType(ScalaReflection.localTypeOf[HashMap[Int, Int]])
    +    assert(hashMapSerializer.dataType ==
           MapType(IntegerType, IntegerType, valueContainsNull = false))
         val hashMapDeserializer = deserializerFor[HashMap[Int, Int]]
         assert(hashMapDeserializer.dataType == ObjectType(classOf[HashMap[_, 
_]]))
     
         import scala.collection.mutable.{LinkedHashMap => LHMap}
    -    val linkedHashMapSerializer = serializerFor[LHMap[Long, 
String]](BoundReference(
    -      0, ObjectType(classOf[LHMap[Long, String]]), nullable = false))
    -    assert(linkedHashMapSerializer.dataType.head.dataType ==
    +    val linkedHashMapSerializer = serializerForType(
    +        ScalaReflection.localTypeOf[LHMap[Long, String]])
    +    assert(linkedHashMapSerializer.dataType ==
           MapType(LongType, StringType, valueContainsNull = true))
         val linkedHashMapDeserializer = deserializerFor[LHMap[Long, String]]
         assert(linkedHashMapDeserializer.dataType == 
ObjectType(classOf[LHMap[_, _]]))
       }
     
       test("SPARK-22442: Generate correct field names for special characters") 
{
    -    val serializer = serializerFor[SpecialCharAsFieldData](BoundReference(
    -      0, ObjectType(classOf[SpecialCharAsFieldData]), nullable = false))
    +    val serializer = 
serializerForType(ScalaReflection.localTypeOf[SpecialCharAsFieldData])
    --- End diff --
    
    Like the `deserializerFor` helper already used in this suite, let's also create a `serializerFor` helper to avoid repeating `serializerForType(ScalaReflection.localTypeOf[...])` at every call site.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to