Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/18704#discussion_r138365192
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnAccessor.scala ---
    @@ -149,4 +153,23 @@ private[columnar] object ColumnAccessor {
             throw new Exception(s"not support type: $other")
         }
       }
    +
    +  def decompress(columnAccessor: ColumnAccessor, columnVector: WritableColumnVector, numRows: Int):
    +      Unit = {
    +    if (columnAccessor.isInstanceOf[NativeColumnAccessor[_]]) {
    +      val nativeAccessor = columnAccessor.asInstanceOf[NativeColumnAccessor[_]]
    +      nativeAccessor.decompress(columnVector, numRows)
    +    } else {
    +      val dataBuffer = columnAccessor.asInstanceOf[BasicColumnAccessor[_]].getByteBuffer
    +      val nullsBuffer = dataBuffer.duplicate().order(ByteOrder.nativeOrder())
    +      nullsBuffer.rewind()
    +
    +      val numNulls = ByteBufferHelper.getInt(nullsBuffer)
    +      for (i <- 0 until numNulls) {
    +        val cordinal = ByteBufferHelper.getInt(nullsBuffer)
    +        columnVector.putNull(cordinal)
    +      }
    +      throw new RuntimeException("Not support non-primitive type now")
    --- End diff ---
    
    If we need to throw an exception at the end anyway, why not do it at the beginning?
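
    For illustration only (not part of the original comment): a minimal sketch of the fail-fast restructuring this suggests, assuming the null-position loop on the unsupported path can simply be dropped because the method throws regardless:

        def decompress(
            columnAccessor: ColumnAccessor,
            columnVector: WritableColumnVector,
            numRows: Int): Unit = {
          columnAccessor match {
            case nativeAccessor: NativeColumnAccessor[_] =>
              // Primitive types: delegate to the compression-aware accessor.
              nativeAccessor.decompress(columnVector, numRows)
            case _ =>
              // Fail fast: non-primitive types are not supported yet, so reading
              // the null positions into the column vector first is wasted work.
              throw new RuntimeException("Not support non-primitive type now")
          }
        }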

