Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22724#discussion_r225392951
  
    --- Diff: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
 ---
    @@ -196,6 +197,48 @@ object Literal {
         case other =>
           throw new RuntimeException(s"no default for type $dataType")
       }
    +
    +  private[expressions] def validateLiteralValue(value: Any, dataType: 
DataType): Unit = {
    +    def doValidate(v: Any, dataType: DataType): Boolean = dataType match {
    +      case BooleanType => v.isInstanceOf[Boolean]
    +      case ByteType => v.isInstanceOf[Byte]
    +      case ShortType => v.isInstanceOf[Short]
    +      case IntegerType | DateType => v.isInstanceOf[Int]
    +      case LongType | TimestampType => v.isInstanceOf[Long]
    +      case FloatType => v.isInstanceOf[Float]
    +      case DoubleType => v.isInstanceOf[Double]
    +      case _: DecimalType => v.isInstanceOf[Decimal]
    +      case CalendarIntervalType => v.isInstanceOf[CalendarInterval]
    +      case BinaryType => v.isInstanceOf[Array[Byte]]
    +      case StringType => v.isInstanceOf[UTF8String]
    +      case st: StructType =>
    +        v.isInstanceOf[InternalRow] && {
    +          val row = v.asInstanceOf[InternalRow]
    +          st.fields.map(_.dataType).zipWithIndex.forall {
    +            case (dt, i) => doValidate(row.get(i, dt), dt)
    +          }
    +        }
    +      case at: ArrayType =>
    +        v.isInstanceOf[GenericArrayData] && {
    +          val ar = v.asInstanceOf[GenericArrayData].array
    +          ar.isEmpty || doValidate(ar.head, at.elementType)
    +        }
    +      case mt: MapType =>
    +        v.isInstanceOf[ArrayBasedMapData] && {
    +          val map = v.asInstanceOf[ArrayBasedMapData]
    +          map.numElements() == 0 || {
    --- End diff --
    
    We don't need this. The array validation already considers numElements.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to