Github user maropu commented on a diff in the pull request: https://github.com/apache/spark/pull/22512#discussion_r225140616 --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/InterpretedMutableProjection.scala --- @@ -53,6 +55,47 @@ class InterpretedMutableProjection(expressions: Seq[Expression]) extends Mutable this } + private[this] val fieldWriters = validExprs.map { case (e, i) => + val writer = generateRowWriter(i, e.dataType) + if (!e.nullable) { + (v: Any) => writer(v) + } else { + (v: Any) => { + if (v == null) { + mutableRow.setNullAt(i) + } else { + writer(v) + } + } + } + } + + private def generateRowWriter(ordinal: Int, dt: DataType): Any => Unit = dt match { + case BooleanType => + v => mutableRow.setBoolean(ordinal, v.asInstanceOf[Boolean]) + case ByteType => + v => mutableRow.setByte(ordinal, v.asInstanceOf[Byte]) + case ShortType => + v => mutableRow.setShort(ordinal, v.asInstanceOf[Short]) + case IntegerType | DateType => + v => mutableRow.setInt(ordinal, v.asInstanceOf[Int]) + case LongType | TimestampType => + v => mutableRow.setLong(ordinal, v.asInstanceOf[Long]) + case FloatType => + v => mutableRow.setFloat(ordinal, v.asInstanceOf[Float]) + case DoubleType => + v => mutableRow.setDouble(ordinal, v.asInstanceOf[Double]) + case DecimalType.Fixed(precision, _) => + v => mutableRow.setDecimal(ordinal, v.asInstanceOf[Decimal], precision) + case CalendarIntervalType | BinaryType | _: ArrayType | StringType | _: StructType | + _: MapType | _: UserDefinedType[_] => + v => mutableRow.update(ordinal, v) --- End diff -- This match should only accept generic internal rows, so I added code to verify the row type for the `UnsafeRow` case: https://github.com/apache/spark/pull/22512/files#diff-3ed819282d4e4941571dd3b08fc03e37R55
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org