Github user ueshin commented on a diff in the pull request: https://github.com/apache/spark/pull/18754#discussion_r139612110 --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/arrow/ArrowWriter.scala --- @@ -224,6 +226,25 @@ private[arrow] class DoubleWriter(val valueVector: NullableFloat8Vector) extends } } +private[arrow] class DecimalWriter( + val valueVector: NullableDecimalVector, + precision: Int, + scale: Int) extends ArrowFieldWriter { + + override def valueMutator: NullableDecimalVector#Mutator = valueVector.getMutator() + + override def setNull(): Unit = { + valueMutator.setNull(count) + } + + override def setValue(input: SpecializedGetters, ordinal: Int): Unit = { + valueMutator.setIndexDefined(count) + val decimal = input.getDecimal(ordinal, precision, scale) + decimal.changePrecision(precision, scale) + DecimalUtility.writeBigDecimalToArrowBuf(decimal.toJavaBigDecimal, valueVector.getBuffer, count) --- End diff -- @BryanCutler Thanks, I'll update it to use `setSafe` after upgrading Arrow to 0.7. By the way, when I tested upgrading to 0.7 locally, `ArrowConvertersSuite.string type conversion` started failing. Do you have any idea what might be causing that?
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org