Github user viirya commented on a diff in the pull request: https://github.com/apache/spark/pull/18875#discussion_r137950179 --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonGenerator.scala --- @@ -193,14 +228,58 @@ private[sql] class JacksonGenerator( * * @param row The row to convert */ - def write(row: InternalRow): Unit = writeObject(writeFields(row, schema, rootFieldWriters)) + def write(row: InternalRow): Unit = dataType match { + case st: StructType => + writeObject(writeFields(row, st, rootFieldWriters)) + case _ => throw new UnsupportedOperationException( + s"`JacksonGenerator` can only be used to write out a row when initialized with `StructType`.") + } /** - * Transforms multiple `InternalRow`s to JSON array using Jackson + * Transforms multiple `InternalRow`s or `MapData`s to JSON array using Jackson * - * @param array The array of rows to convert + * @param array The array of rows or maps to convert */ - def write(array: ArrayData): Unit = writeArray(writeArrayData(array, arrElementWriter)) + def write(array: ArrayData): Unit = dataType match { + case st: StructType => + try { + if (array.numElements() > 0) { + array.getStruct(0, st.length) + } + } catch { + case cce: ClassCastException => + throw new UnsupportedOperationException( + s"`JacksonGenerator` can only be used to write out an array of struct " + + s"when initialized with `StructType`") + } + writeArray(writeArrayData(array, arrElementWriter)) + case _: MapType => + try { + if (array.numElements() > 0) { + array.getMap(0) + } + } catch { + case cce: ClassCastException => + throw new UnsupportedOperationException( + s"`JacksonGenerator` can only be used to write out an array of map when initialized" + + s"with `MapType`") + } + writeArray(writeArrayData(array, arrElementWriter)) + case _ => throw new UnsupportedOperationException( --- End diff -- I think we already check that `dataType` is valid when constructing this instance, so this pattern case is unnecessary.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org