Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22037#discussion_r209412587
  
    --- Diff: 
external/avro/src/main/scala/org/apache/spark/sql/avro/SchemaConverters.scala 
---
    @@ -114,32 +129,35 @@ object SchemaConverters {
           prevNameSpace: String = "",
           outputTimestampType: AvroOutputTimestampType.Value = 
AvroOutputTimestampType.TIMESTAMP_MICROS)
         : Schema = {
    -    val builder = if (nullable) {
    -      SchemaBuilder.builder().nullable()
    -    } else {
    -      SchemaBuilder.builder()
    -    }
    +    val builder = SchemaBuilder.builder()
     
    -    catalystType match {
    +    val schema = catalystType match {
           case BooleanType => builder.booleanType()
           case ByteType | ShortType | IntegerType => builder.intType()
           case LongType => builder.longType()
    -      case DateType => builder
    -        .intBuilder()
    -        .prop(LogicalType.LOGICAL_TYPE_PROP, LogicalTypes.date().getName)
    -        .endInt()
    +      case DateType =>
    +        LogicalTypes.date().addToSchema(builder.intType())
           case TimestampType =>
             val timestampType = outputTimestampType match {
               case AvroOutputTimestampType.TIMESTAMP_MILLIS => 
LogicalTypes.timestampMillis()
               case AvroOutputTimestampType.TIMESTAMP_MICROS => 
LogicalTypes.timestampMicros()
               case other =>
                 throw new IncompatibleSchemaException(s"Unexpected output 
timestamp type $other.")
             }
    -        builder.longBuilder().prop(LogicalType.LOGICAL_TYPE_PROP, 
timestampType.getName).endLong()
    +        timestampType.addToSchema(builder.longType())
     
           case FloatType => builder.floatType()
           case DoubleType => builder.doubleType()
    -      case _: DecimalType | StringType => builder.stringType()
    +      case StringType => builder.stringType()
    +      case d: DecimalType =>
    +        val avroType = LogicalTypes.decimal(d.precision, d.scale)
    +        val fixedSize = minBytesForPrecision(d.precision)
    +        // Use random name to avoid conflict in naming of fixed field.
    +        // Field names must start with [A-Za-z_], while the charset of 
Random.alphanumeric contains
    +        // [0-9]. So add a single character "f" to ensure the name is 
valid.
    +        val name = "f" + Random.alphanumeric.take(32).mkString("")
    --- End diff --
    
    Can we use `recordName` here?


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to