teeyog commented on a change in pull request #2431: URL: https://github.com/apache/hudi/pull/2431#discussion_r563665044
########## File path: hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/hudi/DataSourceOptions.scala ########## @@ -181,16 +183,33 @@ object DataSourceWriteOptions { @Deprecated val DEFAULT_STORAGE_TYPE_OPT_VAL = COW_STORAGE_TYPE_OPT_VAL - def translateStorageTypeToTableType(optParams: Map[String, String]) : Map[String, String] = { + def translateOptParams(optParams: Map[String, String]): Map[String, String] = { + // translate StorageType to TableType + var newOptParams = optParams if (optParams.contains(STORAGE_TYPE_OPT_KEY) && !optParams.contains(TABLE_TYPE_OPT_KEY)) { log.warn(STORAGE_TYPE_OPT_KEY + " is deprecated and will be removed in a later release; Please use " + TABLE_TYPE_OPT_KEY) - optParams ++ Map(TABLE_TYPE_OPT_KEY -> optParams(STORAGE_TYPE_OPT_KEY)) - } else { - optParams + newOptParams = optParams ++ Map(TABLE_TYPE_OPT_KEY -> optParams(STORAGE_TYPE_OPT_KEY)) } + // translate the api partitionBy of spark DataFrameWriter to PARTITIONPATH_FIELD_OPT_KEY + if (optParams.contains(SparkDataSourceUtils.PARTITIONING_COLUMNS_KEY) && !optParams.contains(PARTITIONPATH_FIELD_OPT_KEY)) { + val partitionColumns = optParams.get(SparkDataSourceUtils.PARTITIONING_COLUMNS_KEY) + .map(SparkDataSourceUtils.decodePartitioningColumns) + .getOrElse(Nil) + + val keyGeneratorClass = optParams.getOrElse(DataSourceWriteOptions.KEYGENERATOR_CLASS_OPT_KEY, + DataSourceWriteOptions.DEFAULT_KEYGENERATOR_CLASS_OPT_VAL) + val partitionPathField = + keyGeneratorClass match { + case "org.apache.hudi.keygen.CustomKeyGenerator" => + partitionColumns.map(e => s"$e:SIMPLE").mkString(",") Review comment: Yes — now, if the parameters include ```TIMESTAMP_TYPE_FIELD_PROP``` and ```TIMESTAMP_OUTPUT_DATE_FORMAT_PROP```, the TIMESTAMP partition type is used by default; otherwise, SIMPLE is used. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. 
To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org