Github user maropu commented on a diff in the pull request: https://github.com/apache/spark/pull/19999#discussion_r157337772 --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcRelationProvider.scala --- @@ -45,13 +45,51 @@ class JdbcRelationProvider extends CreatableRelationProvider assert(lowerBound.nonEmpty && upperBound.nonEmpty && numPartitions.nonEmpty, s"When 'partitionColumn' is specified, '$JDBC_LOWER_BOUND', '$JDBC_UPPER_BOUND', and " + s"'$JDBC_NUM_PARTITIONS' are also required") - JDBCPartitioningInfo( + JDBCPartitioningInfo(resolvePartitionColumnType(parameters), partitionColumn.get, lowerBound.get, upperBound.get, numPartitions.get) } val parts = JDBCRelation.columnPartition(partitionInfo) JDBCRelation(parts, jdbcOptions)(sqlContext.sparkSession) } + def resolvePartitionColumnType(parameters: Map[String, String]): Int = { --- End diff -- If you want a column type, how about using `JDBCRDD.resolveTable`?
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org