Github user cloud-fan commented on a diff in the pull request: https://github.com/apache/spark/pull/16138#discussion_r97976102 --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala --- @@ -1047,6 +1048,64 @@ case class ToDate(child: Expression) extends UnaryExpression with ImplicitCastIn } /** + * Parses a column to a date based on the given format. + */ +// scalastyle:off line.size.limit +@ExpressionDescription( + usage = "_FUNC_(date_str, fmt) - Parses the `left` expression with the `fmt` expression. Returns null with invalid input.", + extended = """ + Examples: + > SELECT _FUNC_('2016-12-31', 'yyyy-MM-dd'); + 2016-12-31 + """) +// scalastyle:on line.size.limit +case class ParseToDate(left: Expression, format: Expression, child: Expression) + extends RuntimeReplaceable { + + def this(left: Expression, format: Expression) = { + this(left, format, Cast(Cast(new UnixTimestamp(left, format), TimestampType), DateType)) + } + + def this(left: Expression) = { + // RuntimeReplaceable forces the signature, the second value + // is ignored completely + this(left, Literal(""), ToDate(left)) + } + + override def flatArguments: Iterator[Any] = Iterator(left, format) + override def sql: String = s"$prettyName(${left.sql}, ${format.sql})" --- End diff -- This is the problem: you set `format` to an empty string at https://github.com/apache/spark/pull/16138/files#diff-b83497f7bc11578a0b63a814a2a30f48R1072 , but this is not safe. Although the `format` will be ignored, we use it to build the SQL string of this expression, which may be something like `to_date('2012-12-12 12:12:12', '')`. We should make `format` an `Option[String]`, and generate a correct SQL string here.
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastructure@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org