chenzhx commented on code in PR #36663: URL: https://github.com/apache/spark/pull/36663#discussion_r907174118
########## sql/core/src/main/scala/org/apache/spark/sql/catalyst/util/V2ExpressionBuilder.scala: ########## @@ -254,6 +254,55 @@ class V2ExpressionBuilder(e: Expression, isPredicate: Boolean = false) { } else { None } + case date: DateAdd => + val childrenExpressions = date.children.flatMap(generateExpression(_)) + if (childrenExpressions.length == date.children.length) { + Some(new GeneralScalarExpression("DATE_ADD", childrenExpressions.toArray[V2Expression])) + } else { + None + } + case date: DateDiff => + val childrenExpressions = date.children.flatMap(generateExpression(_)) + if (childrenExpressions.length == date.children.length) { + Some(new GeneralScalarExpression("DATE_DIFF", childrenExpressions.toArray[V2Expression])) + } else { + None + } + case date: TruncDate => + val childrenExpressions = date.children.flatMap(generateExpression(_)) + if (childrenExpressions.length == date.children.length) { + Some(new GeneralScalarExpression("TRUNC", childrenExpressions.toArray[V2Expression])) + } else { + None + } + case Second(child, _) => + generateExpression(child).map(v => new V2Extract("SECOND", v)) + case Minute(child, _) => + generateExpression(child).map(v => new V2Extract("MINUTE", v)) + case Hour(child, _) => + generateExpression(child).map(v => new V2Extract("HOUR", v)) + case Month(child) => + generateExpression(child).map(v => new V2Extract("MONTH", v)) + case Quarter(child) => + generateExpression(child).map(v => new V2Extract("QUARTER", v)) + case Year(child) => + generateExpression(child).map(v => new V2Extract("YEAR", v)) + // The DAY_OF_WEEK function in Spark returns the day of the week for date/timestamp. + // Database dialects should avoid following ISO semantics when handling DAY_OF_WEEK. Review Comment: OK -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org