MaxGekk commented on a change in pull request #28576:
URL: https://github.com/apache/spark/pull/28576#discussion_r427087787



##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeFormatterHelper.scala
##########
@@ -31,17 +31,39 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy._
 
 trait DateTimeFormatterHelper {
+  private def getOrDefault(accessor: TemporalAccessor, field: ChronoField, 
default: Int): Int = {
+    if (accessor.isSupported(field)) {
+      accessor.get(field)
+    } else {
+      default
+    }
+  }
+
+  protected def toLocalDate(temporalAccessor: TemporalAccessor): LocalDate = {
+    val year = getOrDefault(temporalAccessor, ChronoField.YEAR, 1970)
+    val month = getOrDefault(temporalAccessor, ChronoField.MONTH_OF_YEAR, 1)
+    val day = getOrDefault(temporalAccessor, ChronoField.DAY_OF_MONTH, 1)
+    LocalDate.of(year, month, day)
+  }
+
   // Converts the parsed temporal object to ZonedDateTime. It sets time 
components to zeros
   // if they does not exist in the parsed object.
   protected def toZonedDateTime(

Review comment:
       I guess the additional function calls will slow down overall 
performance. I think it makes sense to re-run the datetime benchmarks.

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeFormatterHelper.scala
##########
@@ -31,17 +31,39 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy._
 
 trait DateTimeFormatterHelper {
+  private def getOrDefault(accessor: TemporalAccessor, field: ChronoField, 
default: Int): Int = {
+    if (accessor.isSupported(field)) {
+      accessor.get(field)
+    } else {
+      default
+    }
+  }
+
+  protected def toLocalDate(temporalAccessor: TemporalAccessor): LocalDate = {
+    val year = getOrDefault(temporalAccessor, ChronoField.YEAR, 1970)

Review comment:
       How often do users consider 1970 to be the default year, in your estimation? I 
would propose the current year instead, as we do in stringToTimestamp:
   
https://github.com/apache/spark/blob/ebc8fa50d0422f3db47b2c45025c7f2efe6ee39a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala#L396




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to