Github user ueshin commented on a diff in the pull request: https://github.com/apache/spark/pull/16308#discussion_r97221574 --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala --- @@ -69,49 +69,68 @@ object DateTimeUtils { } } - // Java TimeZone has no mention of thread safety. Use thread local instance to be safe. - private val threadLocalLocalTimeZone = new ThreadLocal[TimeZone] { - override protected def initialValue: TimeZone = { - Calendar.getInstance.getTimeZone - } - } - // `SimpleDateFormat` is not thread-safe. - val threadLocalTimestampFormat = new ThreadLocal[DateFormat] { + private val threadLocalTimestampFormat = new ThreadLocal[DateFormat] { override def initialValue(): SimpleDateFormat = { new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US) } } + def getThreadLocalTimestampFormat(timeZone: TimeZone): DateFormat = { + val sdf = threadLocalTimestampFormat.get() + sdf.setTimeZone(timeZone) + sdf + } + // `SimpleDateFormat` is not thread-safe. private val threadLocalDateFormat = new ThreadLocal[DateFormat] { override def initialValue(): SimpleDateFormat = { new SimpleDateFormat("yyyy-MM-dd", Locale.US) } } + def getThreadLocalDateFormat(): DateFormat = { + val sdf = threadLocalDateFormat.get() + sdf.setTimeZone(defaultTimeZone()) + sdf + } + // we should use the exact day as Int, for example, (year, month, day) -> day def millisToDays(millisUtc: Long): SQLDate = { --- End diff -- Yes, I think so except for JSON/CSV datasources or partition value related codes, which will be fixed by the follow-up PRs.
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastructure@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org