Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/17630#discussion_r111527728
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Statistics.scala ---
    @@ -104,22 +106,45 @@ case class ColumnStat(
       /**
        * Returns a map from string to string that can be used to serialize the column stats.
        * The key is the name of the field (e.g. "distinctCount" or "min"), and the value is the string
    -   * representation for the value. The deserialization side is defined in [[ColumnStat.fromMap]].
    +   * representation for the value. min/max values are converted to the external data type. For
    +   * example, for DateType we store java.sql.Date, and for TimestampType we store
    +   * java.sql.Timestamp. The deserialization side is defined in [[ColumnStat.fromMap]].
        *
        * As part of the protocol, the returned map always contains a key called "version".
        * In the case min/max values are null (None), they won't appear in the map.
        */
    -  def toMap: Map[String, String] = {
    +  def toMap(colName: String, dataType: DataType): Map[String, String] = {
         val map = new scala.collection.mutable.HashMap[String, String]
         map.put(ColumnStat.KEY_VERSION, "1")
         map.put(ColumnStat.KEY_DISTINCT_COUNT, distinctCount.toString)
         map.put(ColumnStat.KEY_NULL_COUNT, nullCount.toString)
         map.put(ColumnStat.KEY_AVG_LEN, avgLen.toString)
         map.put(ColumnStat.KEY_MAX_LEN, maxLen.toString)
    -    min.foreach { v => map.put(ColumnStat.KEY_MIN_VALUE, v.toString) }
    -    max.foreach { v => map.put(ColumnStat.KEY_MAX_VALUE, v.toString) }
    +    min.foreach { v => map.put(ColumnStat.KEY_MIN_VALUE, toExternalString(v, colName, dataType)) }
    +    max.foreach { v => map.put(ColumnStat.KEY_MAX_VALUE, toExternalString(v, colName, dataType)) }
         map.toMap
       }
    +
    +  /**
    +   * Converts the given value from Catalyst data type to string representation of external
    +   * data type.
    +   */
    +  private def toExternalString(v: Any, colName: String, dataType: DataType): String = {
    +    val externalValue = dataType match {
    +      case BooleanType => v.asInstanceOf[Boolean]
    +      case _: IntegralType => v.toString.toLong
    +      case DateType => DateTimeUtils.toJavaDate(v.toString.toInt)
    --- End diff --
    
    nit: `v.asInstanceOf[Int]`
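
    For context, a minimal sketch of what the suggested cast amounts to, outside the actual patch (the helper name below is made up for illustration; it only relies on Catalyst storing DateType values as an Int number of days since the epoch, which is what `DateTimeUtils.toJavaDate` takes):

    ```scala
    import org.apache.spark.sql.catalyst.util.DateTimeUtils

    // Hypothetical helper, not part of the PR: converts the Catalyst-internal
    // representation of a DateType value (Int days since 1970-01-01) to the
    // external string form, casting directly instead of going through
    // v.toString.toInt.
    def dateStatToExternalString(v: Any): String = {
      val days = v.asInstanceOf[Int]            // internal DateType representation
      DateTimeUtils.toJavaDate(days).toString   // e.g. "2017-04-14"
    }
    ```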

