Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20062#discussion_r158717586
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statsEstimation/EstimationUtils.scala ---
    @@ -89,29 +89,29 @@ object EstimationUtils {
       }
     
       /**
    -   * For simplicity we use Decimal to unify operations for data types whose min/max values can be
    +   * For simplicity we use Double to unify operations for data types whose min/max values can be
        * represented as numbers, e.g. Boolean can be represented as 0 (false) or 1 (true).
        * The two methods below are the contract of conversion.
        */
    -  def toDecimal(value: Any, dataType: DataType): Decimal = {
    +  def toDouble(value: Any, dataType: DataType): Double = {
         dataType match {
    -      case _: NumericType | DateType | TimestampType => Decimal(value.toString)
    -      case BooleanType => if (value.asInstanceOf[Boolean]) Decimal(1) else Decimal(0)
    +      case _: NumericType | DateType | TimestampType => value.toString.toDouble
    +      case BooleanType => if (value.asInstanceOf[Boolean]) 1 else 0
         }
       }
     
    -  def fromDecimal(dec: Decimal, dataType: DataType): Any = {
    +  def fromDouble(double: Double, dataType: DataType): Any = {
         dataType match {
    -      case BooleanType => dec.toLong == 1
    -      case DateType => dec.toInt
    -      case TimestampType => dec.toLong
    -      case ByteType => dec.toByte
    -      case ShortType => dec.toShort
    -      case IntegerType => dec.toInt
    -      case LongType => dec.toLong
    -      case FloatType => dec.toFloat
    -      case DoubleType => dec.toDouble
    -      case _: DecimalType => dec
    +      case BooleanType => double.toLong == 1
    --- End diff ---
    
    maybe `toInt`?
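    
    For context, a minimal standalone sketch of the boolean round-trip being discussed (a sketch only: it assumes `org.apache.spark.sql.types` is on the classpath, and `BooleanRoundTripSketch` / `booleanFromDouble` are hypothetical names, not part of this patch). Since `toDouble` can only produce 0.0 or 1.0 for a boolean, comparing the `toInt` projection against 1 behaves the same as the current `toLong` comparison:
    
    ```scala
    import org.apache.spark.sql.types._
    
    object BooleanRoundTripSketch {
      // Mirrors the toDouble contract from the diff: booleans map to 0.0 or 1.0.
      def toDouble(value: Any, dataType: DataType): Double = dataType match {
        case _: NumericType | DateType | TimestampType => value.toString.toDouble
        case BooleanType => if (value.asInstanceOf[Boolean]) 1 else 0
      }
    
      // Hypothetical variant of the fromDouble boolean case using the suggested
      // `toInt`: the only values that can appear here are 0.0 and 1.0, so the Int
      // projection compared against 1 gives the same answer as the Long projection.
      def booleanFromDouble(double: Double): Boolean = double.toInt == 1
    
      def main(args: Array[String]): Unit = {
        // Round-trip both boolean values through the Double representation.
        Seq(true, false).foreach { b =>
          val d = toDouble(b, BooleanType)
          assert(booleanFromDouble(d) == b, s"round-trip failed for $b")
        }
        println("boolean round-trip through Double holds with toInt")
      }
    }
    ```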

