GitHub user viirya commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20023#discussion_r158207539
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/types/DecimalType.scala ---
    @@ -136,10 +137,54 @@ object DecimalType extends AbstractDataType {
         case DoubleType => DoubleDecimal
       }
     
    +  private[sql] def forLiteral(literal: Literal): DecimalType = literal.value match {
    +    case v: Short => fromBigDecimal(BigDecimal(v))
    +    case v: Int => fromBigDecimal(BigDecimal(v))
    +    case v: Long => fromBigDecimal(BigDecimal(v))
    +    case _ => forType(literal.dataType)
    +  }
    +
    +  private[sql] def fromBigDecimal(d: BigDecimal): DecimalType = {
    +    DecimalType(Math.max(d.precision, d.scale), d.scale)
    +  }
    +
       private[sql] def bounded(precision: Int, scale: Int): DecimalType = {
         DecimalType(min(precision, MAX_PRECISION), min(scale, MAX_SCALE))
       }
     
    +  // scalastyle:off line.size.limit
    +  /**
    +   * The decimal implementation is based on Hive's, which is itself inspired by SQL Server's.
    +   * In particular, when a result precision is greater than {@link #MAX_PRECISION}, the
    +   * corresponding scale is reduced to prevent the integral part of a result from being truncated.
    +   *
    +   * For further reference, please see
    +   * https://blogs.msdn.microsoft.com/sqlprogrammability/2006/03/29/multiplication-and-division-with-numerics/.
    --- End diff --
    
Not sure if this blog link will remain available for a long time.
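
As a side note for readers of the thread, here is a minimal sketch (plain Scala, not part of the patch) of the type the new `fromBigDecimal` helper would derive for a couple of sample values, assuming the `DecimalType(precision, scale)` factory shown in the diff. The `max(precision, scale)` term matters for small fractions, where a `BigDecimal`'s precision can be smaller than its scale.

    import org.apache.spark.sql.types.DecimalType

    // An integral literal: BigDecimal(12345L) has precision 5 and scale 0,
    // so the derived type is DecimalType(5, 0).
    val i = BigDecimal(12345L)
    val intType = DecimalType(math.max(i.precision, i.scale), i.scale)

    // A small fraction: BigDecimal("0.001") has precision 1 but scale 3.
    // Without the max() the precision would be smaller than the scale,
    // which Spark's DecimalType does not allow; with it we get DecimalType(3, 3).
    val f = BigDecimal("0.001")
    val fracType = DecimalType(math.max(f.precision, f.scale), f.scale)

    println(intType)  // DecimalType(5,0)
    println(fracType) // DecimalType(3,3)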

