Github user mgaido91 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20350#discussion_r163184979
  
    --- Diff: 
sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala ---
    @@ -237,14 +238,26 @@ final class Decimal extends Ordered[Decimal] with 
Serializable {
       /**
        * Create new `Decimal` with given precision and scale.
        *
    -   * @return a non-null `Decimal` value if successful or `null` if 
overflow would occur.
    +   * @return a non-null `Decimal` value if successful. Otherwise, if 
`nullOnOverflow` is true, null
    +   *         is returned; if `nullOnOverflow` is false, an 
`ArithmeticException` is thrown.
        */
       private[sql] def toPrecision(
           precision: Int,
           scale: Int,
    -      roundMode: BigDecimal.RoundingMode.Value = ROUND_HALF_UP): Decimal = 
{
    +      roundMode: BigDecimal.RoundingMode.Value = ROUND_HALF_UP,
    +      nullOnOverflow: Boolean = true): Decimal = {
         val copy = clone()
    -    if (copy.changePrecision(precision, scale, roundMode)) copy else null
    +    if (copy.changePrecision(precision, scale, roundMode)) {
    +      copy
    +    } else {
    +      def message = s"$toDebugString cannot be represented as 
Decimal($precision, $scale)."
    +      if (nullOnOverflow) {
    +        if (log.isDebugEnabled) logDebug(s"$message NULL is returned.")
    +        null
    --- End diff --
    
    since @hvanhovell also suggested that this is not necessary, I am removing 
it, even though I think it would be good to have.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to