turboFei commented on a change in pull request #25701: [SPARK-29000][SQL] Decimal precision overflow when don't allow precision loss
URL: https://github.com/apache/spark/pull/25701#discussion_r321649874
 
 

 ##########
 File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
 ##########
 @@ -862,17 +862,23 @@ object TypeCoercion {
       case e if !e.childrenResolved => e
 
       case b @ BinaryOperator(left, right) if left.dataType != right.dataType =>
 -        findTightestCommonType(left.dataType, right.dataType).map { commonType =>
 -          if (b.inputType.acceptsType(commonType)) {
 -            // If the expression accepts the tightest common type, cast to that.
 -            val newLeft = if (left.dataType == commonType) left else Cast(left, commonType)
 -            val newRight = if (right.dataType == commonType) right else Cast(right, commonType)
 -            b.withNewChildren(Seq(newLeft, newRight))
 -          } else {
 -            // Otherwise, don't do anything with the expression.
 -            b
 -          }
 -        }.getOrElse(b)  // If there is no applicable conversion, leave expression unchanged.
 +        (left, right) match {
 +          // Skip to handle decimals
 +          case (l, r) if l.dataType.isInstanceOf[DecimalType] ||
 Review comment:
   done.
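
  For context, the diff above drops decimal operands out of the generic BinaryOperator promotion so that the dedicated DecimalPrecision rule can choose the result type instead of the tightest-common-type cast. Below is a minimal, self-contained sketch of that dispatch pattern only; it is not Spark code, and all names (SimpleType, DecimalT, CoercionSketch, tightestCommonType) are illustrative stand-ins for the real TypeCoercion machinery.

```scala
// Sketch of the coercion dispatch the diff introduces: binary-operator
// coercion bails out for decimal operands and leaves them to a separate
// decimal-precision rule; everything else keeps the old promotion.
sealed trait SimpleType
case object IntT extends SimpleType
case object LongT extends SimpleType
case class DecimalT(precision: Int, scale: Int) extends SimpleType

object CoercionSketch {
  // Stand-in for findTightestCommonType: generic promotion, e.g. Int + Long -> Long.
  private def tightestCommonType(l: SimpleType, r: SimpleType): Option[SimpleType] =
    (l, r) match {
      case (IntT, LongT) | (LongT, IntT) => Some(LongT)
      case (a, b) if a == b              => Some(a)
      case _                             => None
    }

  // The pattern from the diff: decimals are skipped here (returning None means
  // "defer to the decimal-precision rule"); other operand pairs use the old path.
  def coerceBinaryOperands(l: SimpleType, r: SimpleType): Option[SimpleType] =
    (l, r) match {
      case (_: DecimalT, _) | (_, _: DecimalT) => None
      case _                                   => tightestCommonType(l, r)
    }
}

object CoercionSketchDemo extends App {
  println(CoercionSketch.coerceBinaryOperands(IntT, LongT))            // Some(LongT)
  println(CoercionSketch.coerceBinaryOperands(DecimalT(38, 18), IntT)) // None: decimal rule decides
}
```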

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
