This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new a7fb330  [SPARK-31468][SQL] Null types should be implicitly casted to Decimal types
a7fb330 is described below

commit a7fb330ed38e79b91a4dc156b977a5bd36e74010
Author: Takeshi Yamamuro <yamam...@apache.org>
AuthorDate: Fri Apr 17 14:11:17 2020 +0000

    [SPARK-31468][SQL] Null types should be implicitly casted to Decimal types
    
    ### What changes were proposed in this pull request?
    
    This PR intends to fix a bug that occurs when comparing null types to decimal types in master/branch-3.0:
    ```
    scala> Seq(BigDecimal(10)).toDF("v1").selectExpr("v1 = NULL").explain(true)
    org.apache.spark.sql.AnalysisException: cannot resolve '(`v1` = NULL)' due to data type mismatch: differing types in '(`v1` = NULL)' (decimal(38,18) and null).; line 1 pos 0;
    'Project [(v1#5 = null) AS (v1 = NULL)#7]
    +- Project [value#2 AS v1#5]
       +- LocalRelation [value#2]
    ...
    ```
    The query above passed in v2.4.5.
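
    For context, here is a minimal standalone sketch of the coercion check this patch adds. It mirrors the `canHandleTypeCoercion` predicate in the diff below; the free-standing function is for illustration only (e.g. pasted into a spark-shell session), not the rule itself:

    ```scala
    import org.apache.spark.sql.types._

    // Illustration only: mirrors the predicate added to ImplicitTypeCasts in this patch.
    // A (decimal, null) pair in either order is now coerced by ImplicitTypeCasts itself;
    // all other combinations involving DecimalType are still left to DecimalPrecision.
    def canHandleTypeCoercion(leftType: DataType, rightType: DataType): Boolean =
      (leftType, rightType) match {
        case (_: DecimalType, NullType) => true
        case (NullType, _: DecimalType) => true
        case _ =>
          !leftType.isInstanceOf[DecimalType] && !rightType.isInstanceOf[DecimalType] &&
            leftType != rightType
      }

    canHandleTypeCoercion(DecimalType(38, 18), NullType)     // true: the case fixed here
    canHandleTypeCoercion(DecimalType(38, 18), IntegerType)  // false: left to DecimalPrecision
    canHandleTypeCoercion(IntegerType, LongType)             // true: ordinary implicit cast
    ```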
    
    ### Why are the changes needed?
    
    Bugfix: `ImplicitTypeCasts` skipped any binary operator with a `DecimalType` operand on the assumption that `DecimalPrecision` would handle it, but `DecimalPrecision` does not add a cast for a (decimal, null) pair, so the comparison above fails to resolve.
    
    ### Does this PR introduce any user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Added tests to `TypeCoercionSuite`.
    
    Closes #28241 from maropu/SPARK-31468.
    
    Authored-by: Takeshi Yamamuro <yamam...@apache.org>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../spark/sql/catalyst/analysis/TypeCoercion.scala    | 19 +++++++++++++++----
 .../sql/catalyst/analysis/TypeCoercionSuite.scala     | 18 ++++++++++++++++++
 2 files changed, 33 insertions(+), 4 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
index eb9a4d4..c6e3f56 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
@@ -842,15 +842,26 @@ object TypeCoercion {
    * Casts types according to the expected input types for [[Expression]]s.
    */
   object ImplicitTypeCasts extends TypeCoercionRule {
+
+    private def canHandleTypeCoercion(leftType: DataType, rightType: DataType): Boolean = {
+      (leftType, rightType) match {
+        case (_: DecimalType, NullType) => true
+        case (NullType, _: DecimalType) => true
+        case _ =>
+          // If DecimalType operands are involved except for the two cases above,
+          // DecimalPrecision will handle it.
+          !leftType.isInstanceOf[DecimalType] && !rightType.isInstanceOf[DecimalType] &&
+            leftType != rightType
+      }
+    }
+
     override protected def coerceTypes(
         plan: LogicalPlan): LogicalPlan = plan resolveExpressions {
       // Skip nodes who's children have not been resolved yet.
       case e if !e.childrenResolved => e
 
-      // If DecimalType operands are involved, DecimalPrecision will handle it
-      case b @ BinaryOperator(left, right) if !left.dataType.isInstanceOf[DecimalType] &&
-          !right.dataType.isInstanceOf[DecimalType] &&
-          left.dataType != right.dataType =>
+      case b @ BinaryOperator(left, right)
+          if canHandleTypeCoercion(left.dataType, right.dataType) =>
         findTightestCommonType(left.dataType, right.dataType).map { commonType =>
           if (b.inputType.acceptsType(commonType)) {
             // If the expression accepts the tightest common type, cast to that.
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
index ab21a9e..e37555f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
@@ -1541,6 +1541,24 @@ class TypeCoercionSuite extends AnalysisTest {
       Multiply(CaseWhen(Seq((EqualTo(1, 2), Cast(1, DecimalType(34, 24)))),
         Cast(100, DecimalType(34, 24))), Cast(1, IntegerType)))
   }
+
+  test("SPARK-31468: null types should be casted to decimal types in ImplicitTypeCasts") {
+    Seq(AnyTypeBinaryOperator(_, _), NumericTypeBinaryOperator(_, _)).foreach { binaryOp =>
+      // binaryOp(decimal, null) case
+      ruleTest(TypeCoercion.ImplicitTypeCasts,
+        binaryOp(Literal.create(null, DecimalType.SYSTEM_DEFAULT),
+          Literal.create(null, NullType)),
+        binaryOp(Literal.create(null, DecimalType.SYSTEM_DEFAULT),
+          Cast(Literal.create(null, NullType), DecimalType.SYSTEM_DEFAULT)))
+
+      // binaryOp(null, decimal) case
+      ruleTest(TypeCoercion.ImplicitTypeCasts,
+        binaryOp(Literal.create(null, NullType),
+          Literal.create(null, DecimalType.SYSTEM_DEFAULT)),
+        binaryOp(Cast(Literal.create(null, NullType), DecimalType.SYSTEM_DEFAULT),
+          Literal.create(null, DecimalType.SYSTEM_DEFAULT)))
+    }
+  }
 }
 
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
