maropu commented on a change in pull request #25239: [SPARK-28495][SQL] AssignableCast: A new type coercion following store assignment rules of ANSI SQL
URL: https://github.com/apache/spark/pull/25239#discussion_r306773713
 
 

 ##########
 File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
 ##########
 @@ -2665,6 +2666,38 @@ class Analyzer(
       }
     }
   }
+
+  /**
 +   * Replace the [[AssignableCast]] expression by [[Cast]], and throw exceptions if the cast is
+   * not assignable.
+   */
+  object ResolveAssignableCast extends Rule[LogicalPlan] {
 +    private def fail(from: Expression, to: DataType, walkedTypePath: Seq[String]) = {
+      val fromStr = from match {
+        case l: LambdaVariable => "array element"
+        case e => e.sql
+      }
+      throw new AnalysisException(s"Cannot assign $fromStr from " +
+        s"${from.dataType.catalogString} to ${to.catalogString}.\n" +
 +        "The type path of the target object is:\n" + walkedTypePath.mkString("", "\n", "\n") +
+        "You can add an explicit cast to the input data.")
+    }
+
+    def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
+      case p if !p.childrenResolved => p
+      case p if p.resolved => p
+
+      case p => p transformExpressions {
+        case u @ AssignableCast(child, _, _) if !child.resolved => u
+
+        case AssignableCast(child, dataType, walkedTypePath)
+          if !Cast.canAssign(child.dataType, dataType) =>
+          fail(child, dataType, walkedTypePath)
+
 +        case AssignableCast(child, dataType, _) => Cast(child, dataType.asNullable)
 
 Review comment:
   In the current PR, is the result null in out-of-range cases?
   For example, in the case of `int->short` casts, it seems `Cast` just returns a weird value for an out-of-range value:
   
https://github.com/apache/spark/blob/167fa0402dab0800e9762d63d29bd6d9892fa9a8/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala#L479
   ```
   scala> sql("create table t (s short) using parquet")
   scala> sql("insert into t values (int(32768))")
   // InsertIntoTable Relation[s#12] parquet, false, false
   // +- Project [cast(col1#31 as smallint) AS s#32]
   //    +- LocalRelation [col1#31]
   scala> sql("select * from t").show
   +------+
   |     s|
   +------+
   |-32768|
   +------+
   ```
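
   For context on why `-32768` shows up: the linked `Cast` code path narrows the int via plain JVM two's-complement truncation rather than returning null or failing. Below is a minimal sketch in plain Scala (no Spark required) of that behaviour; the `castToShortOrFail` helper is purely hypothetical and only illustrates the kind of strict check ANSI store assignment suggests, it is not part of this PR or of Spark's API.
   ```scala
   // Sketch of the narrowing the reviewed code path performs for int -> short.
   object TruncationSketch {
     def main(args: Array[String]): Unit = {
       val outOfRange: Int = 32768          // Short.MaxValue is 32767
       val narrowed: Short = outOfRange.toShort
       println(narrowed)                    // prints -32768, matching the table above

       // Hypothetical strict alternative (not Spark API): reject instead of wrapping.
       def castToShortOrFail(v: Int): Short =
         if (v < Short.MinValue || v > Short.MaxValue) {
           throw new ArithmeticException(s"value $v out of range for SHORT")
         } else {
           v.toShort
         }

       println(castToShortOrFail(123))      // prints 123
     }
   }
   ```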
