maropu commented on a change in pull request #25239: [SPARK-28495][SQL] AssignableCast: A new type coercion following store assignment rules of ANSI SQL URL: https://github.com/apache/spark/pull/25239#discussion_r306730996
########## File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala ########## @@ -2665,6 +2666,38 @@ class Analyzer( } } } + + /** + * Replace the [[AssignableCast]] expression by [[Cast]], and throw exceptions if the cast is + * not assignable. + */ + object ResolveAssignableCast extends Rule[LogicalPlan] { + private def fail(from: Expression, to: DataType, walkedTypePath: Seq[String]) = { + val fromStr = from match { + case l: LambdaVariable => "array element" + case e => e.sql + } + throw new AnalysisException(s"Cannot assign $fromStr from " + + s"${from.dataType.catalogString} to ${to.catalogString}.\n" + + "The type path of the target object is:\n" + walkedTypePath.mkString("", "\n", "\n") + + "You can add an explicit cast to the input data.") + } + + def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp { + case p if !p.childrenResolved => p + case p if p.resolved => p + + case p => p transformExpressions { + case u @ AssignableCast(child, _, _) if !child.resolved => u + + case AssignableCast(child, dataType, walkedTypePath) + if !Cast.canAssign(child.dataType, dataType) => + fail(child, dataType, walkedTypePath) + + case AssignableCast(child, dataType, _) => Cast(child, dataType.asNullable) Review comment: We don't need rounding/truncating/overflow checks here for some cases, e.g., int->short, double->float? ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: users@infra.apache.org With regards, Apache Git Services --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org