This is an automated email from the ASF dual-hosted git repository. gengliang pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 17b85ff9756 [SPARK-39190][SQL] Provide query context for decimal precision overflow error when WSCG is off 17b85ff9756 is described below commit 17b85ff97569a43d7fd33863d17bfdaf62d539e0 Author: Gengliang Wang <gengli...@apache.org> AuthorDate: Mon May 16 17:44:53 2022 +0800 [SPARK-39190][SQL] Provide query context for decimal precision overflow error when WSCG is off ### What changes were proposed in this pull request? Similar to https://github.com/apache/spark/pull/36525, this PR provides query context for decimal precision overflow error when WSCG is off ### Why are the changes needed? Enhance the runtime error query context of checking decimal overflow. After changes, it works when the whole stage codegen is not available. ### Does this PR introduce _any_ user-facing change? NO ### How was this patch tested? UT Closes #36557 from gengliangwang/decimalContextWSCG. Authored-by: Gengliang Wang <gengli...@apache.org> Signed-off-by: Gengliang Wang <gengli...@apache.org> --- .../sql/catalyst/expressions/decimalExpressions.scala | 12 +++++++++--- .../test/scala/org/apache/spark/sql/SQLQuerySuite.scala | 16 ++++++++++++++++ 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala index 4a4b8e0fc0d..2cdd784ea4d 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalExpressions.scala @@ -128,7 +128,7 @@ case class PromotePrecision(child: Expression) extends UnaryExpression { case class CheckOverflow( child: Expression, dataType: DecimalType, - nullOnOverflow: Boolean) extends UnaryExpression { + nullOnOverflow: Boolean) extends UnaryExpression with SupportQueryContext { 
override def nullable: Boolean = true @@ -138,11 +138,11 @@ case class CheckOverflow( dataType.scale, Decimal.ROUND_HALF_UP, nullOnOverflow, - origin.context) + queryContext) override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val errorContextCode = if (nullOnOverflow) { - ctx.addReferenceObj("errCtx", origin.context) + ctx.addReferenceObj("errCtx", queryContext) } else { "\"\"" } @@ -163,6 +163,12 @@ case class CheckOverflow( override protected def withNewChildInternal(newChild: Expression): CheckOverflow = copy(child = newChild) + + override def initQueryContext(): String = if (nullOnOverflow) { + "" + } else { + origin.context + } } // A variant `CheckOverflow`, which treats null as overflow. This is necessary in `Sum`. diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala index 0355bd90d04..07ec4290250 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala @@ -4376,6 +4376,22 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark } } + test("SPARK-39190: Query context of decimal overflow error should be serialized to executors" + + " when WSCG is off") { + withSQLConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "false", + SQLConf.ANSI_ENABLED.key -> "true") { + withTable("t") { + sql("create table t(d decimal(38, 0)) using parquet") + sql("insert into t values (2e37BD)") + val query = "select d / 0.1 from t" + val msg = intercept[SparkException] { + sql(query).collect() + }.getMessage + assert(msg.contains(query)) + } + } + } + test("SPARK-38589: try_avg should return null if overflow happens before merging") { val yearMonthDf = Seq(Int.MaxValue, Int.MaxValue, 2) .map(Period.ofMonths) --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For 
additional commands, e-mail: commits-help@spark.apache.org