This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new c3872cd4f1b7 [SPARK-46818][SQL] Improve error messages for Range with non-foldable inputs
c3872cd4f1b7 is described below

commit c3872cd4f1b7c27956c067da11a26ae5e2829ed3
Author: allisonwang-db <allison.w...@databricks.com>
AuthorDate: Thu Jan 25 10:30:03 2024 +0300

    [SPARK-46818][SQL] Improve error messages for Range with non-foldable inputs
    
    ### What changes were proposed in this pull request?
    
    This PR improves the error messages for the `Range` function when the input arguments are not foldable.
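
    The core of the change is a foldability check that runs before an argument is cast and evaluated, so the failure surfaces as a named analysis error instead of an internal one. Below is a minimal, self-contained sketch of that pattern; the `Expression` trait, `Literal`, and `ScalarSubquery` here are simplified stand-ins for illustration, not Spark's actual classes.

    ```scala
    // Simplified model of the guard: reject non-foldable arguments up front with a
    // descriptive error instead of failing later while trying to evaluate them.
    trait Expression {
      def foldable: Boolean
      def eval(): Any
    }

    case class Literal(value: Long) extends Expression {
      val foldable = true
      def eval(): Any = value
    }

    case class ScalarSubquery() extends Expression {
      val foldable = false
      def eval(): Any = sys.error("cannot be evaluated eagerly")
    }

    def toLong(funcName: String, paramName: String, expression: Expression): Long = {
      if (!expression.foldable) {
        throw new IllegalArgumentException(
          s"[NON_FOLDABLE_ARGUMENT] The function `$funcName` requires the parameter " +
          s"""`$paramName` to be a foldable expression of the type "BIGINT".""")
      }
      expression.eval().asInstanceOf[Long]
    }

    // toLong("range", "end", Literal(1L))       // ok: 1
    // toLong("range", "end", ScalarSubquery())  // fails with NON_FOLDABLE_ARGUMENT
    ```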
    
    ### Why are the changes needed?
    
    To make error messages more user-friendly.
    For example, before this PR, Spark throws an internal error:
    
    ```
    SELECT * FROM range(0, (select 1));
    [INTERNAL_ERROR] Cannot evaluate expression: scalar-subquery#1306 [] SQLSTATE: XX000
    ```
    After this PR, the error message will be:
    ```
    [NON_FOLDABLE_ARGUMENT] The function `range` requires the parameter `end` to be a foldable expression of the type "BIGINT", but the actual argument is a non-foldable. SQLSTATE: 42K08; line 1 pos 14
    ```
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, this PR improves an error message.
    
    ### How was this patch tested?
    
    New SQL tests.
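
    The new behavior can also be checked interactively; this is a usage sketch rather than part of the added tests, and the exact position info in the error output may differ:

    ```scala
    // In spark-shell, the first query from the new SQL tests should now fail at
    // analysis time with NON_FOLDABLE_ARGUMENT (SQLSTATE 42K08) instead of an
    // internal error.
    spark.sql("select * from range(0, (select 1))").collect()
    ```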
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #44857 from allisonwang-db/spark-46818-fix-range.
    
    Authored-by: allisonwang-db <allison.w...@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../plans/logical/basicLogicalOperators.scala      | 28 +++++++++----
 .../table-valued-functions.sql.out                 | 44 ++++++++++++++++++++
 .../sql-tests/inputs/table-valued-functions.sql    |  4 ++
 .../results/table-valued-functions.sql.out         | 48 ++++++++++++++++++++++
 4 files changed, 116 insertions(+), 8 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
index fbbae16130c0..7c2dfd31f4e3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala
@@ -965,7 +965,12 @@ object Range {
     if (SQLConf.get.ansiEnabled) AnsiTypeCoercion else TypeCoercion
   }
 
-  private def castAndEval[T](expression: Expression, dataType: DataType, paramIndex: Int): T = {
+  private def castAndEval[T](
+      expression: Expression, dataType: DataType, paramIndex: Int, paramName: String): T = {
+    if (!expression.foldable) {
+      throw QueryCompilationErrors.nonFoldableArgumentError(
+        "range", paramName, dataType)
+    }
     typeCoercion.implicitCast(expression, dataType)
       .map(_.eval())
       .filter(_ != null)
@@ -975,11 +980,11 @@ object Range {
       }.asInstanceOf[T]
   }
 
-  def toLong(expression: Expression, paramIndex: Int): Long =
-    castAndEval[Long](expression, LongType, paramIndex)
+  def toLong(expression: Expression, paramIndex: Int, paramName: String): Long =
+    castAndEval[Long](expression, LongType, paramIndex, paramName)
 
-  def toInt(expression: Expression, paramIndex: Int): Int =
-    castAndEval[Int](expression, IntegerType, paramIndex)
+  def toInt(expression: Expression, paramIndex: Int, paramName: String): Int =
+    castAndEval[Int](expression, IntegerType, paramIndex, paramName)
 }
 
 @ExpressionDescription(
@@ -1025,12 +1030,19 @@ case class Range(
   require(step != 0, s"step ($step) cannot be 0")
 
   def this(start: Expression, end: Expression, step: Expression, numSlices: Expression) = {
-    this(Range.toLong(start, 1), Range.toLong(end, 2), Range.toLong(step, 3),
-      Some(Range.toInt(numSlices, 4)))
+    this(
+      Range.toLong(start, 1, "start"),
+      Range.toLong(end, 2, "end"),
+      Range.toLong(step, 3, "step"),
+      Some(Range.toInt(numSlices, 4, "numSlices")))
   }
 
   def this(start: Expression, end: Expression, step: Expression) =
-    this(Range.toLong(start, 1), Range.toLong(end, 2), Range.toLong(step, 3), None)
+    this(
+      Range.toLong(start, 1, "start"),
+      Range.toLong(end, 2, "end"),
+      Range.toLong(step, 3, "step"),
+      None)
 
   def this(start: Expression, end: Expression) = this(start, end, Literal.create(1L, LongType))
 
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out
index fe4c6ec26056..9ee8d741aed3 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out
@@ -154,6 +154,50 @@ Project [i#xL]
       +- Range (0, 2, step=1, splits=None)
 
 
+-- !query
+select * from range(0, (select 1))
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "42K08",
+  "messageParameters" : {
+    "funcName" : "`range`",
+    "paramName" : "`end`",
+    "paramType" : "\"BIGINT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 15,
+    "stopIndex" : 34,
+    "fragment" : "range(0, (select 1))"
+  } ]
+}
+
+
+-- !query
+select * from values (0, 1) t(c1, c2), lateral range(0, c2)
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "42K08",
+  "messageParameters" : {
+    "funcName" : "`range`",
+    "paramName" : "`end`",
+    "paramType" : "\"BIGINT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 48,
+    "stopIndex" : 59,
+    "fragment" : "range(0, c2)"
+  } ]
+}
+
+
 -- !query
 select * from explode(array(1, 2))
 -- !query analysis
diff --git a/sql/core/src/test/resources/sql-tests/inputs/table-valued-functions.sql b/sql/core/src/test/resources/sql-tests/inputs/table-valued-functions.sql
index 79d427bc2099..9bf41f71d47f 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/table-valued-functions.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/table-valued-functions.sql
@@ -31,6 +31,10 @@ select * from RaNgE(2);
 -- range call with alias
 select i from range(0, 2) t(i);
 
+-- range with non-foldable input
+select * from range(0, (select 1));
+select * from values (0, 1) t(c1, c2), lateral range(0, c2);
+
 -- explode
 select * from explode(array(1, 2));
 select * from explode(map('a', 1, 'b', 2));
diff --git a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
index a7e9ecd2543e..358217d3b190 100644
--- a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
@@ -191,6 +191,54 @@ struct<i:bigint>
 1
 
 
+-- !query
+select * from range(0, (select 1))
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "42K08",
+  "messageParameters" : {
+    "funcName" : "`range`",
+    "paramName" : "`end`",
+    "paramType" : "\"BIGINT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 15,
+    "stopIndex" : 34,
+    "fragment" : "range(0, (select 1))"
+  } ]
+}
+
+
+-- !query
+select * from values (0, 1) t(c1, c2), lateral range(0, c2)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "NON_FOLDABLE_ARGUMENT",
+  "sqlState" : "42K08",
+  "messageParameters" : {
+    "funcName" : "`range`",
+    "paramName" : "`end`",
+    "paramType" : "\"BIGINT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 48,
+    "stopIndex" : 59,
+    "fragment" : "range(0, c2)"
+  } ]
+}
+
+
 -- !query
 select * from explode(array(1, 2))
 -- !query schema

