srielau commented on code in PR #42985:
URL: https://github.com/apache/spark/pull/42985#discussion_r1334511376


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala:
##########
@@ -61,68 +62,97 @@ case class PrintToStderr(child: Expression) extends UnaryExpression {
 /**
  * Throw with the result of an expression (used for debugging).
  */
+// scalastyle:off line.size.limit
 @ExpressionDescription(
-  usage = "_FUNC_(expr) - Throws an exception with `expr`.",
+  usage = "_FUNC_( expr [, errorParams ]) - Throws a USER_RAISED_EXCEPTION 
with `expr` as message, or a defined error class in `expr` with a parameter 
map.",
   examples = """
     Examples:
       > SELECT _FUNC_('custom error message');
-       java.lang.RuntimeException
-       custom error message
+       [USER_RAISED_EXCEPTION] custom error message
+
+      > SELECT _FUNC_('VIEW_NOT_FOUND', Map('relationName' -> '`V1`'));
+       [VIEW_NOT_FOUND] The view `V1` cannot be found. ...
   """,
   since = "3.1.0",
   group = "misc_funcs")
-case class RaiseError(child: Expression, dataType: DataType)
-  extends UnaryExpression with ImplicitCastInputTypes {
+// scalastyle:on line.size.limit
+case class RaiseError(errorClass: Expression, errorParms: Expression, dataType: DataType)
+  extends BinaryExpression with ImplicitCastInputTypes {
+
+  def this(str: Expression) = {
+    this(Literal(
+      if (SQLConf.get.legacyNegativeIndexInArrayInsert) {

Review Comment:
   This was an abandoned attempt to put the logic in the wrong place; it has been removed.
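   For context, here is a minimal, self-contained sketch of exercising the two call shapes the new ExpressionDescription documents, via the SQL API. It assumes a Spark build containing this PR; the SQL map(...) literal syntax and the choice of error class are assumptions based on the examples above, not taken from this PR's tests.

   // Sketch only: exercises the single-argument and two-argument raise_error forms.
   import org.apache.spark.sql.SparkSession

   object RaiseErrorDemo {
     def main(args: Array[String]): Unit = {
       val spark = SparkSession.builder()
         .master("local[1]")
         .appName("raise_error-demo")
         .getOrCreate()

       // Single-argument form: the string becomes the USER_RAISED_EXCEPTION message.
       try {
         spark.sql("SELECT raise_error('custom error message')").collect()
       } catch {
         case e: Exception => println(e.getMessage)
       }

       // Two-argument form from this PR: an error class plus a parameter map
       // (SQL map syntax used here is an assumption; the ExpressionDescription
       // example above writes it as Map('relationName' -> '`V1`')).
       try {
         spark.sql("SELECT raise_error('VIEW_NOT_FOUND', map('relationName', '`V1`'))").collect()
       } catch {
         case e: Exception => println(e.getMessage)
       }

       spark.stop()
     }
   }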


