This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new bccbdb72b004 [SPARK-46739][SQL] Add the error class `UNSUPPORTED_CALL`
bccbdb72b004 is described below

commit bccbdb72b004f90ed8605d5bcaaf8b4605d8d099
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Wed Jan 17 15:41:01 2024 +0300

    [SPARK-46739][SQL] Add the error class `UNSUPPORTED_CALL`
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to add a new error class for unsupported method
    calls, and to remove similar legacy error classes. The new `apply()`
    method of `SparkUnsupportedOperationException` extracts the method and
    class names from the stack trace automatically, and places them into the
    error class's message parameters.
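    
    As a rough illustration (this override mirrors an actual call site in
    this diff, `ProjectingInternalRow.scala`):
    ```
    // The factory walks the current thread's stack trace, picks the
    // caller's frame, and fills in the "className" and "methodName"
    // message parameters automatically.
    override def update(i: Int, value: Any): Unit =
      throw SparkUnsupportedOperationException()
    ```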
    
    ### Why are the changes needed?
    To improve code maintenance by avoiding boilerplate code (class and
    method names are extracted automatically), and to clean up
    `error-classes.json`.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, it can, if a user's code depends on the error class or the message
    format of `SparkUnsupportedOperationException`.
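    
    For example, calling `update()` on an `UnsafeRow` previously raised the
    legacy error class `_LEGACY_ERROR_TEMP_3126`; after this change the
    message looks roughly like (rendering reconstructed from the new
    template, so the exact formatting may differ):
    ```
    [UNSUPPORTED_CALL] Cannot call the method "update" of the class
    "org.apache.spark.sql.catalyst.expressions.UnsafeRow".
    ```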
    
    ### How was this patch tested?
    By running new test:
    ```
    $ build/sbt "test:testOnly *QueryCompilationErrorsSuite"
    ```
    and the affected test suites:
    ```
    $ build/sbt "core/testOnly *SparkThrowableSuite"
    $ build/sbt "test:testOnly *ShuffleSpecSuite"
    ```
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #44757 from MaxGekk/unsupported_call-error-class.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../src/main/resources/error/error-classes.json    | 181 +--------------------
 .../scala/org/apache/spark/SparkException.scala    |  13 ++
 docs/sql-error-conditions.md                       |   6 +
 .../sql/catalyst/expressions/UnsafeArrayData.java  |   4 +-
 .../spark/sql/catalyst/expressions/UnsafeRow.java  |   2 +-
 .../spark/sql/connector/write/WriteBuilder.java    |   2 +-
 .../spark/sql/vectorized/ArrowColumnVector.java    |  26 +--
 .../apache/spark/sql/vectorized/ColumnarArray.java |   4 +-
 .../spark/sql/vectorized/ColumnarBatchRow.java     |   6 +-
 .../apache/spark/sql/vectorized/ColumnarRow.java   |   6 +-
 .../spark/sql/catalyst/ProjectingInternalRow.scala |   8 +-
 .../spark/sql/catalyst/analysis/Analyzer.scala     |   2 +-
 .../sql/catalyst/analysis/FunctionRegistry.scala   |  14 +-
 .../catalog/FunctionExpressionBuilder.scala        |   2 +-
 .../sql/catalyst/catalog/functionResources.scala   |   2 +-
 .../spark/sql/catalyst/expressions/grouping.scala  |   6 +-
 .../catalyst/expressions/namedExpressions.scala    |  18 +-
 .../spark/sql/catalyst/expressions/ordering.scala  |   2 +-
 .../spark/sql/catalyst/plans/joinTypes.scala       |   2 +-
 .../sql/catalyst/plans/physical/partitioning.scala |   4 +-
 .../catalyst/util/ResolveDefaultColumnsUtil.scala  |   2 +-
 .../spark/sql/catalyst/ShuffleSpecSuite.scala      |  13 +-
 .../sql/errors/QueryCompilationErrorsSuite.scala   |  14 +-
 23 files changed, 99 insertions(+), 240 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 19817ced3356..2fa86de3daa3 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -3592,6 +3592,12 @@
     ],
     "sqlState" : "0A000"
   },
+  "UNSUPPORTED_CALL" : {
+    "message" : [
+      "Cannot call the method \"<methodName>\" of the class \"<className>\"."
+    ],
+    "sqlState" : "0A000"
+  },
   "UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING" : {
     "message" : [
       "The char/varchar type can't be used in the table schema.",
@@ -7133,16 +7139,6 @@
       "Cannot bind a V1 function."
     ]
   },
-  "_LEGACY_ERROR_TEMP_3111" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3112" : {
-    "message" : [
-      "Operation unsupported for <class>"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3113" : {
     "message" : [
       "UnresolvedTableSpec doesn't have a data type"
@@ -7153,76 +7149,11 @@
       "UnresolvedTableSpec doesn't have a data type"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3115" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3116" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3117" : {
-    "message" : [
-      "Cannot modify <class>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3118" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3119" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3120" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3121" : {
     "message" : [
       "A HllSketch instance cannot be updates with a Spark <dataType> type"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3122" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3123" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3124" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3125" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3126" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3127" : {
-    "message" : [
-      "Not supported on UnsafeArrayData."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3128" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3129" : {
     "message" : [
       "Cannot convert this array to unsafe format as it's too big."
@@ -7333,121 +7264,21 @@
       "Cannot create columnar reader."
     ]
   },
-  "_LEGACY_ERROR_TEMP_3151" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3152" : {
     "message" : [
       "Datatype not supported <dataType>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3153" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3154" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3155" : {
     "message" : [
       "Datatype not supported <dataType>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3156" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3157" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3158" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3159" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3160" : {
     "message" : [
       ""
     ]
   },
-  "_LEGACY_ERROR_TEMP_3161" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3162" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3163" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3164" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3165" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3166" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3167" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3168" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3169" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3170" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3171" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3172" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3173" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_USER_RAISED_EXCEPTION" : {
     "message" : [
       "<errorMessage>"
diff --git a/common/utils/src/main/scala/org/apache/spark/SparkException.scala b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
index 6e1f4b796518..67bdc23b5f08 100644
--- a/common/utils/src/main/scala/org/apache/spark/SparkException.scala
+++ b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
@@ -228,6 +228,19 @@ private[spark] class SparkUnsupportedOperationException private(
   override def getErrorClass: String = errorClass.orNull
 }
 
+private[spark] object SparkUnsupportedOperationException {
+  def apply(): SparkUnsupportedOperationException = {
+    val stackTrace = Thread.currentThread().getStackTrace
+    val messageParameters = if (stackTrace.length >= 4) {
+      val element = stackTrace(3)
+      Map("className" -> element.getClassName, "methodName" -> 
element.getMethodName)
+    } else {
+      Map("className" -> "?", "methodName" -> "?")
+    }
+    new SparkUnsupportedOperationException("UNSUPPORTED_CALL", 
messageParameters)
+  }
+}
+
 /**
  * Class not found exception thrown from Spark with an error class.
  */
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index ef12f6d03c06..35b50d6c6e4f 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -2331,6 +2331,12 @@ For more details see [UNSUPPORTED_ADD_FILE](sql-error-conditions-unsupported-add
 
 Unsupported arrow type `<typeName>`.
 
+### UNSUPPORTED_CALL
+
+[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
+
+Cannot call the method "`<methodName>`" of the class "`<className>`".
+
 ### UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING
 
 [SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
index 32188fdc01bb..e612166fb259 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
@@ -102,7 +102,7 @@ public final class UnsafeArrayData extends ArrayData implements Externalizable,
 
   @Override
   public Object[] array() {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3127");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   /**
@@ -274,7 +274,7 @@ public final class UnsafeArrayData extends ArrayData implements Externalizable,
 
   @Override
   public void update(int ordinal, Object value) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3128");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
index 203b0efd1034..6325ba68af5b 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
@@ -192,7 +192,7 @@ public final class UnsafeRow extends InternalRow implements Externalizable, Kryo
 
   @Override
   public void update(int ordinal, Object value) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3126");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/WriteBuilder.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/WriteBuilder.java
index 653b520fc8fc..fefc06c12df7 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/WriteBuilder.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/WriteBuilder.java
@@ -72,7 +72,7 @@ public interface WriteBuilder {
    */
   @Deprecated(since = "3.2.0")
   default StreamingWrite buildForStreaming() {
-    throw new SparkUnsupportedOperationException(getClass().getName() +
+    throw new SparkUnsupportedOperationException(
       "_LEGACY_ERROR_TEMP_3136", Map.of("class", getClass().getName()));
   }
 }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
index 417cbdd129c9..7382d96e20ba 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
@@ -227,55 +227,55 @@ public class ArrowColumnVector extends ColumnVector {
     }
 
     boolean getBoolean(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3161");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     byte getByte(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3162");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     short getShort(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3163");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     int getInt(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3164");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     long getLong(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3165");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     float getFloat(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3166");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     double getDouble(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3167");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     CalendarInterval getInterval(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3168");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     Decimal getDecimal(int rowId, int precision, int scale) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3169");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     UTF8String getUTF8String(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3170");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     byte[] getBinary(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3171");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     ColumnarArray getArray(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3172");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     ColumnarMap getMap(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3173");
+      throw SparkUnsupportedOperationException.apply();
     }
   }
 
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarArray.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarArray.java
index 7825be3e40e9..4163af9bfda5 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarArray.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarArray.java
@@ -189,11 +189,11 @@ public final class ColumnarArray extends ArrayData {
 
   @Override
   public void update(int ordinal, Object value) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3158");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public void setNullAt(int ordinal) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3159");
+    throw SparkUnsupportedOperationException.apply();
   }
 }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
index faab4509c36e..d05b3e2dc2d9 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
@@ -90,7 +90,7 @@ public final class ColumnarBatchRow extends InternalRow {
 
   @Override
   public boolean anyNull() {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3151");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
@@ -201,11 +201,11 @@ public final class ColumnarBatchRow extends InternalRow {
 
   @Override
   public void update(int ordinal, Object value) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3153");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public void setNullAt(int ordinal) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3153");
+    throw SparkUnsupportedOperationException.apply();
   }
 }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
index 3c1777b178b5..aaac980bb332 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
@@ -95,7 +95,7 @@ public final class ColumnarRow extends InternalRow {
 
   @Override
   public boolean anyNull() {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3154");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
@@ -203,11 +203,11 @@ public final class ColumnarRow extends InternalRow {
 
   @Override
   public void update(int ordinal, Object value) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3156");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public void setNullAt(int ordinal) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3157");
+    throw SparkUnsupportedOperationException.apply();
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ProjectingInternalRow.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ProjectingInternalRow.scala
index b15b7690b008..408bd65333ca 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ProjectingInternalRow.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ProjectingInternalRow.scala
@@ -37,13 +37,9 @@ case class ProjectingInternalRow(schema: StructType, colOrdinals: Seq[Int]) exte
     this.row = row
   }
 
-  override def setNullAt(i: Int): Unit = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3117")
-  }
+  override def setNullAt(i: Int): Unit = throw SparkUnsupportedOperationException()
 
-  override def update(i: Int, value: Any): Unit = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3117")
-  }
+  override def update(i: Int, value: Any): Unit = throw SparkUnsupportedOperationException()
 
   override def copy(): InternalRow = {
     val newRow = if (row != null) row.copy() else null
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 086c688cad32..719780675273 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -81,7 +81,7 @@ object SimpleAnalyzer extends Analyzer(
 }
 
 object FakeV2SessionCatalog extends TableCatalog with FunctionCatalog {
-  private def fail() = throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3118")
+  private def fail() = throw SparkUnsupportedOperationException()
   override def listTables(namespace: Array[String]): Array[Identifier] = fail()
   override def loadTable(ident: Identifier): Table = {
     throw new NoSuchTableException(ident.asMultipartIdentifier)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index a9b1178a8dba..a77086c948e0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -266,31 +266,31 @@ trait SimpleFunctionRegistryBase[T] extends FunctionRegistryBase[T] with Logging
 trait EmptyFunctionRegistryBase[T] extends FunctionRegistryBase[T] {
   override def registerFunction(
       name: FunctionIdentifier, info: ExpressionInfo, builder: FunctionBuilder): Unit = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def lookupFunction(name: FunctionIdentifier, children: Seq[Expression]): T = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def listFunction(): Seq[FunctionIdentifier] = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def lookupFunction(name: FunctionIdentifier): Option[ExpressionInfo] = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def lookupFunctionBuilder(name: FunctionIdentifier): Option[FunctionBuilder] = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def dropFunction(name: FunctionIdentifier): Boolean = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def clear(): Unit = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/FunctionExpressionBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/FunctionExpressionBuilder.scala
index 0bfb45579cbf..08971dedad28 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/FunctionExpressionBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/FunctionExpressionBuilder.scala
@@ -27,6 +27,6 @@ trait FunctionExpressionBuilder {
 
 object DummyFunctionExpressionBuilder extends FunctionExpressionBuilder {
   override def makeExpression(name: String, clazz: Class[_], input: Seq[Expression]): Expression = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3119")
+    throw SparkUnsupportedOperationException()
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/functionResources.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/functionResources.scala
index 0d779d9d779f..efd9130b4649 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/functionResources.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/functionResources.scala
@@ -59,6 +59,6 @@ trait FunctionResourceLoader {
 
 object DummyFunctionResourceLoader extends FunctionResourceLoader {
   override def loadResource(resource: FunctionResource): Unit = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3120")
+    throw SparkUnsupportedOperationException()
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala
index 3baf6d9d1b73..4350c4da932f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala
@@ -42,12 +42,10 @@ trait BaseGroupingSets extends Expression with CodegenFallback {
   // this should be replaced first
   override lazy val resolved: Boolean = false
 
-  override def dataType: DataType =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3122")
+  override def dataType: DataType = throw SparkUnsupportedOperationException()
   override def foldable: Boolean = false
   override def nullable: Boolean = true
-  override def eval(input: InternalRow): Any =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3123")
+  override def eval(input: InternalRow): Any = throw SparkUnsupportedOperationException()
   final override val nodePatterns: Seq[TreePattern] = Seq(GROUPING_ANALYTICS)
 }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
index e25509ef5bc1..604494517518 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
@@ -395,23 +395,23 @@ case class PrettyAttribute(
   override def sql: String = toString
 
   override def withNullability(newNullability: Boolean): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def newInstance(): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def withQualifier(newQualifier: Seq[String]): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def withName(newName: String): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def withMetadata(newMetadata: Metadata): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def qualifier: Seq[String] =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def exprId: ExprId =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def withExprId(newExprId: ExprId): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def withDataType(newType: DataType): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def nullable: Boolean = true
 }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ordering.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ordering.scala
index 5f9f07e299b7..37a3b3a34e49 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ordering.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ordering.scala
@@ -30,7 +30,7 @@ import org.apache.spark.sql.types._
  */
 class BaseOrdering extends Ordering[InternalRow] {
   def compare(a: InternalRow, b: InternalRow): Int = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3116")
+    throw SparkUnsupportedOperationException()
   }
 }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/joinTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/joinTypes.scala
index d047051e8cdc..3744613dacc0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/joinTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/joinTypes.scala
@@ -92,7 +92,7 @@ case class ExistenceJoin(exists: Attribute) extends JoinType {
   override def sql: String = {
     // This join type is only used in the end of optimizer and physical plans, we will not
     // generate SQL for this join type
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3115")
+    throw SparkUnsupportedOperationException()
   }
 }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
index 8199b358dc1f..c98a2a92a3ab 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
@@ -634,9 +634,7 @@ trait ShuffleSpec {
   *  - [[isCompatibleWith]] returns false on the side where the `clustering` is from.
    */
   def createPartitioning(clustering: Seq[Expression]): Partitioning =
-    throw new SparkUnsupportedOperationException(
-      errorClass = "_LEGACY_ERROR_TEMP_3112",
-      messageParameters = Map("class" -> getClass.getCanonicalName))
+    throw SparkUnsupportedOperationException()
 }
 
 case object SinglePartitionShuffleSpec extends ShuffleSpec {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
index 8f10ba079dd9..bf64399c5659 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
@@ -451,7 +451,7 @@ object ResolveDefaultColumns extends QueryErrorsBase with ResolveDefaultColumnsU
     override def initialize(name: String, options: CaseInsensitiveStringMap): Unit = {}
     override def name(): String = CatalogManager.SESSION_CATALOG_NAME
     override def listFunctions(namespace: Array[String]): Array[Identifier] = {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3111")
+      throw SparkUnsupportedOperationException()
     }
     override def loadFunction(ident: Identifier): UnboundFunction = {
       V1Function(v1Catalog.lookupPersistentFunction(ident.asFunctionIdentifier))
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ShuffleSpecSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ShuffleSpecSuite.scala
index 6b069d1c9736..95208553a3a5 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ShuffleSpecSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ShuffleSpecSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalyst
 
-import org.apache.spark.SparkFunSuite
+import org.apache.spark.{SparkFunSuite, SparkUnsupportedOperationException}
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.catalyst.plans.physical._
@@ -470,8 +470,13 @@ class ShuffleSpecSuite extends SparkFunSuite with SQLHelper {
 
     // unsupported cases
 
-    val msg = intercept[Exception](RangeShuffleSpec(10, distribution)
-      .createPartitioning(distribution.clustering))
-    assert(msg.getMessage.contains("Operation unsupported"))
+    checkError(
+      exception = intercept[SparkUnsupportedOperationException] {
+        RangeShuffleSpec(10, distribution).createPartitioning(distribution.clustering)
+      },
+      errorClass = "UNSUPPORTED_CALL",
+      parameters = Map(
+        "methodName" -> "createPartitioning$",
+        "className" -> 
"org.apache.spark.sql.catalyst.plans.physical.ShuffleSpec"))
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index 30a5bf709066..0ed58626b099 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -17,9 +17,10 @@
 
 package org.apache.spark.sql.errors
 
-import org.apache.spark.SPARK_DOC_ROOT
+import org.apache.spark.{SPARK_DOC_ROOT, SparkUnsupportedOperationException}
 import org.apache.spark.sql.{AnalysisException, ClassData, IntegratedUDFTestUtils, QueryTest, Row}
 import org.apache.spark.sql.api.java.{UDF1, UDF2, UDF23Test}
+import org.apache.spark.sql.catalyst.expressions.UnsafeRow
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
 import org.apache.spark.sql.expressions.SparkUserDefinedFunction
@@ -918,6 +919,17 @@ class QueryCompilationErrorsSuite
       )
     }
   }
+
+  test("UNSUPPORTED_CALL: call the unsupported method update()") {
+    checkError(
+      exception = intercept[SparkUnsupportedOperationException] {
+        new UnsafeRow(1).update(0, 1)
+      },
+      errorClass = "UNSUPPORTED_CALL",
+      parameters = Map(
+        "methodName" -> "update",
+        "className" -> "org.apache.spark.sql.catalyst.expressions.UnsafeRow"))
+  }
 }
 
 class MyCastToString extends SparkUserDefinedFunction(

