This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d4134a83088 [SPARK-43028][SQL] Add error class SQL_CONF_NOT_FOUND
d4134a83088 is described below

commit d4134a8308889c3b8f87c93a23f91aa070e97b21
Author: allisonwang-db <allison.w...@databricks.com>
AuthorDate: Tue Apr 11 13:43:16 2023 +0300

    [SPARK-43028][SQL] Add error class SQL_CONF_NOT_FOUND
    
    ### What changes were proposed in this pull request?
    
    This PR adds a new error class `SQL_CONF_NOT_FOUND`.
    
    ### Why are the changes needed?
    
    To make the error message more user-friendly when getting a non-existent SQL config. For example:
    ```
    spark.conf.get("some.conf")
    ```
    Before this PR, it threw this error:
    ```
    java.util.NoSuchElementException: some.conf
    ```
    After this PR:
    ```
    [SQL_CONF_NOT_FOUND] The SQL config "some.conf" cannot be found. Please verify that the config exists.
    ```
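    
    A minimal usage sketch (illustrative only, not part of this patch; it assumes a running `SparkSession` named `spark`): because the new exception implements `SparkThrowable`, callers can match on the error class instead of parsing the message text:
    ```scala
    import org.apache.spark.SparkThrowable

    // Hypothetical fallback: detect the unset config by its error class
    // rather than by matching on the exception message.
    val value =
      try {
        spark.conf.get("some.conf")
      } catch {
        case e: SparkThrowable if e.getErrorClass == "SQL_CONF_NOT_FOUND" =>
          "fallback-value"
      }
    ```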
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes. The error message will be changed.
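    
    Callers that pass a default value are unaffected, since no exception is thrown for a missing key. A small sketch (again assuming a `SparkSession` named `spark`):
    ```scala
    // Returns "fallback" when "some.conf" is unset; no exception is thrown.
    spark.conf.get("some.conf", "fallback")
    ```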
    
    ### How was this patch tested?
    
    Added a new UT.
    
    Closes #40660 from allisonwang-db/SPARK-43028-conf-error.
    
    Authored-by: allisonwang-db <allison.w...@databricks.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 R/pkg/R/SQLContext.R                                  |  2 +-
 core/src/main/resources/error/error-classes.json      |  5 +++++
 .../main/scala/org/apache/spark/SparkException.scala  | 19 +++++++++++++++++++
 .../spark/sql/errors/QueryExecutionErrors.scala       |  6 ++++--
 .../scala/org/apache/spark/sql/internal/SQLConf.scala |  4 ++--
 .../org/apache/spark/sql/internal/SQLConfSuite.scala  |  9 ++++++++-
 6 files changed, 39 insertions(+), 6 deletions(-)

diff --git a/R/pkg/R/SQLContext.R b/R/pkg/R/SQLContext.R
index 5adebade8b7..5f500c29958 100644
--- a/R/pkg/R/SQLContext.R
+++ b/R/pkg/R/SQLContext.R
@@ -111,7 +111,7 @@ sparkR.conf <- function(key, defaultValue) {
       tryCatch(callJMethod(conf, "get", key),
               error = function(e) {
                 estr <- as.character(e)
-                if (any(grepl("java.util.NoSuchElementException", estr, fixed 
= TRUE))) {
+                if (any(grepl("SQL_CONF_NOT_FOUND", estr, fixed = TRUE))) {
                   stop("Config '", key, "' is not set")
                 } else {
                   stop("Unknown error: ", estr)
diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 54cffa498cc..ae73071a120 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1425,6 +1425,11 @@
       "sortBy must be used together with bucketBy."
     ]
   },
+  "SQL_CONF_NOT_FOUND" : {
+    "message" : [
+      "The SQL config <sqlConf> cannot be found. Please verify that the config 
exists."
+    ]
+  },
   "STAR_GROUP_BY_POS" : {
     "message" : [
       "Star (*) is not allowed in a select list when GROUP BY an ordinal 
position is used."
diff --git a/core/src/main/scala/org/apache/spark/SparkException.scala b/core/src/main/scala/org/apache/spark/SparkException.scala
index 2f05b2ad6a7..4e48e9c8d41 100644
--- a/core/src/main/scala/org/apache/spark/SparkException.scala
+++ b/core/src/main/scala/org/apache/spark/SparkException.scala
@@ -293,6 +293,25 @@ private[spark] class SparkRuntimeException(
   override def getQueryContext: Array[QueryContext] = context
 }
 
+/**
+ * No such element exception thrown from Spark with an error class.
+ */
+private[spark] class SparkNoSuchElementException(
+    errorClass: String,
+    messageParameters: Map[String, String],
+    context: Array[QueryContext] = Array.empty,
+    summary: String = "")
+    extends NoSuchElementException(
+      SparkThrowableHelper.getMessage(errorClass, messageParameters, summary))
+    with SparkThrowable {
+
+  override def getMessageParameters: java.util.Map[String, String] = messageParameters.asJava
+
+  override def getErrorClass: String = errorClass
+
+  override def getQueryContext: Array[QueryContext] = context
+}
+
 /**
  * Security exception thrown from Spark with an error class.
  */
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 2fccca839d5..d07dcec3693 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -2139,8 +2139,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
       cause = null)
   }
 
-  def noSuchElementExceptionError(key: String): Throwable = {
-    new NoSuchElementException(key)
+  def sqlConfigNotFoundError(key: String): SparkNoSuchElementException = {
+    new SparkNoSuchElementException(
+      errorClass = "SQL_CONF_NOT_FOUND",
+      messageParameters = Map("sqlConf" -> toSQLConf(key)))
   }
 
  def cannotMutateReadOnlySQLConfError(): SparkUnsupportedOperationException = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index b0996dd6acc..4986dc3661c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.internal
 
-import java.util.{Locale, NoSuchElementException, Properties, TimeZone}
+import java.util.{Locale, Properties, TimeZone}
 import java.util
 import java.util.concurrent.TimeUnit
 import java.util.concurrent.atomic.AtomicReference
@@ -5072,7 +5072,7 @@ class SQLConf extends Serializable with Logging {
         // Try to use the default value
        Option(getConfigEntry(key)).map { e => e.stringConverter(e.readFrom(reader)) }
       }.
-      getOrElse(throw QueryExecutionErrors.noSuchElementExceptionError(key))
+      getOrElse(throw QueryExecutionErrors.sqlConfigNotFoundError(key))
   }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index 30f4fdfbbcf..0a0bee2eabd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -22,7 +22,7 @@ import java.util.TimeZone
 import org.apache.hadoop.fs.Path
 import org.apache.logging.log4j.Level
 
-import org.apache.spark.SPARK_DOC_ROOT
+import org.apache.spark.{SPARK_DOC_ROOT, SparkNoSuchElementException}
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.MIT
@@ -493,4 +493,11 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
          |${nonInternalLegacyConfigs.map(_._1).mkString("\n")}
          |""".stripMargin)
   }
+
+  test("SPARK-43028: config not found error") {
+    checkError(
+      exception = intercept[SparkNoSuchElementException](spark.conf.get("some.conf")),
+      errorClass = "SQL_CONF_NOT_FOUND",
+      parameters = Map("sqlConf" -> "\"some.conf\""))
+  }
 }

