This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f9bc04e4a05 [SPARK-41022][SQL][TESTS] Test the error classes: DEFAULT_DATABASE_NOT_EXISTS, INDEX_ALREADY_EXISTS, INDEX_NOT_FOUND, ROUTINE_NOT_FOUND
f9bc04e4a05 is described below

commit f9bc04e4a05624ea54da1cd843359007725d4b82
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Sun Nov 6 21:06:48 2022 +0300

    [SPARK-41022][SQL][TESTS] Test the error classes: DEFAULT_DATABASE_NOT_EXISTS, INDEX_ALREADY_EXISTS, INDEX_NOT_FOUND, ROUTINE_NOT_FOUND
    
    ### What changes were proposed in this pull request?
    This PR aims to test the following error classes:
    1. DEFAULT_DATABASE_NOT_EXISTS
    2. INDEX_ALREADY_EXISTS
    3. INDEX_NOT_FOUND
    4. ROUTINE_NOT_FOUND
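
    All four cases follow Spark's standard `checkError` assertion, which verifies both the error class and its message parameters. A minimal sketch, mirroring the `ROUTINE_NOT_FOUND` case in the diff below (`checkError` comes from the shared test framework; `catalog` and `arguments` are the fixtures from `SessionCatalogSuite`):

    ```scala
    checkError(
      // Intercept the exception and assert on its error class and parameters
      // instead of only checking that it is thrown.
      exception = intercept[NoSuchFunctionException] {
        catalog.lookupFunction(FunctionIdentifier("func1"), arguments)
      },
      errorClass = "ROUTINE_NOT_FOUND",
      parameters = Map("routineName" -> "`default`.`func1`"))
    ```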
    
    ### Why are the changes needed?
    The changes improve test coverage of the error framework.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    1. Add new unit tests.
    2. Update existing unit tests.
    3. Pass GitHub Actions (GA).
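
    For reference, the touched suites can be run individually with sbt (a sketch of the usual Spark dev workflow; the module names below are assumptions based on where the files live):

    ```
    build/sbt "catalyst/testOnly *SessionCatalogSuite"
    build/sbt "sql/testOnly *SharedStateSuite *JDBCV2Suite"
    ```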
    
    Closes #38522 from panbingkun/index_test.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../sql/catalyst/catalog/SessionCatalogSuite.scala | 28 +++++++++++++++++-----
 .../spark/sql/internal/SharedStateSuite.scala      | 20 ++++++++++++++--
 .../org/apache/spark/sql/jdbc/JDBCV2Suite.scala    | 20 +++++++++++++++-
 3 files changed, 59 insertions(+), 9 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index d846162b784..f86d12474d6 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -1578,13 +1578,29 @@ abstract class SessionCatalogSuite extends AnalysisTest with Eventually {
       val arguments = Seq(Literal(1), Literal(2), Literal(3))
       assert(catalog.lookupFunction(FunctionIdentifier("func1"), arguments) === Literal(1))
       catalog.dropTempFunction("func1", ignoreIfNotExists = false)
-      intercept[NoSuchFunctionException] {
-        catalog.lookupFunction(FunctionIdentifier("func1"), arguments)
-      }
-      intercept[NoSuchTempFunctionException] {
-        catalog.dropTempFunction("func1", ignoreIfNotExists = false)
-      }
+      checkError(
+        exception = intercept[NoSuchFunctionException] {
+          catalog.lookupFunction(FunctionIdentifier("func1"), arguments)
+        },
+        errorClass = "ROUTINE_NOT_FOUND",
+        parameters = Map("routineName" -> "`default`.`func1`")
+      )
+      checkError(
+        exception = intercept[NoSuchTempFunctionException] {
+          catalog.dropTempFunction("func1", ignoreIfNotExists = false)
+        },
+        errorClass = "ROUTINE_NOT_FOUND",
+        parameters = Map("routineName" -> "`func1`")
+      )
       catalog.dropTempFunction("func1", ignoreIfNotExists = true)
+
+      checkError(
+        exception = intercept[NoSuchTempFunctionException] {
+          catalog.dropTempFunction("func2", ignoreIfNotExists = false)
+        },
+        errorClass = "ROUTINE_NOT_FOUND",
+        parameters = Map("routineName" -> "`func2`")
+      )
     }
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SharedStateSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SharedStateSuite.scala
index 81bf1534242..d3154d0125a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SharedStateSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SharedStateSuite.scala
@@ -22,10 +22,10 @@ import java.net.URL
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.FsUrlStreamHandlerFactory
 
-import org.apache.spark.SparkConf
+import org.apache.spark.{SparkConf, SparkException}
+import org.apache.spark.sql.catalyst.catalog.SessionCatalog
 import org.apache.spark.sql.test.SharedSparkSession
 
-
 /**
  * Tests for [[org.apache.spark.sql.internal.SharedState]].
  */
@@ -52,4 +52,20 @@ class SharedStateSuite extends SharedSparkSession {
     assert(conf.isInstanceOf[Configuration])
     assert(conf.asInstanceOf[Configuration].get("fs.defaultFS") == "file:///")
   }
+
+  test("Default database does not exist") {
+    SQLConf.get.setConfString("spark.sql.catalog.spark_catalog.defaultDatabase",
+      "default_database_not_exists")
+
+    checkError(
+      exception = intercept[SparkException] {
+        spark.sharedState.externalCatalog
+      },
+      errorClass = "DEFAULT_DATABASE_NOT_EXISTS",
+      parameters = Map("defaultDatabase" -> "default_database_not_exists")
+    )
+
+    SQLConf.get.setConfString("spark.sql.catalog.spark_catalog.defaultDatabase",
+      SessionCatalog.DEFAULT_DATABASE)
+  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
index f47efae88c8..46281ee6644 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
@@ -25,7 +25,7 @@ import scala.util.control.NonFatal
 import org.apache.spark.{SparkConf, SparkException}
 import org.apache.spark.sql.{AnalysisException, DataFrame, ExplainSuiteHelper, QueryTest, Row}
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.CannotReplaceMissingTableException
+import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, IndexAlreadyExistsException, NoSuchIndexException}
 import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, Filter, GlobalLimit, LocalLimit, Offset, Sort}
 import org.apache.spark.sql.connector.{IntegralAverage, StrLen}
 import org.apache.spark.sql.connector.catalog.{Catalogs, Identifier, TableCatalog}
@@ -2628,6 +2628,15 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
     assert(indexes1.isEmpty)
 
     sql(s"CREATE INDEX people_index ON TABLE h2.test.people (id)")
+    checkError(
+      exception = intercept[IndexAlreadyExistsException] {
+        sql(s"CREATE INDEX people_index ON TABLE h2.test.people (id)")
+      },
+      errorClass = "INDEX_ALREADY_EXISTS",
+      parameters = Map(
+        "message" -> "Failed to create index people_index in test.people"
+      )
+    )
     assert(jdbcTable.indexExists("people_index"))
     val indexes2 = jdbcTable.listIndexes()
     assert(!indexes2.isEmpty)
@@ -2636,6 +2645,15 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
     assert(tableIndex.indexName() == "people_index")
 
     sql(s"DROP INDEX people_index ON TABLE h2.test.people")
+    checkError(
+      exception = intercept[NoSuchIndexException] {
+        sql(s"DROP INDEX people_index ON TABLE h2.test.people")
+      },
+      errorClass = "INDEX_NOT_FOUND",
+      parameters = Map(
+        "message" -> "Failed to drop index people_index in test.people"
+      )
+    )
     assert(jdbcTable.indexExists("people_index") == false)
     val indexes3 = jdbcTable.listIndexes()
     assert(indexes3.isEmpty)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
