This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 9983bdb3b88 [SPARK-39663][SQL][TESTS] Add UT for MysqlDialect listIndexes method
9983bdb3b88 is described below

commit 9983bdb3b882a083cba9785392c3ba5d7a36496a
Author: panbingkun <pbk1...@gmail.com>
AuthorDate: Wed Jul 6 11:27:17 2022 -0500

    [SPARK-39663][SQL][TESTS] Add UT for MysqlDialect listIndexes method
    
    ### What changes were proposed in this pull request?
    Add a UT for MysqlDialect's listIndexes method.
    
    ### Why are the changes needed?
    Add a UT for an existing function and improve test coverage.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Passed GitHub Actions (GA).
    
    Closes #37060 from panbingkun/SPARK-39663.
    
    Authored-by: panbingkun <pbk1...@gmail.com>
    Signed-off-by: Sean Owen <sro...@gmail.com>
---
 .../spark/sql/jdbc/v2/MySQLIntegrationSuite.scala  |  2 ++
 .../org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala  | 30 ++++++++++++++++++----
 .../org/apache/spark/sql/jdbc/MySQLDialect.scala   |  2 +-
 3 files changed, 28 insertions(+), 6 deletions(-)
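
For context, the new assertions exercise the DataSource V2 index API
(SupportsIndex.listIndexes() returning TableIndex values), as shown in the
V2JDBCTest hunk below. A minimal sketch of that usage follows; the helper
object and method names here are illustrative only and are not part of this
commit:

    import org.apache.spark.sql.connector.catalog.index.SupportsIndex

    // Illustrative helper (not in this commit): check that a table loaded as
    // SupportsIndex reports exactly the expected index names via listIndexes(),
    // mirroring the assertions added to V2JDBCTest below.
    object IndexAssertions {
      def assertIndexNames(table: SupportsIndex, expected: Seq[String]): Unit = {
        val names = table.listIndexes().map(_.indexName()).sorted.toSeq
        assert(names == expected.sorted, s"expected ${expected.sorted}, got $names")
      }
    }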

diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
index 97f521a378e..6e76b74c7d8 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
@@ -119,6 +119,8 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationV2Suite with V2JDBCTest
 
   override def supportsIndex: Boolean = true
 
+  override def supportListIndexes: Boolean = true
+
   override def indexOptions: String = "KEY_BLOCK_SIZE=10"
 
   testVarPop()
diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
index 5f0033490d5..0f85bd534c3 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
@@ -197,6 +197,8 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
 
   def supportsIndex: Boolean = false
 
+  def supportListIndexes: Boolean = false
+
   def indexOptions: String = ""
 
   test("SPARK-36895: Test INDEX Using SQL") {
@@ -219,11 +221,21 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
           s" The supported Index Types are:"))
 
         sql(s"CREATE index i1 ON $catalogName.new_table USING BTREE (col1)")
+        assert(jdbcTable.indexExists("i1"))
+        if (supportListIndexes) {
+          val indexes = jdbcTable.listIndexes()
+          assert(indexes.size == 1)
+          assert(indexes.head.indexName() == "i1")
+        }
+
         sql(s"CREATE index i2 ON $catalogName.new_table (col2, col3, col5)" +
           s" OPTIONS ($indexOptions)")
-
-        assert(jdbcTable.indexExists("i1") == true)
-        assert(jdbcTable.indexExists("i2") == true)
+        assert(jdbcTable.indexExists("i2"))
+        if (supportListIndexes) {
+          val indexes = jdbcTable.listIndexes()
+          assert(indexes.size == 2)
+          assert(indexes.map(_.indexName()).sorted === Array("i1", "i2"))
+        }
 
         // This should pass without exception
         sql(s"CREATE index IF NOT EXISTS i1 ON $catalogName.new_table (col1)")
@@ -234,10 +246,18 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
         assert(m.contains("Failed to create index i1 in new_table"))
 
         sql(s"DROP index i1 ON $catalogName.new_table")
-        sql(s"DROP index i2 ON $catalogName.new_table")
-
         assert(jdbcTable.indexExists("i1") == false)
+        if (supportListIndexes) {
+          val indexes = jdbcTable.listIndexes()
+          assert(indexes.size == 1)
+          assert(indexes.head.indexName() == "i2")
+        }
+
+        sql(s"DROP index i2 ON $catalogName.new_table")
         assert(jdbcTable.indexExists("i2") == false)
+        if (supportListIndexes) {
+          assert(jdbcTable.listIndexes().isEmpty)
+        }
 
         // This should pass without exception
         sql(s"DROP index IF EXISTS i1 ON $catalogName.new_table")
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
index 24f9bac74f8..c4cb5369af9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala
@@ -206,7 +206,7 @@ private case object MySQLDialect extends JdbcDialect with SQLConfHelper {
           val indexName = rs.getString("key_name")
           val colName = rs.getString("column_name")
           val indexType = rs.getString("index_type")
-          val indexComment = rs.getString("Index_comment")
+          val indexComment = rs.getString("index_comment")
           if (indexMap.contains(indexName)) {
             val index = indexMap.get(indexName).get
             val newIndex = new TableIndex(indexName, indexType,
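
The one-line MySQLDialect change above switches the column label from
"Index_comment" to "index_comment"; JDBC column-label lookups are generally
case-insensitive, so this appears to be a consistency cleanup alongside the
lowercase labels already used for the other columns. A hedged, standalone
sketch of reading the same SHOW INDEXES columns over plain JDBC, with
placeholder URL, credentials, and table name:

    import java.sql.DriverManager

    // Hedged sketch, not the dialect's actual code path: read MySQL's
    // SHOW INDEXES output over plain JDBC using the same lowercase column
    // labels as MySQLDialect. URL, credentials and table name are placeholders.
    object ShowIndexesSketch {
      def main(args: Array[String]): Unit = {
        val conn = DriverManager.getConnection(
          "jdbc:mysql://localhost:3306/test", "user", "password")
        try {
          val rs = conn.createStatement().executeQuery("SHOW INDEXES FROM new_table")
          while (rs.next()) {
            val indexName = rs.getString("key_name")
            val colName = rs.getString("column_name")
            val indexType = rs.getString("index_type")
            val indexComment = rs.getString("index_comment")
            println(s"$indexName ($indexType) on $colName, comment='$indexComment'")
          }
        } finally {
          conn.close()
        }
      }
    }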


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
