[spark] branch master updated: [SPARK-36895][SQL][FOLLOWUP] Use property to specify index type

2021-11-08 Thread sarutak
This is an automated email from the ASF dual-hosted git repository.

sarutak pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
 new d8a4a8c  [SPARK-36895][SQL][FOLLOWUP] Use property to specify index type
d8a4a8c is described below

commit d8a4a8c629db6ae8081477e58fdbf20983b48a39
Author: Huaxin Gao 
AuthorDate: Tue Nov 9 04:21:29 2021 +0900

[SPARK-36895][SQL][FOLLOWUP] Use property to specify index type

### What changes were proposed in this pull request?
Use a property to specify the index type.

### Why are the changes needed?
Fix the Scaladoc in https://github.com/apache/spark/pull/34486 and resubmit it.

### Does this PR introduce _any_ user-facing change?
Yes
```
  void createIndex(String indexName,
      String indexType,
      NamedReference[] columns,
      Map<NamedReference, Map<String, String>> columnsProperties,
      Map<String, String> properties)
```
changed to
```
  void createIndex(String indexName,
      NamedReference[] columns,
      Map<NamedReference, Map<String, String>> columnsProperties,
      Map<String, String> properties)
```
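
For illustration, a minimal caller-side sketch (Scala) of the new shape, modeled on the test code removed in the diff below. The "type" property key is an assumption for illustration; the actual reserved key is defined by the connector API, not by this sketch.
```
import java.util

import org.apache.spark.sql.connector.catalog.index.SupportsIndex
import org.apache.spark.sql.connector.expressions.{FieldReference, NamedReference}

// Create a BTREE index through the new 4-argument createIndex: the index type
// now travels in `properties` instead of its own parameter.
def createBtreeIndex(jdbcTable: SupportsIndex): Unit = {
  val properties = new util.HashMap[String, String]()
  properties.put("type", "BTREE")        // index type, formerly the indexType argument (key name assumed)
  properties.put("KEY_BLOCK_SIZE", "10") // other index properties, unchanged

  jdbcTable.createIndex(
    "i1",
    Array[NamedReference](FieldReference("col1")),
    new util.HashMap[NamedReference, util.Map[String, String]](), // no per-column properties
    properties)
}
```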

### How was this patch tested?
New test.

Closes #34523 from huaxingao/newDelete.

Authored-by: Huaxin Gao 
Signed-off-by: Kousuke Saruta 
---
 .../spark/sql/jdbc/v2/MySQLIntegrationSuite.scala  | 67 --
 .../org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala  | 82 +-
 .../sql/connector/catalog/index/SupportsIndex.java |  8 ++-
 .../sql/execution/datasources/jdbc/JdbcUtils.scala |  3 +-
 .../execution/datasources/v2/CreateIndexExec.scala |  9 ++-
 .../execution/datasources/v2/jdbc/JDBCTable.scala  |  3 +-
 .../org/apache/spark/sql/jdbc/JdbcDialects.scala   |  2 -
 .../org/apache/spark/sql/jdbc/MySQLDialect.scala   | 27 ---
 8 files changed, 45 insertions(+), 156 deletions(-)

diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
index d77dcb4..592f7d6 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
@@ -18,16 +18,11 @@
 package org.apache.spark.sql.jdbc.v2
 
 import java.sql.{Connection, SQLFeatureNotSupportedException}
-import java.util
 
 import org.scalatest.time.SpanSugar._
 
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.analysis.{IndexAlreadyExistsException, NoSuchIndexException}
-import org.apache.spark.sql.connector.catalog.{Catalogs, Identifier, TableCatalog}
-import org.apache.spark.sql.connector.catalog.index.SupportsIndex
-import org.apache.spark.sql.connector.expressions.{FieldReference, NamedReference}
 import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
 import org.apache.spark.sql.jdbc.{DatabaseOnDocker, DockerJDBCIntegrationSuite}
 import org.apache.spark.sql.types._
@@ -122,66 +117,4 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTest {
   }
 
   override def supportsIndex: Boolean = true
-
-  override def testIndexProperties(jdbcTable: SupportsIndex): Unit = {
-    val properties = new util.HashMap[String, String]();
-    properties.put("KEY_BLOCK_SIZE", "10")
-    properties.put("COMMENT", "'this is a comment'")
-    // MySQL doesn't allow property set on individual column, so use empty Array for
-    // column properties
-    jdbcTable.createIndex("i1", "BTREE", Array(FieldReference("col1")),
-      new util.HashMap[NamedReference, util.Map[String, String]](), properties)
-
-    var index = jdbcTable.listIndexes()
-    // The index property size is actually 1. Even though the index is created
-    // with properties "KEY_BLOCK_SIZE", "10" and "COMMENT", "'this is a comment'", when
-    // retrieving index using `SHOW INDEXES`, MySQL only returns `COMMENT`.
-    assert(index(0).properties.size == 1)
-    assert(index(0).properties.get("COMMENT").equals("this is a comment"))
-  }
-
-  override def testIndexUsingSQL(tbl: String): Unit = {
-    val loaded = Catalogs.load("mysql", conf)
-    val jdbcTable = loaded.asInstanceOf[TableCatalog]
-      .loadTable(Identifier.of(Array.empty[String], "new_table"))
-      .asInstanceOf[SupportsIndex]
-    assert(jdbcTable.indexExists("i1") == false)
-    assert(jdbcTable.indexExists("i2") == false)
-
-    val indexType = "DUMMY"
-    var m = intercept[UnsupportedOperationException] {
-      sql(s"CREATE index i1 ON $catalogName.new_table USING DUMMY (col1)")
-    }.getMessage
-    assert(m.contains(s"Index Type $indexType is not supported." +
-      s" The supported Index Types are: BTREE and HASH"))
-
-    sql(s"CREATE index i1 ON $catalogName.new_table USING

[spark] branch master updated: [SPARK-36895][SQL][FOLLOWUP] Use property to specify index type

2021-11-08 Thread wenchen
This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
 new 4011dd6  [SPARK-36895][SQL][FOLLOWUP] Use property to specify index type
4011dd6 is described below

commit 4011dd6995c61737ca67288224afb548eeecb3a0
Author: Huaxin Gao 
AuthorDate: Mon Nov 8 20:58:38 2021 +0800

[SPARK-36895][SQL][FOLLOWUP] Use property to specify index type

### What changes were proposed in this pull request?
Use a property to specify the index type.

### Why are the changes needed?
To address this review comment: https://github.com/apache/spark/pull/34148#discussion_r731500964

### Does this PR introduce _any_ user-facing change?
Yes
```
  void createIndex(String indexName,
      String indexType,
      NamedReference[] columns,
      Map<NamedReference, Map<String, String>> columnsProperties,
      Map<String, String> properties)
```
changed to
```
  void createIndex(String indexName,
      NamedReference[] columns,
      Map<NamedReference, Map<String, String>> columnsProperties,
      Map<String, String> properties)
```
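
On the implementation side, the index type is now recovered from `properties`. Below is a free-standing Scala sketch of a method with the new signature; the "type" key, the BTREE default, and the supported-type check mirror the MySQL error message visible in the removed test further down and are illustrative assumptions, not the dialect's actual code.
```
import java.util

import org.apache.spark.sql.connector.expressions.NamedReference

// Sketch of an implementation of the new signature: read the index type out of
// `properties` rather than a dedicated parameter. Key name, default, and the
// supported-type check are assumptions for illustration.
def createIndex(
    indexName: String,
    columns: Array[NamedReference],
    columnsProperties: util.Map[NamedReference, util.Map[String, String]],
    properties: util.Map[String, String]): Unit = {
  val indexType = Option(properties.get("type")).getOrElse("BTREE")
  if (!Set("BTREE", "HASH").contains(indexType.toUpperCase(java.util.Locale.ROOT))) {
    throw new UnsupportedOperationException(
      s"Index Type $indexType is not supported. The supported Index Types are: BTREE and HASH")
  }
  // ... build and execute the dialect-specific CREATE INDEX statement here
}
```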

### How was this patch tested?
New test.

Closes #34486 from huaxingao/deleteIndexType.

Lead-authored-by: Huaxin Gao 
Co-authored-by: Wenchen Fan 
Co-authored-by: Huaxin Gao 
Signed-off-by: Wenchen Fan 
---
 .../spark/sql/jdbc/v2/MySQLIntegrationSuite.scala  | 67 --
 .../org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala  | 82 +-
 .../sql/connector/catalog/index/SupportsIndex.java |  8 ++-
 .../sql/execution/datasources/jdbc/JdbcUtils.scala |  3 +-
 .../execution/datasources/v2/CreateIndexExec.scala |  9 ++-
 .../execution/datasources/v2/jdbc/JDBCTable.scala  |  3 +-
 .../org/apache/spark/sql/jdbc/JdbcDialects.scala   |  1 -
 .../org/apache/spark/sql/jdbc/MySQLDialect.scala   | 27 ---
 8 files changed, 45 insertions(+), 155 deletions(-)

diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
index d77dcb4..592f7d6 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/MySQLIntegrationSuite.scala
@@ -18,16 +18,11 @@
 package org.apache.spark.sql.jdbc.v2
 
 import java.sql.{Connection, SQLFeatureNotSupportedException}
-import java.util
 
 import org.scalatest.time.SpanSugar._
 
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.analysis.{IndexAlreadyExistsException, NoSuchIndexException}
-import org.apache.spark.sql.connector.catalog.{Catalogs, Identifier, TableCatalog}
-import org.apache.spark.sql.connector.catalog.index.SupportsIndex
-import org.apache.spark.sql.connector.expressions.{FieldReference, NamedReference}
 import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
 import org.apache.spark.sql.jdbc.{DatabaseOnDocker, DockerJDBCIntegrationSuite}
 import org.apache.spark.sql.types._
@@ -122,66 +117,4 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite with V2JDBCTest {
   }
 
   override def supportsIndex: Boolean = true
-
-  override def testIndexProperties(jdbcTable: SupportsIndex): Unit = {
-    val properties = new util.HashMap[String, String]();
-    properties.put("KEY_BLOCK_SIZE", "10")
-    properties.put("COMMENT", "'this is a comment'")
-    // MySQL doesn't allow property set on individual column, so use empty Array for
-    // column properties
-    jdbcTable.createIndex("i1", "BTREE", Array(FieldReference("col1")),
-      new util.HashMap[NamedReference, util.Map[String, String]](), properties)
-
-    var index = jdbcTable.listIndexes()
-    // The index property size is actually 1. Even though the index is created
-    // with properties "KEY_BLOCK_SIZE", "10" and "COMMENT", "'this is a comment'", when
-    // retrieving index using `SHOW INDEXES`, MySQL only returns `COMMENT`.
-    assert(index(0).properties.size == 1)
-    assert(index(0).properties.get("COMMENT").equals("this is a comment"))
-  }
-
-  override def testIndexUsingSQL(tbl: String): Unit = {
-    val loaded = Catalogs.load("mysql", conf)
-    val jdbcTable = loaded.asInstanceOf[TableCatalog]
-      .loadTable(Identifier.of(Array.empty[String], "new_table"))
-      .asInstanceOf[SupportsIndex]
-    assert(jdbcTable.indexExists("i1") == false)
-    assert(jdbcTable.indexExists("i2") == false)
-
-    val indexType = "DUMMY"
-    var m = intercept[UnsupportedOperationException] {
-      sql(s"CREATE index i1 ON $catalogName.new_table USING DUMMY (col1)")
-    }.getMessage
-    assert(m.contains(s"Index Type $indexType is not supported." +
-      s" The supported Index Types are: BTREE and HASH"))