Repository: spark
Updated Branches:
  refs/heads/master 27e815c31 -> 43f4fd6f9


[SPARK-16867][SQL] createTable and alterTable in ExternalCatalog should not take db

## What changes were proposed in this pull request?

These two methods take a `CatalogTable` as a parameter, which already carries the
database information, so passing the database name separately is redundant.
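
For illustration, a minimal sketch of the new call pattern (the `createThenAlter` helper and `demoTable` value are hypothetical; the `createTable`/`alterTable` signatures are the ones introduced by this patch):

```scala
import org.apache.spark.sql.catalyst.catalog.{CatalogTable, ExternalCatalog}

// The database now comes from the CatalogTable's identifier rather than a
// separate argument. `demoTable` stands for any CatalogTable whose
// identifier already carries a database name.
def createThenAlter(catalog: ExternalCatalog, demoTable: CatalogTable): Unit = {
  // Before this patch: catalog.createTable("db2", demoTable, ignoreIfExists = false)
  catalog.createTable(demoTable, ignoreIfExists = false)

  // Implementations derive the database themselves, e.g.
  //   val db = demoTable.identifier.database.get
  catalog.alterTable(demoTable.copy(properties = Map("comment" -> "updated")))
}
```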

## How was this patch tested?

Existing tests.

Author: Wenchen Fan <wenc...@databricks.com>

Closes #14476 from cloud-fan/minor5.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/43f4fd6f
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/43f4fd6f
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/43f4fd6f

Branch: refs/heads/master
Commit: 43f4fd6f9bfff749af17e3c65b53a33f5ecb0922
Parents: 27e815c
Author: Wenchen Fan <wenc...@databricks.com>
Authored: Thu Aug 4 16:48:30 2016 +0800
Committer: Cheng Lian <l...@databricks.com>
Committed: Thu Aug 4 16:48:30 2016 +0800

----------------------------------------------------------------------
 .../sql/catalyst/catalog/ExternalCatalog.scala  |  9 +++++----
 .../sql/catalyst/catalog/InMemoryCatalog.scala  |  7 +++++--
 .../sql/catalyst/catalog/SessionCatalog.scala   |  4 ++--
 .../catalyst/catalog/ExternalCatalogSuite.scala | 20 ++++++++++----------
 .../spark/sql/hive/HiveExternalCatalog.scala    | 17 +++++------------
 .../sql/hive/MetastoreDataSourcesSuite.scala    |  2 +-
 6 files changed, 28 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/43f4fd6f/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
index 35fc6dd..27e1810 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
@@ -69,20 +69,21 @@ abstract class ExternalCatalog {
   // Tables
   // --------------------------------------------------------------------------
 
-  def createTable(db: String, tableDefinition: CatalogTable, ignoreIfExists: Boolean): Unit
+  def createTable(tableDefinition: CatalogTable, ignoreIfExists: Boolean): Unit
 
   def dropTable(db: String, table: String, ignoreIfNotExists: Boolean, purge: Boolean): Unit
 
   def renameTable(db: String, oldName: String, newName: String): Unit
 
   /**
-   * Alter a table whose name that matches the one specified in `tableDefinition`,
-   * assuming the table exists.
+   * Alter a table whose database and name match the ones specified in `tableDefinition`, assuming
+   * the table exists. Note that, even though we can specify database in `tableDefinition`, it's
+   * used to identify the table, not to alter the table's database, which is not allowed.
    *
    * Note: If the underlying implementation does not support altering a certain field,
    * this becomes a no-op.
    */
-  def alterTable(db: String, tableDefinition: CatalogTable): Unit
+  def alterTable(tableDefinition: CatalogTable): Unit
 
   def getTable(db: String, table: String): CatalogTable
 

http://git-wip-us.apache.org/repos/asf/spark/blob/43f4fd6f/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
index 67a90c8..9ebf7de 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
@@ -192,9 +192,10 @@ class InMemoryCatalog(hadoopConfig: Configuration = new Configuration) extends E
   // --------------------------------------------------------------------------
 
   override def createTable(
-      db: String,
       tableDefinition: CatalogTable,
       ignoreIfExists: Boolean): Unit = synchronized {
+    assert(tableDefinition.identifier.database.isDefined)
+    val db = tableDefinition.identifier.database.get
     requireDbExists(db)
     val table = tableDefinition.identifier.table
     if (tableExists(db, table)) {
@@ -266,7 +267,9 @@ class InMemoryCatalog(hadoopConfig: Configuration = new Configuration) extends E
     catalog(db).tables.remove(oldName)
   }
 
-  override def alterTable(db: String, tableDefinition: CatalogTable): Unit = synchronized {
+  override def alterTable(tableDefinition: CatalogTable): Unit = synchronized {
+    assert(tableDefinition.identifier.database.isDefined)
+    val db = tableDefinition.identifier.database.get
     requireTableExists(db, tableDefinition.identifier.table)
     catalog(db).tables(tableDefinition.identifier.table).table = tableDefinition
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/43f4fd6f/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 980efda..fabab32 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -223,7 +223,7 @@ class SessionCatalog(
     val table = formatTableName(tableDefinition.identifier.table)
     val newTableDefinition = tableDefinition.copy(identifier = TableIdentifier(table, Some(db)))
     requireDbExists(db)
-    externalCatalog.createTable(db, newTableDefinition, ignoreIfExists)
+    externalCatalog.createTable(newTableDefinition, ignoreIfExists)
   }
 
   /**
@@ -242,7 +242,7 @@ class SessionCatalog(
     val newTableDefinition = tableDefinition.copy(identifier = tableIdentifier)
     requireDbExists(db)
     requireTableExists(tableIdentifier)
-    externalCatalog.alterTable(db, newTableDefinition)
+    externalCatalog.alterTable(newTableDefinition)
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/spark/blob/43f4fd6f/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
index 963a225..201d39a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
@@ -157,7 +157,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
     val catalog = newBasicCatalog()
     val table =
       newTable("external_table1", "db2").copy(tableType = 
CatalogTableType.EXTERNAL)
-    catalog.createTable("db2", table, ignoreIfExists = false)
+    catalog.createTable(table, ignoreIfExists = false)
     val actual = catalog.getTable("db2", "external_table1")
     assert(actual.tableType === CatalogTableType.EXTERNAL)
   }
@@ -212,7 +212,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
   test("alter table") {
     val catalog = newBasicCatalog()
     val tbl1 = catalog.getTable("db2", "tbl1")
-    catalog.alterTable("db2", tbl1.copy(properties = Map("toh" -> "frem")))
+    catalog.alterTable(tbl1.copy(properties = Map("toh" -> "frem")))
     val newTbl1 = catalog.getTable("db2", "tbl1")
     assert(!tbl1.properties.contains("toh"))
     assert(newTbl1.properties.size == tbl1.properties.size + 1)
@@ -222,10 +222,10 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
   test("alter table when database/table does not exist") {
     val catalog = newBasicCatalog()
     intercept[AnalysisException] {
-      catalog.alterTable("unknown_db", newTable("tbl1", "unknown_db"))
+      catalog.alterTable(newTable("tbl1", "unknown_db"))
     }
     intercept[AnalysisException] {
-      catalog.alterTable("db2", newTable("unknown_table", "db2"))
+      catalog.alterTable(newTable("unknown_table", "db2"))
     }
   }
 
@@ -266,7 +266,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
   test("basic create and list partitions") {
     val catalog = newEmptyCatalog()
     catalog.createDatabase(newDb("mydb"), ignoreIfExists = false)
-    catalog.createTable("mydb", newTable("tbl", "mydb"), ignoreIfExists = 
false)
+    catalog.createTable(newTable("tbl", "mydb"), ignoreIfExists = false)
     catalog.createPartitions("mydb", "tbl", Seq(part1, part2), ignoreIfExists 
= false)
     assert(catalogPartitionsEqual(catalog, "mydb", "tbl", Seq(part1, part2)))
   }
@@ -555,7 +555,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
       schema = new StructType().add("a", "int").add("b", "string")
     )
 
-    catalog.createTable("db1", table, ignoreIfExists = false)
+    catalog.createTable(table, ignoreIfExists = false)
     assert(exists(db.locationUri, "my_table"))
 
     catalog.renameTable("db1", "my_table", "your_table")
@@ -573,7 +573,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
         None, None, None, false, Map.empty),
       schema = new StructType().add("a", "int").add("b", "string")
     )
-    catalog.createTable("db1", externalTable, ignoreIfExists = false)
+    catalog.createTable(externalTable, ignoreIfExists = false)
     assert(!exists(db.locationUri, "external_table"))
   }
 
@@ -591,7 +591,7 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac
         .add("b", "string"),
       partitionColumnNames = Seq("a", "b")
     )
-    catalog.createTable("db1", table, ignoreIfExists = false)
+    catalog.createTable(table, ignoreIfExists = false)
 
     catalog.createPartitions("db1", "tbl", Seq(part1, part2), ignoreIfExists = 
false)
     assert(exists(databaseDir, "tbl", "a=1", "b=2"))
@@ -665,8 +665,8 @@ abstract class CatalogTestUtils {
     catalog.createDatabase(newDb("default"), ignoreIfExists = true)
     catalog.createDatabase(newDb("db1"), ignoreIfExists = false)
     catalog.createDatabase(newDb("db2"), ignoreIfExists = false)
-    catalog.createTable("db2", newTable("tbl1", "db2"), ignoreIfExists = false)
-    catalog.createTable("db2", newTable("tbl2", "db2"), ignoreIfExists = false)
+    catalog.createTable(newTable("tbl1", "db2"), ignoreIfExists = false)
+    catalog.createTable(newTable("tbl2", "db2"), ignoreIfExists = false)
     catalog.createPartitions("db2", "tbl2", Seq(part1, part2), ignoreIfExists 
= false)
     catalog.createFunction("db2", newFunc("func1", Some("db2")))
     catalog

http://git-wip-us.apache.org/repos/asf/spark/blob/43f4fd6f/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index cf2b92f..8302e3e 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -77,14 +77,6 @@ private[spark] class HiveExternalCatalog(client: HiveClient, hadoopConf: Configu
     }
   }
 
-  private def requireDbMatches(db: String, table: CatalogTable): Unit = {
-    if (table.identifier.database != Some(db)) {
-      throw new AnalysisException(
-        s"Provided database '$db' does not match the one specified in the " +
-        s"table definition (${table.identifier.database.getOrElse("n/a")})")
-    }
-  }
-
   private def requireTableExists(db: String, table: String): Unit = {
     withClient { getTable(db, table) }
   }
@@ -147,11 +139,11 @@ private[spark] class HiveExternalCatalog(client: HiveClient, hadoopConf: Configu
   // --------------------------------------------------------------------------
 
   override def createTable(
-      db: String,
       tableDefinition: CatalogTable,
       ignoreIfExists: Boolean): Unit = withClient {
+    assert(tableDefinition.identifier.database.isDefined)
+    val db = tableDefinition.identifier.database.get
     requireDbExists(db)
-    requireDbMatches(db, tableDefinition)
 
     if (
     // If this is an external data source table...
@@ -211,8 +203,9 @@ private[spark] class HiveExternalCatalog(client: HiveClient, hadoopConf: Configu
    * Note: As of now, this only supports altering table properties, serde properties,
    * and num buckets!
    */
-  override def alterTable(db: String, tableDefinition: CatalogTable): Unit = withClient {
-    requireDbMatches(db, tableDefinition)
+  override def alterTable(tableDefinition: CatalogTable): Unit = withClient {
+    assert(tableDefinition.identifier.database.isDefined)
+    val db = tableDefinition.identifier.database.get
     requireTableExists(db, tableDefinition.identifier.table)
     client.alterTable(tableDefinition)
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/43f4fd6f/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index c87bda9..c36b027 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -741,7 +741,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
           DATASOURCE_SCHEMA -> schema.json,
           "EXTERNAL" -> "FALSE"))
 
-      sharedState.externalCatalog.createTable("default", hiveTable, ignoreIfExists = false)
+      sharedState.externalCatalog.createTable(hiveTable, ignoreIfExists = false)
 
       sessionState.refreshTable(tableName)
       val actualSchema = table(tableName).schema

