Repository: spark
Updated Branches:
  refs/heads/master fd4ba3f62 -> 261c55dd8


[SPARK-17250][SQL] Remove HiveClient and setCurrentDatabase from 
HiveSessionCatalog

### What changes were proposed in this pull request?
This is the first step to remove `HiveClient` from `HiveSessionState`. In the 
metastore interaction, we always use the fully qualified table name when 
accessing/operating a table. That means we always specify the database. Thus, 
it is not necessary to use `HiveClient` to change the active database in the 
Hive metastore.

In `HiveSessionCatalog`, `setCurrentDatabase` is the only function that uses 
`HiveClient`. Thus, we can remove it after removing `setCurrentDatabase`.

### How was this patch tested?
The existing test cases.

Author: gatorsmile <gatorsm...@gmail.com>

Closes #14821 from gatorsmile/setCurrentDB.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/261c55dd
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/261c55dd
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/261c55dd

Branch: refs/heads/master
Commit: 261c55dd8808502fb7f3384eb537d26a4a8123d7
Parents: fd4ba3f
Author: gatorsmile <gatorsm...@gmail.com>
Authored: Fri Aug 26 11:19:03 2016 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Fri Aug 26 11:19:03 2016 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala  | 7 -------
 .../scala/org/apache/spark/sql/hive/HiveSessionState.scala    | 1 -
 2 files changed, 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/261c55dd/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index 86d3b6d..bfa5899 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -34,7 +34,6 @@ import org.apache.spark.sql.catalyst.expressions.{Cast, 
Expression, ExpressionIn
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.hive.HiveShim.HiveFunctionWrapper
-import org.apache.spark.sql.hive.client.HiveClient
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{DecimalType, DoubleType}
 import org.apache.spark.util.Utils
@@ -42,7 +41,6 @@ import org.apache.spark.util.Utils
 
 private[sql] class HiveSessionCatalog(
     externalCatalog: HiveExternalCatalog,
-    client: HiveClient,
     sparkSession: SparkSession,
     functionResourceLoader: FunctionResourceLoader,
     functionRegistry: FunctionRegistry,
@@ -55,11 +53,6 @@ private[sql] class HiveSessionCatalog(
     conf,
     hadoopConf) {
 
-  override def setCurrentDatabase(db: String): Unit = {
-    super.setCurrentDatabase(db)
-    client.setCurrentDatabase(db)
-  }
-
   override def lookupRelation(name: TableIdentifier, alias: Option[String]): 
LogicalPlan = {
     val table = formatTableName(name.table)
     if (name.database.isDefined || !tempTables.contains(table)) {

http://git-wip-us.apache.org/repos/asf/spark/blob/261c55dd/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
index f3c4135..15e1255 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
@@ -45,7 +45,6 @@ private[hive] class HiveSessionState(sparkSession: 
SparkSession)
   override lazy val catalog = {
     new HiveSessionCatalog(
       
sparkSession.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog],
-      metadataHive,
       sparkSession,
       functionResourceLoader,
       functionRegistry,


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to