Repository: spark
Updated Branches:
  refs/heads/master ce5e6a284 -> a752ddad7


[SPARK-11398] [SQL] unnecessary def dialectClassName in HiveContext, and 
misleading dialect conf at the start of spark-sql

1. def dialectClassName in HiveContext is unnecessary.
In HiveContext, if conf.dialect == "hiveql", getSQLDialect() will return new 
HiveQLDialect(this);
else it will use super.getSQLDialect(). Then in super.getSQLDialect(), it calls 
dialectClassName, which is overridden in HiveContext and still returns 
super.dialectClassName.
So we'll never reach the code "classOf[HiveQLDialect].getCanonicalName" of def 
dialectClassName in HiveContext.

2. When we start bin/spark-sql, the default context is HiveContext, and the 
corresponding dialect is hiveql.
However, if we type "set spark.sql.dialect;", the result is "sql", which is 
inconsistent with the actual dialect and is misleading. For example, we can use 
sql like "create table" which is only allowed in hiveql, but this dialect conf 
shows it's "sql".
Although this problem will not cause any execution error, it's misleading to 
Spark SQL users. Therefore I think we should fix it.
In this PR, while processing "set spark.sql.dialect" in SetCommand, I use 
"conf.dialect" instead of "getConf()" for the case of key == 
SQLConf.DIALECT.key, so that it will return the right dialect conf.

Author: Zhenhua Wang <wangzhen...@huawei.com>

Closes #9349 from wzhfy/dialect.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a752ddad
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a752ddad
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a752ddad

Branch: refs/heads/master
Commit: a752ddad7fe1d0f01b51f7551ec017ff87e1eea5
Parents: ce5e6a2
Author: Zhenhua Wang <wangzhen...@huawei.com>
Authored: Wed Nov 4 17:16:00 2015 -0800
Committer: Davies Liu <davies....@gmail.com>
Committed: Wed Nov 4 17:16:00 2015 -0800

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/sql/execution/commands.scala  | 6 +++++-
 .../main/scala/org/apache/spark/sql/hive/HiveContext.scala    | 6 ------
 .../org/apache/spark/sql/hive/execution/SQLQuerySuite.scala   | 7 +++++++
 3 files changed, 12 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/a752ddad/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
index 8566076..e5f60b1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
@@ -156,7 +156,11 @@ case class SetCommand(kv: Option[(String, 
Option[String])]) extends RunnableComm
       val runFunc = (sqlContext: SQLContext) => {
         val value =
           try {
-            sqlContext.getConf(key)
+            if (key == SQLConf.DIALECT.key) {
+              sqlContext.conf.dialect
+            } else {
+              sqlContext.getConf(key)
+            }
           } catch {
             case _: NoSuchElementException => "<undefined>"
           }

http://git-wip-us.apache.org/repos/asf/spark/blob/a752ddad/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 83a81cf..1f51353 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -555,12 +555,6 @@ class HiveContext private[hive](
     override def caseSensitiveAnalysis: Boolean = 
getConf(SQLConf.CASE_SENSITIVE, false)
   }
 
-  protected[sql] override def dialectClassName = if (conf.dialect == "hiveql") 
{
-    classOf[HiveQLDialect].getCanonicalName
-  } else {
-    super.dialectClassName
-  }
-
   protected[sql] override def getSQLDialect(): ParserDialect = {
     if (conf.dialect == "hiveql") {
       new HiveQLDialect(this)

http://git-wip-us.apache.org/repos/asf/spark/blob/a752ddad/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index fd38064..af48d47 100644
--- 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -335,6 +335,13 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils 
with TestHiveSingleton {
     }
   }
 
+  test("SQL dialect at the start of HiveContext") {
+    val hiveContext = new HiveContext(sqlContext.sparkContext)
+    val dialectConf = "spark.sql.dialect"
+    checkAnswer(hiveContext.sql(s"set $dialectConf"), Row(dialectConf, 
"hiveql"))
+    assert(hiveContext.getSQLDialect().getClass === classOf[HiveQLDialect])
+  }
+
   test("SQL Dialect Switching") {
     assert(getSQLDialect().getClass === classOf[HiveQLDialect])
     setConf("spark.sql.dialect", classOf[MyDialect].getCanonicalName())


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to