Repository: spark Updated Branches: refs/heads/master c890c359b -> d1c193a2f
[SPARK-12855][MINOR][SQL][DOC][TEST] remove spark.sql.dialect from doc and test ## What changes were proposed in this pull request? Since developer API of pluggable parser has been removed in #10801, docs should be updated accordingly. ## How was this patch tested? This patch will not affect the real code path. Author: Daoyuan Wang <daoyuan.w...@intel.com> Closes #11758 from adrian-wang/spark12855. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/d1c193a2 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/d1c193a2 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/d1c193a2 Branch: refs/heads/master Commit: d1c193a2f1a5e2b98f5df1b86d7a7ec0ced13668 Parents: c890c35 Author: Daoyuan Wang <daoyuan.w...@intel.com> Authored: Wed Mar 16 22:52:10 2016 -0700 Committer: Reynold Xin <r...@databricks.com> Committed: Wed Mar 16 22:52:10 2016 -0700 ---------------------------------------------------------------------- docs/sql-programming-guide.md | 7 ------- sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala | 2 +- .../org/apache/spark/sql/hive/execution/HiveQuerySuite.scala | 6 ------ 3 files changed, 1 insertion(+), 14 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/spark/blob/d1c193a2/docs/sql-programming-guide.md ---------------------------------------------------------------------- diff --git a/docs/sql-programming-guide.md b/docs/sql-programming-guide.md index 3138fd5..2fdc97f 100644 --- a/docs/sql-programming-guide.md +++ b/docs/sql-programming-guide.md @@ -122,13 +122,6 @@ Spark build. If these dependencies are not a problem for your application then u is recommended for the 1.3 release of Spark. Future releases will focus on bringing `SQLContext` up to feature parity with a `HiveContext`. -The specific variant of SQL that is used to parse queries can also be selected using the -`spark.sql.dialect` option. 
This parameter can be changed using either the `setConf` method on -a `SQLContext` or by using a `SET key=value` command in SQL. For a `SQLContext`, the only dialect -available is "sql" which uses a simple SQL parser provided by Spark SQL. In a `HiveContext`, the -default is "hiveql", though "sql" is also available. Since the HiveQL parser is much more complete, -this is recommended for most use cases. - ## Creating DataFrames http://git-wip-us.apache.org/repos/asf/spark/blob/d1c193a2/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala ---------------------------------------------------------------------- diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala index e4d9308..0f0342c 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala @@ -80,7 +80,7 @@ class SQLContext private[sql]( def this(sparkContext: JavaSparkContext) = this(sparkContext.sc) // If spark.sql.allowMultipleContexts is true, we will throw an exception if a user - // wants to create a new root SQLContext (a SLQContext that is not created by newSession). + // wants to create a new root SQLContext (a SQLContext that is not created by newSession). 
private val allowMultipleContexts = sparkContext.conf.getBoolean( SQLConf.ALLOW_MULTIPLE_CONTEXTS.key, http://git-wip-us.apache.org/repos/asf/spark/blob/d1c193a2/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala ---------------------------------------------------------------------- diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala index d905f0c..ab4047d 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala @@ -270,12 +270,6 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter { "SELECT 11 % 10, IF((101.1 % 100.0) BETWEEN 1.01 AND 1.11, \"true\", \"false\"), " + "(101 / 2) % 10 FROM src LIMIT 1") - test("Query expressed in SQL") { - setConf("spark.sql.dialect", "sql") - assert(sql("SELECT 1").collect() === Array(Row(1))) - setConf("spark.sql.dialect", "hiveql") - } - test("Query expressed in HiveQL") { sql("FROM src SELECT key").collect() } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org