Repository: spark
Updated Branches:
  refs/heads/branch-2.0 b430aa98c -> 2cb84dd23


[SPARK-15565][SQL] Add the File Scheme to the Default Value of WAREHOUSE_PATH

#### What changes were proposed in this pull request?
The default value of `spark.sql.warehouse.dir` is
`System.getProperty("user.dir")/spark-warehouse`. Since
`System.getProperty("user.dir")` returns a local directory, we should
explicitly prefix the default with the `file:` scheme so that it always
resolves to the local filesystem.
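
For illustration, here is a minimal sketch (not part of this commit) of how
the new default surfaces to users; the object name, app name, and the
`demo_db` database are illustrative only:

```scala
import org.apache.spark.sql.SparkSession

object WarehousePathDemo {
  def main(args: Array[String]): Unit = {
    // Build a local session without setting spark.sql.warehouse.dir,
    // so the built-in default is used.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("WarehousePathDemo")
      .getOrCreate()

    // A managed database created under the default warehouse location now
    // reports an explicit file: URI, e.g.
    //   file:/home/alice/work/spark-warehouse/demo_db.db
    // rather than a scheme-less path that Hadoop would resolve against
    // fs.defaultFS (often HDFS on a cluster).
    spark.sql("CREATE DATABASE demo_db")
    spark.sql("DESCRIBE DATABASE demo_db").show(truncate = false)

    spark.sql("DROP DATABASE demo_db CASCADE")
    spark.stop()
  }
}
```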

cc yhuai

#### How was this patch tested?
Added two test cases.

Author: gatorsmile <gatorsm...@gmail.com>

Closes #13348 from gatorsmile/addSchemeToDefaultWarehousePath.

(cherry picked from commit c17272902c95290beca274ee6316a8a98fd7a725)
Signed-off-by: Yin Huai <yh...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2cb84dd2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2cb84dd2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2cb84dd2

Branch: refs/heads/branch-2.0
Commit: 2cb84dd2356e782b9e606cd126057726fcf6f228
Parents: b430aa9
Author: gatorsmile <gatorsm...@gmail.com>
Authored: Fri May 27 09:54:31 2016 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Fri May 27 09:54:43 2016 -0700

----------------------------------------------------------------------
 .../org/apache/spark/sql/internal/SQLConf.scala |  2 +-
 .../spark/sql/execution/command/DDLSuite.scala  | 25 ++++++++++++++++++++
 .../spark/sql/internal/SQLConfSuite.scala       | 12 ++++++++++
 3 files changed, 38 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/2cb84dd2/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 4efefda..d1db0dd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -55,7 +55,7 @@ object SQLConf {
   val WAREHOUSE_PATH = SQLConfigBuilder("spark.sql.warehouse.dir")
     .doc("The default location for managed databases and tables.")
     .stringConf
-    .createWithDefault("${system:user.dir}/spark-warehouse")
+    .createWithDefault("file:${system:user.dir}/spark-warehouse")
 
   val OPTIMIZER_MAX_ITERATIONS = SQLConfigBuilder("spark.sql.optimizer.maxIterations")
     .internal()

http://git-wip-us.apache.org/repos/asf/spark/blob/2cb84dd2/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index e32521a..e975756 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -171,6 +171,31 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     }
   }
 
+  test("Create Database using Default Warehouse Path") {
+    withSQLConf(SQLConf.WAREHOUSE_PATH.key -> "") {
+      // Will use the default location if and only if we unset the conf
+      spark.conf.unset(SQLConf.WAREHOUSE_PATH.key)
+      val catalog = spark.sessionState.catalog
+      val dbName = "db1"
+      try {
+        sql(s"CREATE DATABASE $dbName")
+        val db1 = catalog.getDatabaseMetadata(dbName)
+        val expectedLocation =
+          "file:" + appendTrailingSlash(System.getProperty("user.dir")) +
+            s"spark-warehouse/$dbName.db"
+        assert(db1 == CatalogDatabase(
+          dbName,
+          "",
+          expectedLocation,
+          Map.empty))
+        sql(s"DROP DATABASE $dbName CASCADE")
+        assert(!catalog.databaseExists(dbName))
+      } finally {
+        catalog.reset()
+      }
+    }
+  }
+
   test("Create/Drop Database - location") {
     val catalog = spark.sessionState.catalog
     val databaseNames = Seq("db1", "`database`")

http://git-wip-us.apache.org/repos/asf/spark/blob/2cb84dd2/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index ad5365a..3d4fc75 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -207,4 +207,16 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
     }
   }
 
+  test("default value of WAREHOUSE_PATH") {
+    val original = spark.conf.get(SQLConf.WAREHOUSE_PATH)
+    try {
+      // to get the default value, always unset it
+      spark.conf.unset(SQLConf.WAREHOUSE_PATH.key)
+      assert(spark.sessionState.conf.warehousePath
+        === s"file:${System.getProperty("user.dir")}/spark-warehouse")
+    } finally {
+      sql(s"set ${SQLConf.WAREHOUSE_PATH}=$original")
+    }
+  }
+
 }

