Repository: spark
Updated Branches:
  refs/heads/master 76c4bf59f -> c4021401e


[SQL] [Minor] HiveParquetSuite tests were disabled by mistake; re-enable them


Author: Cheng Lian <l...@databricks.com>

Closes #4440 from liancheng/parquet-oops and squashes the following commits:

f21ede4 [Cheng Lian] HiveParquetSuite tests were disabled by mistake; re-enable them.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c4021401
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c4021401
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c4021401

Branch: refs/heads/master
Commit: c4021401e326cd5a412a70425f5c75405284880e
Parents: 76c4bf5
Author: Cheng Lian <l...@databricks.com>
Authored: Fri Feb 6 15:23:42 2015 -0800
Committer: Cheng Lian <l...@databricks.com>
Committed: Fri Feb 6 15:23:42 2015 -0800

----------------------------------------------------------------------
 .../apache/spark/sql/parquet/ParquetQuerySuite.scala   |  1 -
 .../apache/spark/sql/parquet/HiveParquetSuite.scala    | 13 ++++++++++---
 2 files changed, 10 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/c4021401/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
index 48c7598..cba0683 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
@@ -35,7 +35,6 @@ class ParquetQuerySuite extends QueryTest with ParquetTest {
       }
     }
 
-    // TODO Re-enable this after data source insertion API is merged
     test(s"$prefix: appending") {
       val data = (0 until 10).map(i => (i, i.toString))
       withParquetTable(data, "t") {
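
The hunk above only drops a stale TODO comment; the appending test itself
scopes a temporary Parquet table through the ParquetTest helper
withParquetTable. As a rough, Spark-free illustration of the loan pattern
behind that helper (the registry, signature, and cleanup details below are
assumptions for the sketch, not the real trait):

import scala.collection.mutable

object ParquetTestSketch {
  // Stand-in registry for temporary tables; the real helper registers the
  // data with the SQLContext catalog rather than a plain map.
  private val tables = mutable.Map.empty[String, Seq[Any]]

  // Loan pattern behind withParquetTable: expose `data` as a temporary
  // table named `name`, run the test body, then drop the table even if
  // the body throws.
  def withParquetTable[T](data: Seq[T], name: String)(body: => Unit): Unit = {
    tables(name) = data
    try body
    finally tables.remove(name)
  }

  def main(args: Array[String]): Unit = {
    val data = (0 until 10).map(i => (i, i.toString))
    withParquetTable(data, "t") {
      // A real test body would issue SQL against table "t" here.
      println(s"table t has ${tables("t").size} rows")
    }
    assert(!tables.contains("t"), "table dropped after the body")
  }
}

The table exists only for the duration of the body, so re-enabled tests
cannot leak state into one another.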

http://git-wip-us.apache.org/repos/asf/spark/blob/c4021401/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala
index eae69af..e89b448 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/parquet/HiveParquetSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.parquet
 
-import org.apache.spark.sql.QueryTest
+import org.apache.spark.sql.{SQLConf, QueryTest}
 import org.apache.spark.sql.catalyst.expressions.Row
 import org.apache.spark.sql.hive.test.TestHive
 
@@ -64,8 +64,7 @@ class HiveParquetSuite extends QueryTest with ParquetTest {
       }
     }
 
-    // TODO Re-enable this after data source insertion API is merged
-    ignore(s"$prefix: INSERT OVERWRITE TABLE Parquet table") {
+    test(s"$prefix: INSERT OVERWRITE TABLE Parquet table") {
       withParquetTable((1 to 10).map(i => (i, s"val_$i")), "t") {
         withTempPath { file =>
           sql("SELECT * FROM t LIMIT 
1").saveAsParquetFile(file.getCanonicalPath)
@@ -81,4 +80,12 @@ class HiveParquetSuite extends QueryTest with ParquetTest {
       }
     }
   }
+
+  withSQLConf(SQLConf.PARQUET_USE_DATA_SOURCE_API -> "true") {
+    run("Parquet data source enabled")
+  }
+
+  withSQLConf(SQLConf.PARQUET_USE_DATA_SOURCE_API -> "false") {
+    run("Parquet data source disabled")
+  }
 }
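
The new withSQLConf calls at the bottom of HiveParquetSuite run the entire
suite twice, once with the Parquet data source API enabled and once with it
disabled. Here is a minimal, self-contained sketch of that pattern, assuming
a simple get/set/unset conf store; the real SQLConf is richer, and the key
string below is an assumption about what SQLConf.PARQUET_USE_DATA_SOURCE_API
expands to:

import scala.collection.mutable

object SQLConfSketch {
  // Stand-in for SQLConf: a mutable key/value store, illustration only.
  private val settings = mutable.Map.empty[String, String]

  def set(key: String, value: String): Unit = settings(key) = value
  def unset(key: String): Unit = settings.remove(key)
  def get(key: String): Option[String] = settings.get(key)

  // The withSQLConf pattern from the diff: apply the given pairs, run the
  // body, then restore the previous values so later tests start clean.
  def withSQLConf(pairs: (String, String)*)(body: => Unit): Unit = {
    val previous = pairs.map { case (k, _) => k -> get(k) }
    pairs.foreach { case (k, v) => set(k, v) }
    try body
    finally previous.foreach {
      case (k, Some(v)) => set(k, v)
      case (k, None)    => unset(k)
    }
  }

  def main(args: Array[String]): Unit = {
    // Key string is an assumption, not read from the real SQLConf.
    val key = "spark.sql.parquet.useDataSourceApi"
    def run(label: String): Unit = println(s"$label: $key = ${get(key)}")

    withSQLConf(key -> "true")(run("Parquet data source enabled"))
    withSQLConf(key -> "false")(run("Parquet data source disabled"))
  }
}

Restoring the previous values in the finally block guarantees that later
tests start from a clean configuration even when a test body throws.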

