Repository: spark
Updated Branches:
  refs/heads/branch-2.0 da34e8e8f -> 31c3bcb46


[SPARK-16690][TEST] rename SQLTestUtils.withTempTable to withTempView

After https://github.com/apache/spark/pull/12945, we renamed `registerTempTable` to 
`createTempView`, since it actually creates a view. This PR renames 
`SQLTestUtils.withTempTable` to `withTempView` to reflect that change.
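
For illustration, this is the usage pattern the renamed helper supports in tests
(the test name and data below are hypothetical, not taken from this patch):

    test("query a temp view") {
      withTempView("people") {
        // createOrReplaceTempView registers the view; withTempView drops it
        // after the body runs, even if an assertion fails.
        Seq((1, "a"), (2, "b")).toDF("id", "name").createOrReplaceTempView("people")
        checkAnswer(sql("SELECT id FROM people WHERE name = 'a'"), Row(1))
      }
    }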

N/A

Author: Wenchen Fan <wenc...@databricks.com>

Closes #14318 from cloud-fan/minor4.

(cherry picked from commit 86c275206605c44e1ebca2f166d62868e44bf029)
Signed-off-by: Reynold Xin <r...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/31c3bcb4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/31c3bcb4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/31c3bcb4

Branch: refs/heads/branch-2.0
Commit: 31c3bcb46cb56b57d3cdcb8c42e7056dab0f7601
Parents: da34e8e
Author: Wenchen Fan <wenc...@databricks.com>
Authored: Sat Jul 23 11:39:48 2016 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Sat Jul 23 11:42:49 2016 -0700

----------------------------------------------------------------------
 .../org/apache/spark/sql/CachedTableSuite.scala | 22 ++++++-------
 .../apache/spark/sql/MetadataCacheSuite.scala   |  2 +-
 .../org/apache/spark/sql/SQLQuerySuite.scala    | 34 ++++++++++----------
 .../org/apache/spark/sql/StatisticsSuite.scala  |  4 +--
 .../org/apache/spark/sql/SubquerySuite.scala    |  4 +--
 .../spark/sql/execution/PlannerSuite.scala      |  8 ++---
 .../spark/sql/execution/command/DDLSuite.scala  |  6 ++--
 .../execution/datasources/json/JsonSuite.scala  |  6 ++--
 .../ParquetPartitionDiscoverySuite.scala        | 10 +++---
 .../datasources/parquet/ParquetTest.scala       |  2 +-
 .../sql/execution/metric/SQLMetricsSuite.scala  |  8 ++---
 .../org/apache/spark/sql/jdbc/JDBCSuite.scala   |  2 +-
 .../apache/spark/sql/test/SQLTestUtils.scala    |  2 +-
 .../spark/sql/hive/CachedTableSuite.scala       | 10 +++---
 .../spark/sql/hive/HiveMetadataCacheSuite.scala |  2 +-
 .../spark/sql/hive/HiveParquetSuite.scala       |  4 +--
 .../sql/hive/MetastoreDataSourcesSuite.scala    |  8 ++---
 .../hive/ParquetHiveCompatibilitySuite.scala    |  2 +-
 .../hive/execution/AggregationQuerySuite.scala  |  2 +-
 .../sql/hive/execution/HiveCommandSuite.scala   |  4 +--
 .../sql/hive/execution/HiveExplainSuite.scala   |  2 +-
 .../sql/hive/execution/HiveTableScanSuite.scala |  2 +-
 .../spark/sql/hive/execution/HiveUDFSuite.scala |  2 +-
 .../sql/hive/execution/SQLQuerySuite.scala      |  8 ++---
 .../spark/sql/hive/orc/OrcQuerySuite.scala      |  6 ++--
 .../org/apache/spark/sql/hive/orc/OrcTest.scala |  2 +-
 .../apache/spark/sql/hive/parquetSuites.scala   |  2 +-
 .../sql/sources/HadoopFsRelationTest.scala      | 12 +++----
 28 files changed, 89 insertions(+), 89 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index 6f6abfa..f42402e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -73,7 +73,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
   }
 
   test("cache temp table") {
-    withTempTable("tempTable") {
+    withTempView("tempTable") {
       testData.select('key).createOrReplaceTempView("tempTable")
       assertCached(sql("SELECT COUNT(*) FROM tempTable"), 0)
       spark.catalog.cacheTable("tempTable")
@@ -97,7 +97,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
   }
 
   test("cache table as select") {
-    withTempTable("tempTable") {
+    withTempView("tempTable") {
       sql("CACHE TABLE tempTable AS SELECT key FROM testData")
       assertCached(sql("SELECT COUNT(*) FROM tempTable"))
       spark.catalog.uncacheTable("tempTable")
@@ -227,7 +227,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
   }
 
   test("CACHE TABLE tableName AS SELECT * FROM anotherTable") {
-    withTempTable("testCacheTable") {
+    withTempView("testCacheTable") {
       sql("CACHE TABLE testCacheTable AS SELECT * FROM testData")
       assertCached(spark.table("testCacheTable"))
 
@@ -244,7 +244,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
   }
 
   test("CACHE TABLE tableName AS SELECT ...") {
-    withTempTable("testCacheTable") {
+    withTempView("testCacheTable") {
       sql("CACHE TABLE testCacheTable AS SELECT key FROM testData LIMIT 10")
       assertCached(spark.table("testCacheTable"))
 
@@ -413,7 +413,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
     // Set up two tables distributed in the same way. Try this with the data distributed into
     // different number of partitions.
     for (numPartitions <- 1 until 10 by 4) {
-      withTempTable("t1", "t2") {
+      withTempView("t1", "t2") {
        testData.repartition(numPartitions, $"key").createOrReplaceTempView("t1")
        testData2.repartition(numPartitions, $"a").createOrReplaceTempView("t2")
         spark.catalog.cacheTable("t1")
@@ -435,7 +435,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
     }
 
    // Distribute the tables into non-matching number of partitions. Need to shuffle one side.
-    withTempTable("t1", "t2") {
+    withTempView("t1", "t2") {
       testData.repartition(6, $"key").createOrReplaceTempView("t1")
       testData2.repartition(3, $"a").createOrReplaceTempView("t2")
       spark.catalog.cacheTable("t1")
@@ -452,7 +452,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
     }
 
    // One side of join is not partitioned in the desired way. Need to shuffle one side.
-    withTempTable("t1", "t2") {
+    withTempView("t1", "t2") {
       testData.repartition(6, $"value").createOrReplaceTempView("t1")
       testData2.repartition(6, $"a").createOrReplaceTempView("t2")
       spark.catalog.cacheTable("t1")
@@ -468,7 +468,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
       spark.catalog.uncacheTable("t2")
     }
 
-    withTempTable("t1", "t2") {
+    withTempView("t1", "t2") {
       testData.repartition(6, $"value").createOrReplaceTempView("t1")
       testData2.repartition(12, $"a").createOrReplaceTempView("t2")
       spark.catalog.cacheTable("t1")
@@ -487,7 +487,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
    // One side of join is not partitioned in the desired way. Since the number of partitions of
    // the side that has already partitioned is smaller than the side that is not partitioned,
     // we shuffle both side.
-    withTempTable("t1", "t2") {
+    withTempView("t1", "t2") {
       testData.repartition(6, $"value").createOrReplaceTempView("t1")
       testData2.repartition(3, $"a").createOrReplaceTempView("t2")
       spark.catalog.cacheTable("t1")
@@ -504,7 +504,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
 
    // repartition's column ordering is different from group by column ordering.
     // But they use the same set of columns.
-    withTempTable("t1") {
+    withTempView("t1") {
       testData.repartition(6, $"value", $"key").createOrReplaceTempView("t1")
       spark.catalog.cacheTable("t1")
 
@@ -520,7 +520,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
     // We will still shuffle because hashcodes of a row depend on the column ordering.
     // If we do not shuffle, we may actually partition two tables in totally two different way.
     // See PartitioningSuite for more details.
-    withTempTable("t1", "t2") {
+    withTempView("t1", "t2") {
       val df1 = testData
       df1.repartition(6, $"value", $"key").createOrReplaceTempView("t1")
       val df2 = testData2.select($"a", $"b".cast("string"))

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala
index 3f8cc81..eacf254 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/MetadataCacheSuite.scala
@@ -60,7 +60,7 @@ class MetadataCacheSuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-16337 temporary view refresh") {
-    withTempTable("view_refresh") { withTempPath { (location: File) =>
+    withTempView("view_refresh") { withTempPath { (location: File) =>
       // Create a Parquet directory
       spark.range(start = 0, end = 100, step = 1, numPartitions = 3)
         .write.parquet(location.getAbsolutePath)

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 8a5ff2c..f1a2410 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -805,7 +805,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("count of empty table") {
-    withTempTable("t") {
+    withTempView("t") {
       Seq.empty[(Int, Int)].toDF("a", "b").createOrReplaceTempView("t")
       checkAnswer(
         sql("select count(a) from t"),
@@ -1671,7 +1671,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-7952: fix the equality check between boolean and numeric types") 
{
-    withTempTable("t") {
+    withTempView("t") {
       // numeric field i, boolean field j, result of i = j, result of i <=> j
       Seq[(Integer, java.lang.Boolean, java.lang.Boolean, java.lang.Boolean)](
         (1, true, true, true),
@@ -1691,7 +1691,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-7067: order by queries for complex ExtractValue chain") {
-    withTempTable("t") {
+    withTempView("t") {
       spark.read.json(sparkContext.makeRDD(
         """{"a": {"b": [{"c": 1}]}, "b": [{"d": 1}]}""" :: 
Nil)).createOrReplaceTempView("t")
       checkAnswer(sql("SELECT a.b FROM t ORDER BY b[0].d"), Row(Seq(Row(1))))
@@ -1699,14 +1699,14 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-8782: ORDER BY NULL") {
-    withTempTable("t") {
+    withTempView("t") {
       Seq((1, 2), (1, 2)).toDF("a", "b").createOrReplaceTempView("t")
       checkAnswer(sql("SELECT * FROM t ORDER BY NULL"), Seq(Row(1, 2), Row(1, 
2)))
     }
   }
 
   test("SPARK-8837: use keyword in column name") {
-    withTempTable("t") {
+    withTempView("t") {
       val df = Seq(1 -> "a").toDF("count", "sort")
       checkAnswer(df.filter("count > 0"), Row(1, "a"))
       df.createOrReplaceTempView("t")
@@ -1820,7 +1820,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-9511: error with table starting with number") {
-    withTempTable("1one") {
+    withTempView("1one") {
       sparkContext.parallelize(1 to 10).map(i => (i, i.toString))
         .toDF("num", "str")
         .createOrReplaceTempView("1one")
@@ -1864,7 +1864,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-10130 type coercion for IF should have children resolved first") 
{
-    withTempTable("src") {
+    withTempView("src") {
       Seq((1, 1), (-1, 1)).toDF("key", "value").createOrReplaceTempView("src")
       checkAnswer(
         sql("SELECT IF(a > 0, a, 0) FROM (SELECT key a FROM src) temp"), 
Seq(Row(1), Row(0)))
@@ -1872,7 +1872,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-10389: order by non-attribute grouping expression on Aggregate") 
{
-    withTempTable("src") {
+    withTempView("src") {
       Seq((1, 1), (-1, 1)).toDF("key", "value").createOrReplaceTempView("src")
       checkAnswer(sql("SELECT MAX(value) FROM src GROUP BY key + 1 ORDER BY 
key + 1"),
         Seq(Row(1), Row(1)))
@@ -1976,7 +1976,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-11032: resolve having correctly") {
-    withTempTable("src") {
+    withTempView("src") {
       Seq(1 -> "a").toDF("i", "j").createOrReplaceTempView("src")
       checkAnswer(
         sql("SELECT MIN(t.i) FROM (SELECT * FROM src WHERE i > 0) t 
HAVING(COUNT(1) > 0)"),
@@ -2081,7 +2081,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
       Row(1, 1) :: Row(1, 2) :: Row(2, 1) :: Row(2, 2) :: Row(3, 1) :: Row(3, 2) :: Nil)
 
     // Try with a temporary view
-    withTempTable("nestedStructTable") {
+    withTempView("nestedStructTable") {
       nestedStructData.createOrReplaceTempView("nestedStructTable")
       checkAnswer(
         sql("SELECT record.* FROM nestedStructTable"),
@@ -2104,7 +2104,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
         | SELECT struct(`col$.a_`, `a.b.c.`) as `r&&b.c` FROM
        |   (SELECT struct(a, b) as `col$.a_`, struct(b, a) as `a.b.c.` FROM testData2) tmp
       """.stripMargin)
-    withTempTable("specialCharacterTable") {
+    withTempView("specialCharacterTable") {
       specialCharacterPath.createOrReplaceTempView("specialCharacterTable")
       checkAnswer(
         specialCharacterPath.select($"`r&&b.c`.*"),
@@ -2128,7 +2128,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   test("Struct Star Expansion - Name conflict") {
     // Create a data set that contains a naming conflict
    val nameConflict = sql("SELECT struct(a, b) as nameConflict, a as a FROM testData2")
-    withTempTable("nameConflict") {
+    withTempView("nameConflict") {
       nameConflict.createOrReplaceTempView("nameConflict")
       // Unqualified should resolve to table.
       checkAnswer(sql("SELECT nameConflict.* FROM nameConflict"),
@@ -2149,7 +2149,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("Star Expansion - table with zero column") {
-    withTempTable("temp_table_no_cols") {
+    withTempView("temp_table_no_cols") {
       val rddNoCols = sparkContext.parallelize(1 to 10).map(_ => Row.empty)
       val dfNoCols = spark.createDataFrame(rddNoCols, StructType(Seq.empty))
       dfNoCols.createTempView("temp_table_no_cols")
@@ -2464,7 +2464,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
 
   test("SPARK-13056: Null in map value causes NPE") {
    val df = Seq(1 -> Map("abc" -> "somestring", "cba" -> null)).toDF("key", "value")
-    withTempTable("maptest") {
+    withTempView("maptest") {
       df.createOrReplaceTempView("maptest")
      // local optimization will by pass codegen code, so we should keep the filter `key=1`
      checkAnswer(sql("SELECT value['abc'] FROM maptest where key = 1"), Row("somestring"))
@@ -2474,7 +2474,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
 
   test("hash function") {
     val df = Seq(1 -> "a", 2 -> "b").toDF("i", "j")
-    withTempTable("tbl") {
+    withTempView("tbl") {
       df.createOrReplaceTempView("tbl")
       checkAnswer(
         df.select(hash($"i", $"j")),
@@ -2526,7 +2526,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
   test("natural join") {
     val df1 = Seq(("one", 1), ("two", 2), ("three", 3)).toDF("k", "v1")
     val df2 = Seq(("one", 1), ("two", 22), ("one", 5)).toDF("k", "v2")
-    withTempTable("nt1", "nt2") {
+    withTempView("nt1", "nt2") {
       df1.createOrReplaceTempView("nt1")
       df2.createOrReplaceTempView("nt2")
       checkAnswer(
@@ -2554,7 +2554,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
       ("r2c1", "r2c2", "t2r2c3"), ("r3c1y", "r3c2", "t2r3c3")).toDF("c1", "c2", "c3")
     val df3 = Seq((null, "r1c2", "t3r1c3"),
       ("r2c1", "r2c2", "t3r2c3"), ("r3c1y", "r3c2", "t3r3c3")).toDF("c1", 
"c2", "c3")
-    withTempTable("t1", "t2", "t3") {
+    withTempView("t1", "t2", "t3") {
       df1.createOrReplaceTempView("t1")
       df2.createOrReplaceTempView("t2")
       df3.createOrReplaceTempView("t3")

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/StatisticsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/StatisticsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/StatisticsSuite.scala
index ab55242..2c81cbf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/StatisticsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/StatisticsSuite.scala
@@ -34,7 +34,7 @@ class StatisticsSuite extends QueryTest with SharedSQLContext {
   }
 
   test("estimates the size of limit") {
-    withTempTable("test") {
+    withTempView("test") {
       Seq(("one", 1), ("two", 2), ("three", 3), ("four", 4)).toDF("k", "v")
         .createOrReplaceTempView("test")
       Seq((0, 1), (1, 24), (2, 48)).foreach { case (limit, expected) =>
@@ -58,7 +58,7 @@ class StatisticsSuite extends QueryTest with SharedSQLContext {
   }
 
   test("estimates the size of a limit 0 on outer join") {
-    withTempTable("test") {
+    withTempView("test") {
       Seq(("one", 1), ("two", 2), ("three", 3), ("four", 4)).toDF("k", "v")
         .createOrReplaceTempView("test")
       val df1 = spark.table("test")

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
index 1d9ff21..afed342 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
@@ -128,7 +128,7 @@ class SubquerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-15677: Queries against local relations with scalar subquery in 
Select list") {
-    withTempTable("t1", "t2") {
+    withTempView("t1", "t2") {
       Seq((1, 1), (2, 2)).toDF("c1", "c2").createOrReplaceTempView("t1")
       Seq((1, 1), (2, 2)).toDF("c1", "c2").createOrReplaceTempView("t2")
 
@@ -267,7 +267,7 @@ class SubquerySuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-15832: Test embedded existential predicate sub-queries") {
-    withTempTable("t1", "t2", "t3", "t4", "t5") {
+    withTempView("t1", "t2", "t3", "t4", "t5") {
       Seq((1, 1), (2, 2)).toDF("c1", "c2").createOrReplaceTempView("t1")
       Seq((1, 1), (2, 2)).toDF("c1", "c2").createOrReplaceTempView("t2")
      Seq((1, 1), (2, 2), (1, 2)).toDF("c1", "c2").createOrReplaceTempView("t3")

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
index c96239e..13490c3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
@@ -71,7 +71,7 @@ class PlannerSuite extends SharedSQLContext {
 
   test("sizeInBytes estimation of limit operator for broadcast hash join 
optimization") {
     def checkPlan(fieldTypes: Seq[DataType]): Unit = {
-      withTempTable("testLimit") {
+      withTempView("testLimit") {
         val fields = fieldTypes.zipWithIndex.map {
           case (dataType, index) => StructField(s"c${index}", dataType, true)
         } :+ StructField("key", IntegerType, true)
@@ -131,7 +131,7 @@ class PlannerSuite extends SharedSQLContext {
 
   test("InMemoryRelation statistics propagation") {
     withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "81920") {
-      withTempTable("tiny") {
+      withTempView("tiny") {
         testData.limit(3).createOrReplaceTempView("tiny")
         sql("CACHE TABLE tiny")
 
@@ -157,7 +157,7 @@ class PlannerSuite extends SharedSQLContext {
       val df = spark.read.parquet(path)
       df.createOrReplaceTempView("testPushed")
 
-      withTempTable("testPushed") {
+      withTempView("testPushed") {
         val exp = sql("select * from testPushed where key = 
15").queryExecution.sparkPlan
         assert(exp.toString.contains("PushedFilters: [IsNotNull(key), 
EqualTo(key,15)]"))
       }
@@ -198,7 +198,7 @@ class PlannerSuite extends SharedSQLContext {
   }
 
   test("PartitioningCollection") {
-    withTempTable("normal", "small", "tiny") {
+    withTempView("normal", "small", "tiny") {
       testData.createOrReplaceTempView("normal")
       testData.limit(10).createOrReplaceTempView("small")
       testData.limit(3).createOrReplaceTempView("tiny")

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index b4294ed..f2ec393 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -497,7 +497,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
   }
 
   test("rename temporary table - destination table with database name") {
-    withTempTable("tab1") {
+    withTempView("tab1") {
       sql(
         """
           |CREATE TEMPORARY TABLE tab1
@@ -522,7 +522,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
   }
 
   test("rename temporary table - destination table already exists") {
-    withTempTable("tab1", "tab2") {
+    withTempView("tab1", "tab2") {
       sql(
         """
           |CREATE TEMPORARY TABLE tab1
@@ -677,7 +677,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
   }
 
   test("show tables") {
-    withTempTable("show1a", "show2b") {
+    withTempView("show1a", "show2b") {
       sql(
         """
           |CREATE TEMPORARY TABLE show1a

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index 6c72019..0b0e64a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -1082,7 +1082,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
   test("Corrupt records: PERMISSIVE mode") {
     // Test if we can query corrupt records.
     withSQLConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD.key -> "_unparsed") {
-      withTempTable("jsonTable") {
+      withTempView("jsonTable") {
         val jsonDF = spark.read.json(corruptRecords)
         jsonDF.createOrReplaceTempView("jsonTable")
         val schema = StructType(
@@ -1518,7 +1518,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
   test("SPARK-12057 additional corrupt records do not throw exceptions") {
     // Test if we can query corrupt records.
     withSQLConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD.key -> "_unparsed") {
-      withTempTable("jsonTable") {
+      withTempView("jsonTable") {
         val schema = StructType(
           StructField("_unparsed", StringType, true) ::
             StructField("dummy", StringType, true) :: Nil)
@@ -1635,7 +1635,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
   }
 
   test("Casting long as timestamp") {
-    withTempTable("jsonTable") {
+    withTempView("jsonTable") {
       val schema = (new StructType).add("ts", TimestampType)
       val jsonDF = spark.read.schema(schema).json(timestampAsLong)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetPartitionDiscoverySuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetPartitionDiscoverySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetPartitionDiscoverySuite.scala
index 133ffed..8d18be9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetPartitionDiscoverySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetPartitionDiscoverySuite.scala
@@ -404,7 +404,7 @@ class ParquetPartitionDiscoverySuite extends QueryTest with ParquetTest with Sha
 
       spark.read.parquet(base.getCanonicalPath).createOrReplaceTempView("t")
 
-      withTempTable("t") {
+      withTempView("t") {
         checkAnswer(
           sql("SELECT * FROM t"),
           for {
@@ -488,7 +488,7 @@ class ParquetPartitionDiscoverySuite extends QueryTest with ParquetTest with Sha
 
       spark.read.parquet(base.getCanonicalPath).createOrReplaceTempView("t")
 
-      withTempTable("t") {
+      withTempView("t") {
         checkAnswer(
           sql("SELECT * FROM t"),
           for {
@@ -537,7 +537,7 @@ class ParquetPartitionDiscoverySuite extends QueryTest with ParquetTest with Sha
       val parquetRelation = spark.read.format("parquet").load(base.getCanonicalPath)
       parquetRelation.createOrReplaceTempView("t")
 
-      withTempTable("t") {
+      withTempView("t") {
         checkAnswer(
           sql("SELECT * FROM t"),
           for {
@@ -577,7 +577,7 @@ class ParquetPartitionDiscoverySuite extends QueryTest with ParquetTest with Sha
       val parquetRelation = spark.read.format("parquet").load(base.getCanonicalPath)
       parquetRelation.createOrReplaceTempView("t")
 
-      withTempTable("t") {
+      withTempView("t") {
         checkAnswer(
           sql("SELECT * FROM t"),
           for {
@@ -613,7 +613,7 @@ class ParquetPartitionDiscoverySuite extends QueryTest with ParquetTest with Sha
         .load(base.getCanonicalPath)
         .createOrReplaceTempView("t")
 
-      withTempTable("t") {
+      withTempView("t") {
         checkAnswer(
           sql("SELECT * FROM t"),
          (1 to 10).map(i => Row(i, null, 1)) ++ (1 to 10).map(i => Row(i, i.toString, 2)))

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala
index 9fb34e0..85efca3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala
@@ -91,7 +91,7 @@ private[sql] trait ParquetTest extends SQLTestUtils {
       (f: => Unit): Unit = {
     withParquetDataFrame(data, testVectorized) { df =>
       df.createOrReplaceTempView(tableName)
-      withTempTable(tableName)(f)
+      withTempView(tableName)(f)
     }
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
index 579a095..bba40c6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsSuite.scala
@@ -133,7 +133,7 @@ class SQLMetricsSuite extends SparkFunSuite with SharedSQLContext {
     // test should use the deterministic number of partitions.
    val testDataForJoin = testData2.filter('a < 2) // TestData2(1, 1) :: TestData2(1, 2)
     testDataForJoin.createOrReplaceTempView("testDataForJoin")
-    withTempTable("testDataForJoin") {
+    withTempView("testDataForJoin") {
       // Assume the execution plan is
       // ... -> SortMergeJoin(nodeId = 1) -> TungstenProject(nodeId = 0)
       val df = spark.sql(
@@ -151,7 +151,7 @@ class SQLMetricsSuite extends SparkFunSuite with SharedSQLContext {
     // this test should use the deterministic number of partitions.
     val testDataForJoin = testData2.filter('a < 2) // TestData2(1, 1) :: TestData2(1, 2)
     testDataForJoin.createOrReplaceTempView("testDataForJoin")
-    withTempTable("testDataForJoin") {
+    withTempView("testDataForJoin") {
       // Assume the execution plan is
       // ... -> SortMergeJoin(nodeId = 1) -> TungstenProject(nodeId = 0)
       val df = spark.sql(
@@ -206,7 +206,7 @@ class SQLMetricsSuite extends SparkFunSuite with SharedSQLContext {
     val testDataForJoin = testData2.filter('a < 2) // TestData2(1, 1) :: TestData2(1, 2)
     testDataForJoin.createOrReplaceTempView("testDataForJoin")
     withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "true") {
-      withTempTable("testDataForJoin") {
+      withTempView("testDataForJoin") {
         // Assume the execution plan is
        // ... -> BroadcastNestedLoopJoin(nodeId = 1) -> TungstenProject(nodeId = 0)
         val df = spark.sql(
@@ -236,7 +236,7 @@ class SQLMetricsSuite extends SparkFunSuite with SharedSQLContext {
     withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "true") {
       val testDataForJoin = testData2.filter('a < 2) // TestData2(1, 1) :: TestData2(1, 2)
       testDataForJoin.createOrReplaceTempView("testDataForJoin")
-      withTempTable("testDataForJoin") {
+      withTempView("testDataForJoin") {
         // Assume the execution plan is
         // ... -> CartesianProduct(nodeId = 1) -> TungstenProject(nodeId = 0)
         val df = spark.sql(

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 228e425..995b120 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -752,7 +752,7 @@ class JDBCSuite extends SparkFunSuite
    assertEmptyQuery(s"SELECT * FROM foobar WHERE $FALSE1 AND ($FALSE2 OR $TRUE)")
 
     // Tests JDBCPartition whereClause clause push down.
-    withTempTable("tempFrame") {
+    withTempView("tempFrame") {
       val jdbcPartitionWhereClause = s"$FALSE1 OR $TRUE"
       val df = spark.read.jdbc(
         urlWithUserAndPass,

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
index 26bd3fb..5286ee5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
@@ -150,7 +150,7 @@ private[sql] trait SQLTestUtils
   /**
    * Drops temporary table `tableName` after calling `f`.
    */
-  protected def withTempTable(tableNames: String*)(f: => Unit): Unit = {
+  protected def withTempView(tableNames: String*)(f: => Unit): Unit = {
     try f finally {
      // If the test failed part way, we don't want to mask the failure by failing to remove
       // temp tables that never got created.
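
Note: the hunk above ends before the method body. As a minimal sketch, assuming
the helper drops each view through the 2.0 catalog API (the exact drop call is
not shown in this diff), the full renamed method plausibly reads:

    protected def withTempView(tableNames: String*)(f: => Unit): Unit = {
      try f finally {
        // Drop each view after f runs, even when f throws, so a failing test
        // doesn't leak temp views into later tests.
        tableNames.foreach(spark.catalog.dropTempView)  // assumed drop mechanism
      }
    }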

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
index f7c3e34..7d4ef6f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala
@@ -129,7 +129,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("CACHE TABLE tableName AS SELECT * FROM anotherTable") {
-    withTempTable("testCacheTable") {
+    withTempView("testCacheTable") {
       sql("CACHE TABLE testCacheTable AS SELECT * FROM src")
       assertCached(table("testCacheTable"))
 
@@ -144,7 +144,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("CACHE TABLE tableName AS SELECT ...") {
-    withTempTable("testCacheTable") {
+    withTempView("testCacheTable") {
       sql("CACHE TABLE testCacheTable AS SELECT key FROM src LIMIT 10")
       assertCached(table("testCacheTable"))
 
@@ -177,7 +177,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("CACHE TABLE with Hive UDF") {
-    withTempTable("udfTest") {
+    withTempView("udfTest") {
       sql("CACHE TABLE udfTest AS SELECT * FROM src WHERE floor(key) = 1")
       assertCached(table("udfTest"))
       uncacheTable("udfTest")
@@ -276,7 +276,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
 
   test("Cache/Uncache Qualified Tables") {
     withTempDatabase { db =>
-      withTempTable("cachedTable") {
+      withTempView("cachedTable") {
         sql(s"CREATE TABLE $db.cachedTable STORED AS PARQUET AS SELECT 1")
         sql(s"CACHE TABLE $db.cachedTable")
         assertCached(spark.table(s"$db.cachedTable"))
@@ -298,7 +298,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
 
   test("Cache Table As Select - having database name") {
     withTempDatabase { db =>
-      withTempTable("cachedTable") {
+      withTempView("cachedTable") {
         val e = intercept[ParseException] {
           sql(s"CACHE TABLE $db.cachedTable AS SELECT 1")
         }.getMessage

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala
index 5714d06..3414f5e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetadataCacheSuite.scala
@@ -30,7 +30,7 @@ import org.apache.spark.sql.test.SQLTestUtils
 class HiveMetadataCacheSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("SPARK-16337 temporary view refresh") {
-    withTempTable("view_refresh") {
+    withTempView("view_refresh") {
       withTable("view_table") {
         // Create a Parquet directory
         spark.range(start = 0, end = 100, step = 1, numPartitions = 3)

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
index 33252ad..09c1547 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveParquetSuite.scala
@@ -52,7 +52,7 @@ class HiveParquetSuite extends QueryTest with ParquetTest with TestHiveSingleton
     withTempPath { dir =>
       sql("SELECT * FROM src").write.parquet(dir.getCanonicalPath)
       spark.read.parquet(dir.getCanonicalPath).createOrReplaceTempView("p")
-      withTempTable("p") {
+      withTempView("p") {
         checkAnswer(
           sql("SELECT * FROM src ORDER BY key"),
           sql("SELECT * from p ORDER BY key").collect().toSeq)
@@ -66,7 +66,7 @@ class HiveParquetSuite extends QueryTest with ParquetTest with TestHiveSingleton
       withTempPath { file =>
         sql("SELECT * FROM t LIMIT 1").write.parquet(file.getCanonicalPath)
         spark.read.parquet(file.getCanonicalPath).createOrReplaceTempView("p")
-        withTempTable("p") {
+        withTempView("p") {
           // let's do three overwrites for good measure
           sql("INSERT OVERWRITE TABLE p SELECT * FROM t")
           sql("INSERT OVERWRITE TABLE p SELECT * FROM t")

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index 12d250d..af071f9 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -79,7 +79,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
            |)
          """.stripMargin)
 
-      withTempTable("expectedJsonTable") {
+      withTempView("expectedJsonTable") {
         read.json(jsonFilePath).createOrReplaceTempView("expectedJsonTable")
         checkAnswer(
           sql("SELECT a, b, `c_!@(3)`, `<d>`.`d!`, `<d>`.`=` FROM jsonTable"),
@@ -109,7 +109,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
 
       assert(expectedSchema === table("jsonTable").schema)
 
-      withTempTable("expectedJsonTable") {
+      withTempView("expectedJsonTable") {
         read.json(jsonFilePath).createOrReplaceTempView("expectedJsonTable")
         checkAnswer(
           sql("SELECT b, `<d>`.`=` FROM jsonTable"),
@@ -247,7 +247,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
            |)
          """.stripMargin)
 
-      withTempTable("expectedJsonTable") {
+      withTempView("expectedJsonTable") {
         read.json(jsonFilePath).createOrReplaceTempView("expectedJsonTable")
 
         checkAnswer(
@@ -553,7 +553,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
 
   test("scan a parquet table created through a CTAS statement") {
     withSQLConf(HiveUtils.CONVERT_METASTORE_PARQUET.key -> "true") {
-      withTempTable("jt") {
+      withTempView("jt") {
         (1 to 10).map(i => i -> s"str$i").toDF("a", 
"b").createOrReplaceTempView("jt")
 
         withTable("test_parquet_ctas") {

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
index ac89bbb..7d429f4 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ParquetHiveCompatibilitySuite.scala
@@ -54,7 +54,7 @@ class ParquetHiveCompatibilitySuite extends ParquetCompatibilityTest with TestHi
 
        // Don't convert Hive metastore Parquet tables to let Hive write those Parquet files.
         withSQLConf(HiveUtils.CONVERT_METASTORE_PARQUET.key -> "false") {
-          withTempTable("data") {
+          withTempView("data") {
            val fields = hiveTypes.zipWithIndex.map { case (typ, index) => s"  col_$index $typ" }
 
             val ddl =

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
index a16fe32..2dcf13c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/AggregationQuerySuite.scala
@@ -923,7 +923,7 @@ abstract class AggregationQuerySuite extends QueryTest with SQLTestUtils with Te
   }
 
   test("udaf without specifying inputSchema") {
-    withTempTable("noInputSchemaUDAF") {
+    withTempView("noInputSchemaUDAF") {
      spark.udf.register("noInputSchema", new ScalaAggregateFunctionWithoutInputSchema)
 
       val data =

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index 741abcb..5d51019 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -139,7 +139,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("show tblproperties for spark temporary table - empty row") {
-    withTempTable("parquet_temp") {
+    withTempView("parquet_temp") {
       sql(
         """
           |CREATE TEMPORARY TABLE parquet_temp (c1 INT, c2 STRING)
@@ -397,7 +397,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("show partitions - empty row") {
-    withTempTable("parquet_temp") {
+    withTempView("parquet_temp") {
       sql(
         """
           |CREATE TEMPORARY TABLE parquet_temp (c1 INT, c2 STRING)

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
index a43eed9..98afd99 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveExplainSuite.scala
@@ -78,7 +78,7 @@ class HiveExplainSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("SPARK-6212: The EXPLAIN output of CTAS only shows the analyzed plan") {
-    withTempTable("jt") {
+    withTempView("jt") {
      val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str$i"}"""))
       spark.read.json(rdd).createOrReplaceTempView("jt")
       val outputs = sql(

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
index 76d3f3d..5b46476 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveTableScanSuite.scala
@@ -102,7 +102,7 @@ class HiveTableScanSuite extends HiveComparisonTest with SQLTestUtils with TestH
 
   test("Verify SQLConf HIVE_METASTORE_PARTITION_PRUNING") {
     val view = "src"
-    withTempTable(view) {
+    withTempView(view) {
       spark.range(1, 5).createOrReplaceTempView(view)
       val table = "table_with_partition"
       withTable(table) {

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
index def4601..f690035 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
@@ -358,7 +358,7 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
   }
 
   test("Hive UDF in group by") {
-    withTempTable("tab1") {
+    withTempView("tab1") {
       Seq(Tuple1(1451400761)).toDF("test_date").createOrReplaceTempView("tab1")
       sql(s"CREATE TEMPORARY FUNCTION testUDFToDate AS 
'${classOf[GenericUDFToDate].getName}'")
       val count = sql("select testUDFToDate(cast(test_date as timestamp))" +

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 84fa7f9..9a36fd6 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -122,7 +122,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("SPARK-13651: generator outputs shouldn't be resolved from its child's 
output") {
-    withTempTable("src") {
+    withTempView("src") {
       Seq(("id1", "value1")).toDF("key", 
"value").createOrReplaceTempView("src")
       val query =
         sql("SELECT genoutput.* FROM src " +
@@ -952,7 +952,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("Sorting columns are not in Generate") {
-    withTempTable("data") {
+    withTempView("data") {
       spark.range(1, 5)
         .select(array($"id", $"id" + 1).as("a"), $"id".as("b"), (lit(10) - 
$"id").as("c"))
         .createOrReplaceTempView("data")
@@ -1229,7 +1229,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("SPARK-10741: Sort on Aggregate using parquet") {
     withTable("test10741") {
-      withTempTable("src") {
+      withTempView("src") {
         Seq("a" -> 5, "a" -> 9, "b" -> 6).toDF("c1", 
"c2").createOrReplaceTempView("src")
         sql("CREATE TABLE test10741 STORED AS PARQUET AS SELECT * FROM src")
       }
@@ -1483,7 +1483,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("multi-insert with lateral view") {
-    withTempTable("t1") {
+    withTempView("t1") {
       spark.range(10)
         .select(array($"id", $"id" + 1).as("arr"), $"id")
         .createOrReplaceTempView("source")

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
index cd41da7..46595ee 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
@@ -92,7 +92,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
   test("Creating case class RDD table") {
     val data = (1 to 100).map(i => (i, s"val_$i"))
     sparkContext.parallelize(data).toDF().createOrReplaceTempView("t")
-    withTempTable("t") {
+    withTempView("t") {
       checkAnswer(sql("SELECT * FROM t"), data.toDF().collect())
     }
   }
@@ -309,7 +309,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
       val path = dir.getCanonicalPath
 
       withTable("empty_orc") {
-        withTempTable("empty", "single") {
+        withTempView("empty", "single") {
           spark.sql(
             s"""CREATE TABLE empty_orc(key INT, value STRING)
                |STORED AS ORC
@@ -401,7 +401,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
   }
 
   test("Verify the ORC conversion parameter: CONVERT_METASTORE_ORC") {
-    withTempTable("single") {
+    withTempView("single") {
       val singleRowDF = Seq((0, "foo")).toDF("key", "value")
       singleRowDF.createOrReplaceTempView("single")
 

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcTest.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcTest.scala
index 2a64711..7226ed5 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcTest.scala
@@ -62,7 +62,7 @@ private[sql] trait OrcTest extends SQLTestUtils with TestHiveSingleton {
       (f: => Unit): Unit = {
     withOrcDataFrame(data) { df =>
       df.createOrReplaceTempView(tableName)
-      withTempTable(tableName)(f)
+      withTempView(tableName)(f)
     }
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
index 96beb2d..31b6197 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
@@ -702,7 +702,7 @@ class ParquetSourceSuite extends ParquetPartitioningTest {
   }
 
   test("Verify the PARQUET conversion parameter: CONVERT_METASTORE_PARQUET") {
-    withTempTable("single") {
+    withTempView("single") {
       val singleRowDF = Seq((0, "foo")).toDF("key", "value")
       singleRowDF.createOrReplaceTempView("single")
 

http://git-wip-us.apache.org/repos/asf/spark/blob/31c3bcb4/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
index 6299857..047b08c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
@@ -92,7 +92,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
 
     // Self-join
     df.createOrReplaceTempView("t")
-    withTempTable("t") {
+    withTempView("t") {
       checkAnswer(
         sql(
           """SELECT l.a, r.b, l.p1, r.p2
@@ -339,7 +339,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   test("saveAsTable()/load() - non-partitioned table - ErrorIfExists") {
     Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
 
-    withTempTable("t") {
+    withTempView("t") {
      intercept[AnalysisException] {
        testDF.write.format(dataSourceName).mode(SaveMode.ErrorIfExists).saveAsTable("t")
       }
@@ -349,7 +349,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   test("saveAsTable()/load() - non-partitioned table - Ignore") {
     Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
 
-    withTempTable("t") {
+    withTempView("t") {
       testDF.write.format(dataSourceName).mode(SaveMode.Ignore).saveAsTable("t")
       assert(spark.table("t").collect().isEmpty)
     }
@@ -461,7 +461,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   test("saveAsTable()/load() - partitioned table - ErrorIfExists") {
     Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
 
-    withTempTable("t") {
+    withTempView("t") {
       intercept[AnalysisException] {
         partitionedTestDF.write
           .format(dataSourceName)
@@ -476,7 +476,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   test("saveAsTable()/load() - partitioned table - Ignore") {
     Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
 
-    withTempTable("t") {
+    withTempView("t") {
       partitionedTestDF.write
         .format(dataSourceName)
         .mode(SaveMode.Ignore)
@@ -722,7 +722,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
         'p3.cast(FloatType).as('pf1),
         'f)
 
-      withTempTable("t") {
+      withTempView("t") {
         input
           .write
           .format(dataSourceName)

