Repository: spark
Updated Branches:
  refs/heads/master b674e67c2 -> 25b315e6c
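
This commit replaces the deprecated DataFrame.registerTempTable with
createOrReplaceTempView throughout the Hive test suites. As a minimal
standalone sketch of the API change (assuming Spark 2.0+; this example is not
code from the patch itself):

    import org.apache.spark.sql.SparkSession

    object TempViewMigrationSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .master("local[*]").appName("sketch").getOrCreate()
        import spark.implicits._

        val df = Seq((1, 1)).toDF("c1", "c2")
        // Deprecated since Spark 2.0:
        //   df.registerTempTable("table1")
        // Replacement used throughout this patch:
        df.createOrReplaceTempView("table1")
        spark.sql("SELECT c1, c2 FROM table1").show()
        spark.stop()
      }
    }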


http://git-wip-us.apache.org/repos/asf/spark/blob/25b315e6/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index ac9a393..81f3ea8 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -102,14 +102,14 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("SPARK-6835: udtf in lateral view") {
     val df = Seq((1, 1)).toDF("c1", "c2")
-    df.registerTempTable("table1")
+    df.createOrReplaceTempView("table1")
     val query = sql("SELECT c1, v FROM table1 LATERAL VIEW stack(3, 1, c1 + 1, c1 + 2) d AS v")
     checkAnswer(query, Row(1, 1) :: Row(1, 2) :: Row(1, 3) :: Nil)
   }
 
   test("SPARK-13651: generator outputs shouldn't be resolved from its child's 
output") {
     withTempTable("src") {
-      Seq(("id1", "value1")).toDF("key", "value").registerTempTable("src")
+      Seq(("id1", "value1")).toDF("key", "value").createOrReplaceTempView("src")
       val query =
         sql("SELECT genoutput.* FROM src " +
           "LATERAL VIEW explode(map('key1', 100, 'key2', 200)) genoutput AS 
key, value")
@@ -135,8 +135,8 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       Order(1, "Atlas", "MTB", 434, "2015-01-07", "John D", "Pacifica", "CA", 20151),
       Order(11, "Swift", "YFlikr", 137, "2015-01-23", "John D", "Hayward", "CA", 20151))
 
-    orders.toDF.registerTempTable("orders1")
-    orderUpdates.toDF.registerTempTable("orderupdates1")
+    orders.toDF.createOrReplaceTempView("orders1")
+    orderUpdates.toDF.createOrReplaceTempView("orderupdates1")
 
     sql(
       """CREATE TABLE orders(
@@ -305,7 +305,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("SPARK-5371: union with null and sum") {
     val df = Seq((1, 1)).toDF("c1", "c2")
-    df.registerTempTable("table1")
+    df.createOrReplaceTempView("table1")
 
     val query = sql(
       """
@@ -329,7 +329,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("CTAS with WITH clause") {
     val df = Seq((1, 1)).toDF("c1", "c2")
-    df.registerTempTable("table1")
+    df.createOrReplaceTempView("table1")
 
     sql(
       """
@@ -346,7 +346,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("explode nested Field") {
-    Seq(NestedArray1(NestedArray2(Seq(1, 2, 3)))).toDF.registerTempTable("nestedArray")
+    Seq(NestedArray1(NestedArray2(Seq(1, 2, 3)))).toDF.createOrReplaceTempView("nestedArray")
     checkAnswer(
       sql("SELECT ints FROM nestedArray LATERAL VIEW explode(a.b) a AS ints"),
       Row(1) :: Row(2) :: Row(3) :: Nil)
@@ -543,7 +543,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("specifying the column list for CTAS") {
-    Seq((1, "111111"), (2, "222222")).toDF("key", "value").registerTempTable("mytable1")
+    Seq((1, "111111"), (2, "222222")).toDF("key", "value").createOrReplaceTempView("mytable1")
 
     sql("create table gen__tmp(a int, b string) as select key, value from 
mytable1")
     checkAnswer(
@@ -598,7 +598,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("double nested data") {
     sparkContext.parallelize(Nested1(Nested2(Nested3(1))) :: Nil)
-      .toDF().registerTempTable("nested")
+      .toDF().createOrReplaceTempView("nested")
     checkAnswer(
       sql("SELECT f1.f2.f3 FROM nested"),
       Row(1))
@@ -682,7 +682,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   test("SPARK-4963 DataFrame sample on mutable row return wrong result") {
     sql("SELECT * FROM src WHERE key % 2 = 0")
       .sample(withReplacement = false, fraction = 0.3)
-      .registerTempTable("sampled")
+      .createOrReplaceTempView("sampled")
     (1 to 10).foreach { i =>
       checkAnswer(
         sql("SELECT * FROM sampled WHERE key % 2 = 1"),
@@ -707,7 +707,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
     val rowRdd = sparkContext.parallelize(row :: Nil)
 
-    hiveContext.createDataFrame(rowRdd, schema).registerTempTable("testTable")
+    hiveContext.createDataFrame(rowRdd, schema).createOrReplaceTempView("testTable")
 
     sql(
       """CREATE TABLE nullValuesInInnerComplexTypes
@@ -733,14 +733,14 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("SPARK-4296 Grouping field with Hive UDF as sub expression") {
     val rdd = sparkContext.makeRDD( """{"a": "str", "b":"1", "c":"1970-01-01 00:00:00"}""" :: Nil)
-    read.json(rdd).registerTempTable("data")
+    read.json(rdd).createOrReplaceTempView("data")
     checkAnswer(
       sql("SELECT concat(a, '-', b), year(c) FROM data GROUP BY concat(a, '-', 
b), year(c)"),
       Row("str-1", 1970))
 
     dropTempTable("data")
 
-    read.json(rdd).registerTempTable("data")
+    read.json(rdd).createOrReplaceTempView("data")
     checkAnswer(sql("SELECT year(c) + 1 FROM data GROUP BY year(c) + 1"), 
Row(1971))
 
     dropTempTable("data")
@@ -748,14 +748,14 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("resolve udtf in projection #1") {
     val rdd = sparkContext.makeRDD((1 to 5).map(i => s"""{"a":[$i, ${i + 1}]}"""))
-    read.json(rdd).registerTempTable("data")
+    read.json(rdd).createOrReplaceTempView("data")
     val df = sql("SELECT explode(a) AS val FROM data")
     val col = df("val")
   }
 
   test("resolve udtf in projection #2") {
     val rdd = sparkContext.makeRDD((1 to 2).map(i => s"""{"a":[$i, ${i + 1}]}"""))
-    read.json(rdd).registerTempTable("data")
+    read.json(rdd).createOrReplaceTempView("data")
     checkAnswer(sql("SELECT explode(map(1, 1)) FROM data LIMIT 1"), Row(1, 1) 
:: Nil)
     checkAnswer(sql("SELECT explode(map(1, 1)) as (k1, k2) FROM data LIMIT 
1"), Row(1, 1) :: Nil)
     intercept[AnalysisException] {
@@ -770,7 +770,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   // TGF with non-TGF in project is allowed in Spark SQL, but not in Hive
   test("TGF with non-TGF in projection") {
     val rdd = sparkContext.makeRDD( """{"a": "1", "b":"1"}""" :: Nil)
-    read.json(rdd).registerTempTable("data")
+    read.json(rdd).createOrReplaceTempView("data")
     checkAnswer(
       sql("SELECT explode(map(a, b)) as (k1, k2), a, b FROM data"),
       Row("1", "1", "1", "1") :: Nil)
@@ -784,7 +784,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     // PreInsertionCasts will actually start to work before ImplicitGenerate and then
     // generates an invalid query plan.
     val rdd = sparkContext.makeRDD((1 to 5).map(i => s"""{"a":[$i, ${i + 1}]}"""))
-    read.json(rdd).registerTempTable("data")
+    read.json(rdd).createOrReplaceTempView("data")
     val originalConf = sessionState.convertCTAS
     setConf(HiveUtils.CONVERT_CTAS, false)
 
@@ -824,7 +824,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     Seq.empty[(java.math.BigDecimal, java.math.BigDecimal)]
       .toDF("d1", "d2")
       .select($"d1".cast(DecimalType(10, 5)).as("d"))
-      .registerTempTable("dn")
+      .createOrReplaceTempView("dn")
 
     sql("select d from dn union all select d * 2 from dn")
       .queryExecution.analyzed
@@ -832,27 +832,27 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
 
   test("Star Expansion - script transform") {
     val data = (1 to 100000).map { i => (i, i, i) }
-    data.toDF("d1", "d2", "d3").registerTempTable("script_trans")
+    data.toDF("d1", "d2", "d3").createOrReplaceTempView("script_trans")
     assert(100000 === sql("SELECT TRANSFORM (*) USING 'cat' FROM script_trans").count())
   }
 
   test("test script transform for stdout") {
     val data = (1 to 100000).map { i => (i, i, i) }
-    data.toDF("d1", "d2", "d3").registerTempTable("script_trans")
+    data.toDF("d1", "d2", "d3").createOrReplaceTempView("script_trans")
     assert(100000 ===
       sql("SELECT TRANSFORM (d1, d2, d3) USING 'cat' AS (a,b,c) FROM 
script_trans").count())
   }
 
   test("test script transform for stderr") {
     val data = (1 to 100000).map { i => (i, i, i) }
-    data.toDF("d1", "d2", "d3").registerTempTable("script_trans")
+    data.toDF("d1", "d2", "d3").createOrReplaceTempView("script_trans")
     assert(0 ===
       sql("SELECT TRANSFORM (d1, d2, d3) USING 'cat 1>&2' AS (a,b,c) FROM 
script_trans").count())
   }
 
   test("test script transform data type") {
     val data = (1 to 5).map { i => (i, i) }
-    data.toDF("key", "value").registerTempTable("test")
+    data.toDF("key", "value").createOrReplaceTempView("test")
     checkAnswer(
       sql("""FROM
           |(FROM test SELECT TRANSFORM(key, value) USING 'cat' AS (`thing1` int, thing2 string)) t
@@ -864,7 +864,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     withTempTable("data") {
       spark.range(1, 5)
         .select(array($"id", $"id" + 1).as("a"), $"id".as("b"), (lit(10) - 
$"id").as("c"))
-        .registerTempTable("data")
+        .createOrReplaceTempView("data")
 
       // case 1: missing sort columns are resolvable if join is true
       checkAnswer(
@@ -888,7 +888,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("test case key when") {
-    (1 to 5).map(i => (i, i.toString)).toDF("k", "v").registerTempTable("t")
+    (1 to 5).map(i => (i, i.toString)).toDF("k", "v").createOrReplaceTempView("t")
     checkAnswer(
       sql("SELECT CASE k WHEN 2 THEN 22 WHEN 4 THEN 44 ELSE 0 END, v FROM t"),
       Row(0, "1") :: Row(22, "2") :: Row(0, "3") :: Row(44, "4") :: Row(0, 
"5") :: Nil)
@@ -897,7 +897,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   test("SPARK-7269 Check analysis failed in case in-sensitive") {
     Seq(1, 2, 3).map { i =>
       (i.toString, i.toString)
-    }.toDF("key", "value").registerTempTable("df_analysis")
+    }.toDF("key", "value").createOrReplaceTempView("df_analysis")
     sql("SELECT kEy from df_analysis group by key").collect()
     sql("SELECT kEy+3 from df_analysis group by key+3").collect()
     sql("SELECT kEy+3, a.kEy, A.kEy from df_analysis A group by key").collect()
@@ -1031,7 +1031,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   test("SPARK-8588 HiveTypeCoercion.inConversion fires too early") {
     val df =
       createDataFrame(Seq((1, "2014-01-01"), (2, "2015-01-01"), (3, "2016-01-01")))
-    df.toDF("id", "datef").registerTempTable("test_SPARK8588")
+    df.toDF("id", "datef").createOrReplaceTempView("test_SPARK8588")
     checkAnswer(
       sql(
         """
@@ -1046,7 +1046,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   test("SPARK-9371: fix the support for special chars in column names for hive 
context") {
     read.json(sparkContext.makeRDD(
       """{"a": {"c.b": 1}, "b.$q": [{"a@!.q": 1}], "q.w": {"w.i&": [1]}}""" :: 
Nil))
-      .registerTempTable("t")
+      .createOrReplaceTempView("t")
 
     checkAnswer(sql("SELECT a.`c.b`, `b.$q`[0].`a@!.q`, `q.w`.`w.i&`[0] FROM 
t"), Row(1, 1, 1))
   }
@@ -1125,7 +1125,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     spark
       .range(5)
       .selectExpr("id AS a", "id AS b")
-      .registerTempTable("test")
+      .createOrReplaceTempView("test")
 
     checkAnswer(
       sql(
@@ -1143,7 +1143,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     spark
       .range(5)
       .selectExpr("id AS a", "id AS b")
-      .registerTempTable("test")
+      .createOrReplaceTempView("test")
 
     val df = sql(
       """FROM test
@@ -1162,7 +1162,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   test("SPARK-10741: Sort on Aggregate using parquet") {
     withTable("test10741") {
       withTempTable("src") {
-        Seq("a" -> 5, "a" -> 9, "b" -> 6).toDF().registerTempTable("src")
+        Seq("a" -> 5, "a" -> 9, "b" -> 6).toDF().createOrReplaceTempView("src")
         sql("CREATE TABLE test10741(c1 STRING, c2 INT) STORED AS PARQUET AS 
SELECT * FROM src")
       }
 
@@ -1374,7 +1374,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     withTempTable("t1") {
       spark.range(10)
         .select(array($"id", $"id" + 1).as("arr"), $"id")
-        .registerTempTable("source")
+        .createOrReplaceTempView("source")
       withTable("dest1", "dest2") {
         sql("CREATE TABLE dest1 (i INT)")
         sql("CREATE TABLE dest2 (i INT)")
@@ -1407,7 +1407,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       withTempTable("t1", "t2") {
         val path = dir.getCanonicalPath
         val ds = spark.range(10)
-        ds.registerTempTable("t1")
+        ds.createOrReplaceTempView("t1")
 
         sql(
           s"""CREATE TEMPORARY TABLE t2
@@ -1431,7 +1431,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
     "shouldn always be used together with PATH data source option"
   ) {
     withTempTable("t") {
-      spark.range(10).registerTempTable("t")
+      spark.range(10).createOrReplaceTempView("t")
 
       val message = intercept[IllegalArgumentException] {
         sql(
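
The SQLQuerySuite hunks above consistently pair createOrReplaceTempView with
the withTempTable helper from SQLTestUtils, which drops the named views once
the test body finishes. A rough sketch of that loan pattern (the helper's
actual signature in SQLTestUtils may differ; Catalog.dropTempView is the
public API):

    import org.apache.spark.sql.SparkSession

    // Assumed shape: run `body` with the views registered, then always drop
    // them, even if the body throws.
    def withTempTable(spark: SparkSession)(viewNames: String*)(body: => Unit): Unit = {
      try body
      finally viewNames.foreach(spark.catalog.dropTempView)
    }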

http://git-wip-us.apache.org/repos/asf/spark/blob/25b315e6/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala
index 4d284e1..47ceefb 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLWindowFunctionSuite.scala
@@ -40,7 +40,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
       WindowData(5, "c", 9),
       WindowData(6, "c", 10)
     )
-    sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+    sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
 
     checkAnswer(
       sql(
@@ -112,7 +112,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
       WindowData(5, "c", 9),
       WindowData(6, "c", 10)
     )
-    sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+    sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
 
     checkAnswer(
       sql(
@@ -139,7 +139,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
       WindowData(5, "c", 9),
       WindowData(6, "c", 10)
     )
-    sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+    sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
 
     checkAnswer(
       sql(
@@ -182,7 +182,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
       WindowData(5, "c", 9),
       WindowData(6, "c", 10)
     )
-    sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+    sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
 
     val e = intercept[AnalysisException] {
       sql(
@@ -203,7 +203,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
       WindowData(5, "c", 9),
       WindowData(6, "c", 10)
     )
-    sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+    sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
 
     checkAnswer(
       sql(
@@ -232,7 +232,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
       WindowData(5, "c", 9),
       WindowData(6, "c", 11)
     )
-    sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+    sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
 
     checkAnswer(
       sql("select month, product, sum(product + 1) over() from windowData 
order by area"),
@@ -301,7 +301,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
       WindowData(5, "c", 9),
       WindowData(6, "c", 11)
     )
-    sparkContext.parallelize(data).toDF().registerTempTable("windowData")
+    sparkContext.parallelize(data).toDF().createOrReplaceTempView("windowData")
 
     checkAnswer(
       sql(
@@ -322,7 +322,7 @@ class SQLWindowFunctionSuite extends QueryTest with SQLTestUtils with TestHiveSi
 
   test("window function: multiple window expressions in a single expression") {
     val nums = sparkContext.parallelize(1 to 10).map(x => (x, x % 2)).toDF("x", "y")
-    nums.registerTempTable("nums")
+    nums.createOrReplaceTempView("nums")
 
     val expected =
       Row(1, 1, 1, 55, 1, 57) ::
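
The SQLWindowFunctionSuite changes follow the same pattern: the sample rows
become a temp view and the window queries run over it in SQL. A hypothetical
spark-shell rendition (the shell predefines spark and its implicits):

    case class WindowData(month: Int, area: String, product: Int)
    val data = Seq(WindowData(1, "a", 5), WindowData(2, "a", 6), WindowData(3, "b", 7))
    spark.createDataFrame(data).createOrReplaceTempView("windowData")
    spark.sql(
      "SELECT month, area, sum(product) OVER (PARTITION BY area) AS total FROM windowData"
    ).show()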

http://git-wip-us.apache.org/repos/asf/spark/blob/25b315e6/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcPartitionDiscoverySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcPartitionDiscoverySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcPartitionDiscoverySuite.scala
index 6161412..fed0d11 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcPartitionDiscoverySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcPartitionDiscoverySuite.scala
@@ -90,7 +90,7 @@ class OrcPartitionDiscoverySuite extends QueryTest with TestHiveSingleton with B
           makePartitionDir(base, defaultPartitionName, "pi" -> pi, "ps" -> ps))
       }
 
-      read.orc(base.getCanonicalPath).registerTempTable("t")
+      read.orc(base.getCanonicalPath).createOrReplaceTempView("t")
 
       withTempTable("t") {
         checkAnswer(
@@ -137,7 +137,7 @@ class OrcPartitionDiscoverySuite extends QueryTest with TestHiveSingleton with B
           makePartitionDir(base, defaultPartitionName, "pi" -> pi, "ps" -> ps))
       }
 
-      read.orc(base.getCanonicalPath).registerTempTable("t")
+      read.orc(base.getCanonicalPath).createOrReplaceTempView("t")
 
       withTempTable("t") {
         checkAnswer(
@@ -189,7 +189,7 @@ class OrcPartitionDiscoverySuite extends QueryTest with TestHiveSingleton with B
       read
         .option(ConfVars.DEFAULTPARTITIONNAME.varname, defaultPartitionName)
         .orc(base.getCanonicalPath)
-        .registerTempTable("t")
+        .createOrReplaceTempView("t")
 
       withTempTable("t") {
         checkAnswer(
@@ -231,7 +231,7 @@ class OrcPartitionDiscoverySuite extends QueryTest with TestHiveSingleton with B
       read
         .option(ConfVars.DEFAULTPARTITIONNAME.varname, defaultPartitionName)
         .orc(base.getCanonicalPath)
-        .registerTempTable("t")
+        .createOrReplaceTempView("t")
 
       withTempTable("t") {
         checkAnswer(
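
OrcPartitionDiscoverySuite registers the reader output itself as the view, so
partition columns inferred from the directory layout (pi, ps) can be queried
by name. Roughly, for a hypothetical base path:

    // assumed layout: /tmp/orc_base/pi=1/ps=foo/part-00000.orc, ...
    spark.read.orc("/tmp/orc_base").createOrReplaceTempView("t")
    spark.sql("SELECT pi, ps, count(*) FROM t GROUP BY pi, ps").show()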

http://git-wip-us.apache.org/repos/asf/spark/blob/25b315e6/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
index 9a08858..f83b3a3 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
@@ -98,7 +98,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
 
   test("Creating case class RDD table") {
     val data = (1 to 100).map(i => (i, s"val_$i"))
-    sparkContext.parallelize(data).toDF().registerTempTable("t")
+    sparkContext.parallelize(data).toDF().createOrReplaceTempView("t")
     withTempTable("t") {
       checkAnswer(sql("SELECT * FROM t"), data.toDF().collect())
     }
@@ -223,7 +223,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
 
   test("appending") {
     val data = (0 until 10).map(i => (i, i.toString))
-    createDataFrame(data).toDF("c1", "c2").registerTempTable("tmp")
+    createDataFrame(data).toDF("c1", "c2").createOrReplaceTempView("tmp")
     withOrcTable(data, "t") {
       sql("INSERT INTO TABLE t SELECT * FROM tmp")
       checkAnswer(table("t"), (data ++ data).map(Row.fromTuple))
@@ -233,7 +233,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
 
   test("overwriting") {
     val data = (0 until 10).map(i => (i, i.toString))
-    createDataFrame(data).toDF("c1", "c2").registerTempTable("tmp")
+    createDataFrame(data).toDF("c1", "c2").createOrReplaceTempView("tmp")
     withOrcTable(data, "t") {
       sql("INSERT OVERWRITE TABLE t SELECT * FROM tmp")
       checkAnswer(table("t"), data.map(Row.fromTuple))
@@ -324,7 +324,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
              """.stripMargin)
 
           val emptyDF = Seq.empty[(Int, String)].toDF("key", "value").coalesce(1)
-          emptyDF.registerTempTable("empty")
+          emptyDF.createOrReplaceTempView("empty")
 
           // This creates 1 empty ORC file with Hive ORC SerDe.  We are using this trick because
           // Spark SQL ORC data source always avoids write empty ORC files.
@@ -340,7 +340,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
           assert(errorMessage.contains("Unable to infer schema for ORC"))
 
           val singleRowDF = Seq((0, "foo")).toDF("key", "value").coalesce(1)
-          singleRowDF.registerTempTable("single")
+          singleRowDF.createOrReplaceTempView("single")
 
           spark.sql(
             s"""INSERT INTO TABLE empty_orc
@@ -422,7 +422,7 @@ class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
                """.stripMargin)
 
             val singleRowDF = Seq((0, "foo")).toDF("key", "value").coalesce(1)
-            singleRowDF.registerTempTable("single")
+            singleRowDF.createOrReplaceTempView("single")
 
             spark.sql(
               s"""INSERT INTO TABLE dummy_orc

http://git-wip-us.apache.org/repos/asf/spark/blob/25b315e6/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
index 96a7364..6081d86 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala
@@ -51,7 +51,7 @@ abstract class OrcSuite extends QueryTest with TestHiveSingleton with BeforeAndA
       .makeRDD(1 to 10)
       .map(i => OrcData(i, s"part-$i"))
       .toDF()
-      .registerTempTable(s"orc_temp_table")
+      .createOrReplaceTempView(s"orc_temp_table")
 
     sql(
       s"""CREATE EXTERNAL TABLE normal_orc(

http://git-wip-us.apache.org/repos/asf/spark/blob/25b315e6/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
index f52c6e4..7fe158c 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/parquetSuites.scala
@@ -171,8 +171,9 @@ class ParquetMetastoreSuite extends ParquetPartitioningTest {
       sql(s"ALTER TABLE partitioned_parquet_with_complextypes ADD PARTITION 
(p=$p)")
     }
 
-    (1 to 10).map(i => (i, s"str$i")).toDF("a", "b").registerTempTable("jt")
-    (1 to 10).map(i => Tuple1(Seq(new Integer(i), null))).toDF("a").registerTempTable("jt_array")
+    (1 to 10).map(i => (i, s"str$i")).toDF("a", "b").createOrReplaceTempView("jt")
+    (1 to 10).map(i => Tuple1(Seq(new Integer(i), null))).toDF("a")
+      .createOrReplaceTempView("jt_array")
 
     setConf(HiveUtils.CONVERT_METASTORE_PARQUET, true)
   }
@@ -541,8 +542,8 @@ class ParquetMetastoreSuite extends ParquetPartitioningTest {
             |STORED AS PARQUET
           """.stripMargin)
 
-        // Temp table to insert data into partitioned table
-        Seq("foo", "bar").toDF("a").registerTempTable("test_temp")
+        // Temp view that is used to insert data into partitioned table
+        Seq("foo", "bar").toDF("a").createOrReplaceTempView("test_temp")
         sql("INSERT INTO test_added_partitions PARTITION(b='0') SELECT a FROM 
test_temp")
 
         checkAnswer(
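
Here the temp view serves only as a staging relation for an INSERT into the
partitioned metastore table; a condensed spark-shell sketch of the hunk above:

    Seq("foo", "bar").toDF("a").createOrReplaceTempView("test_temp")
    spark.sql("INSERT INTO test_added_partitions PARTITION(b='0') SELECT a FROM test_temp")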

http://git-wip-us.apache.org/repos/asf/spark/blob/25b315e6/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
index 78d2dc2..a3183f2 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
@@ -91,7 +91,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
         yield Row(s"val_$i", s"val_$i", s"val_$i", s"val_$i", 1, 1, 1, 1))
 
     // Self-join
-    df.registerTempTable("t")
+    df.createOrReplaceTempView("t")
     withTempTable("t") {
       checkAnswer(
         sql(
@@ -337,7 +337,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   }
 
   test("saveAsTable()/load() - non-partitioned table - ErrorIfExists") {
-    Seq.empty[(Int, String)].toDF().registerTempTable("t")
+    Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
 
     withTempTable("t") {
       intercept[AnalysisException] {
@@ -347,7 +347,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   }
 
   test("saveAsTable()/load() - non-partitioned table - Ignore") {
-    Seq.empty[(Int, String)].toDF().registerTempTable("t")
+    Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
 
     withTempTable("t") {
       
testDF.write.format(dataSourceName).mode(SaveMode.Ignore).saveAsTable("t")
@@ -459,7 +459,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   }
 
   test("saveAsTable()/load() - partitioned table - ErrorIfExists") {
-    Seq.empty[(Int, String)].toDF().registerTempTable("t")
+    Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
 
     withTempTable("t") {
       intercept[AnalysisException] {
@@ -474,7 +474,7 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   }
 
   test("saveAsTable()/load() - partitioned table - Ignore") {
-    Seq.empty[(Int, String)].toDF().registerTempTable("t")
+    Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
 
     withTempTable("t") {
       partitionedTestDF.write
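
In HadoopFsRelationTest the empty temp view exists purely to collide with
saveAsTable: ErrorIfExists must fail with AnalysisException, while Ignore must
be a silent no-op. Condensed from the hunks above (testDF and dataSourceName
are suite fixtures; intercept comes from ScalaTest):

    Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
    intercept[AnalysisException] {
      testDF.write.format(dataSourceName).mode(SaveMode.ErrorIfExists).saveAsTable("t")
    }
    // With SaveMode.Ignore the write is skipped and the existing "t" is untouched.
    testDF.write.format(dataSourceName).mode(SaveMode.Ignore).saveAsTable("t")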

http://git-wip-us.apache.org/repos/asf/spark/blob/25b315e6/sql/hivecontext-compatibility/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
----------------------------------------------------------------------
diff --git a/sql/hivecontext-compatibility/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala b/sql/hivecontext-compatibility/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
index 5df674d..1c1db72 100644
--- a/sql/hivecontext-compatibility/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
+++ b/sql/hivecontext-compatibility/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
@@ -66,7 +66,7 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac
     val res = df3.collect()
     val expected = Seq((18, 18, 8)).toDF("a", "x", "b").collect()
     assert(res.toSeq == expected.toSeq)
-    df3.registerTempTable("mai_table")
+    df3.createOrReplaceTempView("mai_table")
     val df4 = hc.table("mai_table")
     val res2 = df4.collect()
     assert(res2.toSeq == expected.toSeq)
@@ -82,7 +82,7 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac
     val databases2 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
     assert(databases2.toSet == Set("default", "mee_db"))
     val df = (1 to 10).map { i => ("bob" + i.toString, i) }.toDF("name", "age")
-    df.registerTempTable("mee_table")
+    df.createOrReplaceTempView("mee_table")
     hc.sql("CREATE TABLE moo_table (name string, age int)")
     hc.sql("INSERT INTO moo_table SELECT * FROM mee_table")
     assert(

