[2/3] spark git commit: [SPARK-15171][SQL] Remove the references to deprecated method dataset.registerTempTable

2016-05-17 Thread lian
http://git-wip-us.apache.org/repos/asf/spark/blob/5f5270ea/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
--
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
index d2e1ea1..2a5295d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
@@ -78,7 +78,7 @@ class PlannerSuite extends SharedSQLContext {
 val schema = StructType(fields)
 val row = Row.fromSeq(Seq.fill(fields.size)(null))
 val rowRDD = sparkContext.parallelize(row :: Nil)
-spark.createDataFrame(rowRDD, schema).registerTempTable("testLimit")
+spark.createDataFrame(rowRDD, schema).createOrReplaceTempView("testLimit")
 
 val planned = sql(
   """
@@ -132,7 +132,7 @@ class PlannerSuite extends SharedSQLContext {
   test("InMemoryRelation statistics propagation") {
 withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "81920") {
   withTempTable("tiny") {
-testData.limit(3).registerTempTable("tiny")
+testData.limit(3).createOrReplaceTempView("tiny")
 sql("CACHE TABLE tiny")
 
 val a = testData.as("a")
@@ -199,9 +199,9 @@ class PlannerSuite extends SharedSQLContext {
 
   test("PartitioningCollection") {
 withTempTable("normal", "small", "tiny") {
-  testData.registerTempTable("normal")
-  testData.limit(10).registerTempTable("small")
-  testData.limit(3).registerTempTable("tiny")
+  testData.createOrReplaceTempView("normal")
+  testData.limit(10).createOrReplaceTempView("small")
+  testData.limit(3).createOrReplaceTempView("tiny")
 
   // Disable broadcast join
   withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1") {

http://git-wip-us.apache.org/repos/asf/spark/blob/5f5270ea/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/AggregateBenchmark.scala
--
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/AggregateBenchmark.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/AggregateBenchmark.scala
index b31338e..bf3a39c 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/AggregateBenchmark.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/AggregateBenchmark.scala
@@ -134,7 +134,8 @@ class AggregateBenchmark extends BenchmarkBase {
 val N = 20 << 22
 
 val benchmark = new Benchmark("Aggregate w keys", N)
-sparkSession.range(N).selectExpr("id", "floor(rand() * 1) as k").registerTempTable("test")
+sparkSession.range(N).selectExpr("id", "floor(rand() * 1) as k")
+  .createOrReplaceTempView("test")
 
 def f(): Unit = sparkSession.sql("select k, k, sum(id) from test group by k, k").collect()
 

http://git-wip-us.apache.org/repos/asf/spark/blob/5f5270ea/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
--
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
index 2099d4e..e2fb913 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
@@ -42,7 +42,7 @@ class InMemoryColumnarQuerySuite extends QueryTest with 
SharedSQLContext {
   test("default size avoids broadcast") {
 // TODO: Improve this test when we have better statistics
 sparkContext.parallelize(1 to 10).map(i => TestData(i, i.toString))
-  .toDF().registerTempTable("sizeTst")
+  .toDF().createOrReplaceTempView("sizeTst")
 spark.catalog.cacheTable("sizeTst")
 assert(
   spark.table("sizeTst").queryExecution.analyzed.statistics.sizeInBytes >
@@ -92,7 +92,7 @@ class InMemoryColumnarQuerySuite extends QueryTest with 
SharedSQLContext {
 
   test("SPARK-2729 regression: timestamp data type") {
 val timestamps = (0 to 3).map(i => Tuple1(new Timestamp(i))).toDF("time")
-timestamps.registerTempTable("timestamps")
+timestamps.createOrReplaceTempView("timestamps")
 
 checkAnswer(
   sql("SELECT time FROM timestamps"),
@@ -133,7 +133,7 @@ class InMemoryColumnarQuerySuite extends QueryTest with 
SharedSQLContext {
 
 assert(df.schema.head.dataType === DecimalType(15, 10))
 
-df.cache().registerTempTable("test_fixed_decimal")
+df.cache().createOrReplaceTempView("test_fixed_decimal")
 checkAnswer(
   sql("SELECT * FROM test_fixed_decimal"),

[2/3] spark git commit: [SPARK-15171][SQL] Remove the references to deprecated method dataset.registerTempTable

2016-05-17 Thread lian
http://git-wip-us.apache.org/repos/asf/spark/blob/25b315e6/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
--
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
index d2e1ea1..2a5295d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
@@ -78,7 +78,7 @@ class PlannerSuite extends SharedSQLContext {
 val schema = StructType(fields)
 val row = Row.fromSeq(Seq.fill(fields.size)(null))
 val rowRDD = sparkContext.parallelize(row :: Nil)
-spark.createDataFrame(rowRDD, schema).registerTempTable("testLimit")
+spark.createDataFrame(rowRDD, schema).createOrReplaceTempView("testLimit")
 
 val planned = sql(
   """
@@ -132,7 +132,7 @@ class PlannerSuite extends SharedSQLContext {
   test("InMemoryRelation statistics propagation") {
 withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "81920") {
   withTempTable("tiny") {
-testData.limit(3).registerTempTable("tiny")
+testData.limit(3).createOrReplaceTempView("tiny")
 sql("CACHE TABLE tiny")
 
 val a = testData.as("a")
@@ -199,9 +199,9 @@ class PlannerSuite extends SharedSQLContext {
 
   test("PartitioningCollection") {
 withTempTable("normal", "small", "tiny") {
-  testData.registerTempTable("normal")
-  testData.limit(10).registerTempTable("small")
-  testData.limit(3).registerTempTable("tiny")
+  testData.createOrReplaceTempView("normal")
+  testData.limit(10).createOrReplaceTempView("small")
+  testData.limit(3).createOrReplaceTempView("tiny")
 
   // Disable broadcast join
   withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1") {

http://git-wip-us.apache.org/repos/asf/spark/blob/25b315e6/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/AggregateBenchmark.scala
--
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/AggregateBenchmark.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/AggregateBenchmark.scala
index b31338e..bf3a39c 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/AggregateBenchmark.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/AggregateBenchmark.scala
@@ -134,7 +134,8 @@ class AggregateBenchmark extends BenchmarkBase {
 val N = 20 << 22
 
 val benchmark = new Benchmark("Aggregate w keys", N)
-sparkSession.range(N).selectExpr("id", "floor(rand() * 1) as k").registerTempTable("test")
+sparkSession.range(N).selectExpr("id", "floor(rand() * 1) as k")
+  .createOrReplaceTempView("test")
 
 def f(): Unit = sparkSession.sql("select k, k, sum(id) from test group by k, k").collect()
 

http://git-wip-us.apache.org/repos/asf/spark/blob/25b315e6/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
--
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
index 2099d4e..e2fb913 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala
@@ -42,7 +42,7 @@ class InMemoryColumnarQuerySuite extends QueryTest with 
SharedSQLContext {
   test("default size avoids broadcast") {
 // TODO: Improve this test when we have better statistics
 sparkContext.parallelize(1 to 10).map(i => TestData(i, i.toString))
-  .toDF().registerTempTable("sizeTst")
+  .toDF().createOrReplaceTempView("sizeTst")
 spark.catalog.cacheTable("sizeTst")
 assert(
   spark.table("sizeTst").queryExecution.analyzed.statistics.sizeInBytes >
@@ -92,7 +92,7 @@ class InMemoryColumnarQuerySuite extends QueryTest with 
SharedSQLContext {
 
   test("SPARK-2729 regression: timestamp data type") {
 val timestamps = (0 to 3).map(i => Tuple1(new Timestamp(i))).toDF("time")
-timestamps.registerTempTable("timestamps")
+timestamps.createOrReplaceTempView("timestamps")
 
 checkAnswer(
   sql("SELECT time FROM timestamps"),
@@ -133,7 +133,7 @@ class InMemoryColumnarQuerySuite extends QueryTest with 
SharedSQLContext {
 
 assert(df.schema.head.dataType === DecimalType(15, 10))
 
-df.cache().registerTempTable("test_fixed_decimal")
+df.cache().createOrReplaceTempView("test_fixed_decimal")
 checkAnswer(
   sql("SELECT * FROM test_fixed_decimal"),