Repository: spark
Updated Branches:
  refs/heads/branch-2.0 b148b0364 -> 1f3b5a5ac


[SPARK-15925][SQL][SPARKR] Replaces registerTempTable with createOrReplaceTempView

## What changes were proposed in this pull request?

This PR replaces `registerTempTable` with `createOrReplaceTempView` as a follow-up to #12945.
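
For reference, a minimal SparkR sketch of the API change, mirroring the updated roxygen examples in this patch (the JSON path and view name are illustrative):

```r
library(SparkR)

sc <- sparkR.init()
sqlContext <- sparkRSQL.init(sc)

# Illustrative input; any JSON file readable by read.json works here.
df <- read.json("path/to/file.json")

# Before this patch:
#   registerTempTable(df, "json_df")

# After this patch: creates the temp view, replacing an existing view with the same name.
createOrReplaceTempView(df, "json_df")
new_df <- sql("SELECT * FROM json_df")
```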

## How was this patch tested?

Existing SparkR tests.

Author: Cheng Lian <l...@databricks.com>

Closes #13644 from liancheng/spark-15925-temp-view-for-r.

(cherry picked from commit ced8d669b359d6465c3bf476af0f68cc4db04a25)
Signed-off-by: Shivaram Venkataraman <shiva...@cs.berkeley.edu>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/1f3b5a5a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/1f3b5a5a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/1f3b5a5a

Branch: refs/heads/branch-2.0
Commit: 1f3b5a5ac061c0420f30bb1a696f6445aa64b566
Parents: b148b03
Author: Cheng Lian <l...@databricks.com>
Authored: Mon Jun 13 15:46:50 2016 -0700
Committer: Shivaram Venkataraman <shiva...@cs.berkeley.edu>
Committed: Mon Jun 13 15:46:58 2016 -0700

----------------------------------------------------------------------
 R/pkg/NAMESPACE                           |  2 +-
 R/pkg/R/DataFrame.R                       | 23 ++++++++++++-----------
 R/pkg/R/SQLContext.R                      | 10 +++++-----
 R/pkg/R/generics.R                        |  7 +++++--
 R/pkg/inst/tests/testthat/test_sparkSQL.R | 15 ++++++++-------
 5 files changed, 31 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/1f3b5a5a/R/pkg/NAMESPACE
----------------------------------------------------------------------
diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index ba386da..a8cf53f 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -45,6 +45,7 @@ exportMethods("arrange",
               "corr",
               "covar_samp",
               "covar_pop",
+              "createOrReplaceTempView",
               "crosstab",
               "dapply",
               "dapplyCollect",
@@ -80,7 +81,6 @@ exportMethods("arrange",
               "persist",
               "printSchema",
               "rbind",
-              "registerTempTable",
               "rename",
               "repartition",
               "sample",

http://git-wip-us.apache.org/repos/asf/spark/blob/1f3b5a5a/R/pkg/R/DataFrame.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/DataFrame.R b/R/pkg/R/DataFrame.R
index 30a5675..0ff350d 100644
--- a/R/pkg/R/DataFrame.R
+++ b/R/pkg/R/DataFrame.R
@@ -428,16 +428,17 @@ setMethod("coltypes<-",
             dataFrame(nx@sdf)
           })
 
-#' Register Temporary Table
+#' Creates a temporary view using the given name.
 #'
-#' Registers a SparkDataFrame as a Temporary Table in the SQLContext
+#' Creates a new temporary view using a SparkDataFrame in the SQLContext. If a
+#' temporary view with the same name already exists, replaces it.
 #'
 #' @param x A SparkDataFrame
-#' @param tableName A character vector containing the name of the table
+#' @param viewName A character vector containing the name of the table
 #'
 #' @family SparkDataFrame functions
-#' @rdname registerTempTable
-#' @name registerTempTable
+#' @rdname createOrReplaceTempView
+#' @name createOrReplaceTempView
 #' @export
 #' @examples
 #'\dontrun{
@@ -445,13 +446,13 @@ setMethod("coltypes<-",
 #' sqlContext <- sparkRSQL.init(sc)
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
-#' registerTempTable(df, "json_df")
+#' createOrReplaceTempView(df, "json_df")
 #' new_df <- sql("SELECT * FROM json_df")
 #'}
-setMethod("registerTempTable",
-          signature(x = "SparkDataFrame", tableName = "character"),
-          function(x, tableName) {
-              invisible(callJMethod(x@sdf, "registerTempTable", tableName))
+setMethod("createOrReplaceTempView",
+          signature(x = "SparkDataFrame", viewName = "character"),
+          function(x, viewName) {
+              invisible(callJMethod(x@sdf, "createOrReplaceTempView", viewName))
           })
 
 #' insertInto
@@ -473,7 +474,7 @@ setMethod("registerTempTable",
 #' sqlContext <- sparkRSQL.init(sc)
 #' df <- read.df(path, "parquet")
 #' df2 <- read.df(path2, "parquet")
-#' registerTempTable(df, "table1")
+#' createOrReplaceTempView(df, "table1")
 #' insertInto(df2, "table1", overwrite = TRUE)
 #'}
 setMethod("insertInto",

http://git-wip-us.apache.org/repos/asf/spark/blob/1f3b5a5a/R/pkg/R/SQLContext.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/SQLContext.R b/R/pkg/R/SQLContext.R
index e7e9e35..914b02a 100644
--- a/R/pkg/R/SQLContext.R
+++ b/R/pkg/R/SQLContext.R
@@ -411,7 +411,7 @@ read.text <- function(x, ...) {
 #' sqlContext <- sparkRSQL.init(sc)
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
-#' registerTempTable(df, "table")
+#' createOrReplaceTempView(df, "table")
 #' new_df <- sql("SELECT * FROM table")
 #' }
 #' @name sql
@@ -443,7 +443,7 @@ sql <- function(x, ...) {
 #' sqlContext <- sparkRSQL.init(sc)
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
-#' registerTempTable(df, "table")
+#' createOrReplaceTempView(df, "table")
 #' new_df <- tableToDF("table")
 #' }
 #' @note since 2.0.0
@@ -529,7 +529,7 @@ tableNames <- function(x, ...) {
 #' sqlContext <- sparkRSQL.init(sc)
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
-#' registerTempTable(df, "table")
+#' createOrReplaceTempView(df, "table")
 #' cacheTable("table")
 #' }
 #' @name cacheTable
@@ -558,7 +558,7 @@ cacheTable <- function(x, ...) {
 #' sqlContext <- sparkRSQL.init(sc)
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
-#' registerTempTable(df, "table")
+#' createOrReplaceTempView(df, "table")
 #' uncacheTable("table")
 #' }
 #' @name uncacheTable
@@ -608,7 +608,7 @@ clearCache <- function() {
 #' sc <- sparkR.init()
 #' sqlContext <- sparkRSQL.init(sc)
 #' df <- read.df(path, "parquet")
-#' registerTempTable(df, "table")
+#' createOrReplaceTempView(df, "table")
 #' dropTempTable("table")
 #' }
 #' @name dropTempTable

http://git-wip-us.apache.org/repos/asf/spark/blob/1f3b5a5a/R/pkg/R/generics.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/generics.R b/R/pkg/R/generics.R
index f0cde56..50fc204 100644
--- a/R/pkg/R/generics.R
+++ b/R/pkg/R/generics.R
@@ -547,9 +547,12 @@ setGeneric("printSchema", function(x) { standardGeneric("printSchema") })
 #' @export
 setGeneric("rename", function(x, ...) { standardGeneric("rename") })
 
-#' @rdname registerTempTable
+#' @rdname createOrReplaceTempView
 #' @export
-setGeneric("registerTempTable", function(x, tableName) { 
standardGeneric("registerTempTable") })
+setGeneric("createOrReplaceTempView",
+           function(x, viewName) {
+             standardGeneric("createOrReplaceTempView")
+           })
 
 #' @rdname sample
 #' @export

http://git-wip-us.apache.org/repos/asf/spark/blob/1f3b5a5a/R/pkg/inst/tests/testthat/test_sparkSQL.R
----------------------------------------------------------------------
diff --git a/R/pkg/inst/tests/testthat/test_sparkSQL.R b/R/pkg/inst/tests/testthat/test_sparkSQL.R
index 375cb6f..d1ca3b7 100644
--- a/R/pkg/inst/tests/testthat/test_sparkSQL.R
+++ b/R/pkg/inst/tests/testthat/test_sparkSQL.R
@@ -445,7 +445,7 @@ test_that("jsonRDD() on a RDD with json string", {
 
 test_that("test cache, uncache and clearCache", {
   df <- read.json(jsonPath)
-  registerTempTable(df, "table1")
+  createOrReplaceTempView(df, "table1")
   cacheTable("table1")
   uncacheTable("table1")
   clearCache()
@@ -454,16 +454,17 @@ test_that("test cache, uncache and clearCache", {
 
 test_that("test tableNames and tables", {
   df <- read.json(jsonPath)
-  registerTempTable(df, "table1")
+  createOrReplaceTempView(df, "table1")
   expect_equal(length(tableNames()), 1)
   df <- tables()
   expect_equal(count(df), 1)
   dropTempTable("table1")
 })
 
-test_that("registerTempTable() results in a queryable table and sql() results 
in a new DataFrame", {
+test_that(
+  "createOrReplaceTempView() results in a queryable table and sql() results in 
a new DataFrame", {
   df <- read.json(jsonPath)
-  registerTempTable(df, "table1")
+  createOrReplaceTempView(df, "table1")
   newdf <- sql("SELECT * FROM table1 where name = 'Michael'")
   expect_is(newdf, "SparkDataFrame")
   expect_equal(count(newdf), 1)
@@ -484,13 +485,13 @@ test_that("insertInto() on a registered table", {
   write.df(df2, parquetPath2, "parquet", "overwrite")
   dfParquet2 <- read.df(parquetPath2, "parquet")
 
-  registerTempTable(dfParquet, "table1")
+  createOrReplaceTempView(dfParquet, "table1")
   insertInto(dfParquet2, "table1")
   expect_equal(count(sql("select * from table1")), 5)
   expect_equal(first(sql("select * from table1 order by age"))$name, "Michael")
   dropTempTable("table1")
 
-  registerTempTable(dfParquet, "table1")
+  createOrReplaceTempView(dfParquet, "table1")
   insertInto(dfParquet2, "table1", overwrite = TRUE)
   expect_equal(count(sql("select * from table1")), 2)
   expect_equal(first(sql("select * from table1 order by age"))$name, "Bob")
@@ -502,7 +503,7 @@ test_that("insertInto() on a registered table", {
 
 test_that("tableToDF() returns a new DataFrame", {
   df <- read.json(jsonPath)
-  registerTempTable(df, "table1")
+  createOrReplaceTempView(df, "table1")
   tabledf <- tableToDF("table1")
   expect_is(tabledf, "SparkDataFrame")
   expect_equal(count(tabledf), 3)

