Repository: spark
Updated Branches:
  refs/heads/master c1b4ce432 -> 864de3bf4


[SPARK-10079] [SPARKR] Make 'column' and 'col' functions be S4 functions.

1.  Add a "col" function to DataFrame.
2.  Move the current "col" function in Column.R to functions.R, and convert it
to an S4 function.
3.  Add an S4 "column" function in functions.R.
4.  Convert the "column" function in Column.R to an S4 function. This is for
private use.

Author: Sun Rui <rui....@intel.com>

Closes #8864 from sun-rui/SPARK-10079.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/864de3bf
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/864de3bf
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/864de3bf

Branch: refs/heads/master
Commit: 864de3bf4041c829e95d278b9569e91448bab0cc
Parents: c1b4ce4
Author: Sun Rui <rui....@intel.com>
Authored: Fri Oct 9 23:05:38 2015 -0700
Committer: Shivaram Venkataraman <shiva...@cs.berkeley.edu>
Committed: Fri Oct 9 23:05:38 2015 -0700

----------------------------------------------------------------------
 R/pkg/NAMESPACE                  |  1 +
 R/pkg/R/column.R                 | 12 +++++-------
 R/pkg/R/functions.R              | 22 ++++++++++++++++++++++
 R/pkg/R/generics.R               |  4 ++++
 R/pkg/inst/tests/test_sparkSQL.R |  4 ++--
 5 files changed, 34 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/864de3bf/R/pkg/NAMESPACE
----------------------------------------------------------------------
diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index 255be2e..95d949e 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -107,6 +107,7 @@ exportMethods("%in%",
               "cbrt",
               "ceil",
               "ceiling",
+              "column",
               "concat",
               "concat_ws",
               "contains",

http://git-wip-us.apache.org/repos/asf/spark/blob/864de3bf/R/pkg/R/column.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/column.R b/R/pkg/R/column.R
index 42e9d12..20de390 100644
--- a/R/pkg/R/column.R
+++ b/R/pkg/R/column.R
@@ -36,13 +36,11 @@ setMethod("initialize", "Column", function(.Object, jc) {
   .Object
 })
 
-column <- function(jc) {
-  new("Column", jc)
-}
-
-col <- function(x) {
-  column(callJStatic("org.apache.spark.sql.functions", "col", x))
-}
+setMethod("column",
+          signature(x = "jobj"),
+          function(x) {
+            new("Column", x)
+          })
 
 #' @rdname show
 #' @name show

http://git-wip-us.apache.org/repos/asf/spark/blob/864de3bf/R/pkg/R/functions.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R
index 94687ed..a220ad8 100644
--- a/R/pkg/R/functions.R
+++ b/R/pkg/R/functions.R
@@ -233,6 +233,28 @@ setMethod("ceil",
             column(jc)
           })
 
+#' Though scala functions has "col" function, we don't expose it in SparkR
+#' because we don't want to conflict with the "col" function in the R base
+#' package and we also have "column" function exported which is an alias of 
"col".
+col <- function(x) {
+  column(callJStatic("org.apache.spark.sql.functions", "col", x))
+}
+
+#' column
+#'
+#' Returns a Column based on the given column name.
+#'
+#' @rdname col
+#' @name column
+#' @family normal_funcs
+#' @export
+#' @examples \dontrun{column(df)}
+setMethod("column",
+          signature(x = "character"),
+          function(x) {
+            col(x)
+          })
+
 #' cos
 #'
 #' Computes the cosine of the given value.

http://git-wip-us.apache.org/repos/asf/spark/blob/864de3bf/R/pkg/R/generics.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/generics.R b/R/pkg/R/generics.R
index c447413..8fad170 100644
--- a/R/pkg/R/generics.R
+++ b/R/pkg/R/generics.R
@@ -686,6 +686,10 @@ setGeneric("cbrt", function(x) { standardGeneric("cbrt") })
 #' @export
 setGeneric("ceil", function(x) { standardGeneric("ceil") })
 
+#' @rdname col
+#' @export
+setGeneric("column", function(x) { standardGeneric("column") })
+
 #' @rdname concat
 #' @export
 setGeneric("concat", function(x, ...) { standardGeneric("concat") })

http://git-wip-us.apache.org/repos/asf/spark/blob/864de3bf/R/pkg/inst/tests/test_sparkSQL.R
----------------------------------------------------------------------
diff --git a/R/pkg/inst/tests/test_sparkSQL.R b/R/pkg/inst/tests/test_sparkSQL.R
index 4804ecf..3a04edb 100644
--- a/R/pkg/inst/tests/test_sparkSQL.R
+++ b/R/pkg/inst/tests/test_sparkSQL.R
@@ -787,7 +787,7 @@ test_that("test HiveContext", {
 })
 
 test_that("column operators", {
-  c <- SparkR:::col("a")
+  c <- column("a")
   c2 <- (- c + 1 - 2) * 3 / 4.0
   c3 <- (c + c2 - c2) * c2 %% c2
   c4 <- (c > c2) & (c2 <= c3) | (c == c2) & (c2 != c3)
@@ -795,7 +795,7 @@ test_that("column operators", {
 })
 
 test_that("column functions", {
-  c <- SparkR:::col("a")
+  c <- column("a")
   c1 <- abs(c) + acos(c) + approxCountDistinct(c) + ascii(c) + asin(c) + 
atan(c)
   c2 <- avg(c) + base64(c) + bin(c) + bitwiseNOT(c) + cbrt(c) + ceil(c) + 
cos(c)
   c3 <- cosh(c) + count(c) + crc32(c) + exp(c)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to