spark git commit: [SPARK-10118] [SPARKR] [DOCS] Improve SparkR API docs for 1.5 release

2015-08-24 Thread shivaram
Repository: spark
Updated Branches:
  refs/heads/master 2bf338c62 -> 6511bf559


[SPARK-10118] [SPARKR] [DOCS] Improve SparkR API docs for 1.5 release

cc: shivaram

## Summary

- Modify the `rdname` of the expression functions, e.g. for `ascii`: `rdname functions` => `rdname ascii`
- Replace the dynamic function definitions with static ones so that each function gets its own documentation (see the sketch after this list).
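
A minimal sketch of the second point, assuming the SparkR internals referenced in the diff below (the `Column` class, `callJStatic`, and the generics from `generics.R`) are in scope; the "after" form paraphrases the pattern used in the patch rather than quoting it:

```r
# Before: methods are generated in a loop, so they all share one Rd page
# (every function lands under "rdname functions").
functions1 <- c("ascii", "base64")          # trimmed list, for illustration
createFunction1 <- function(name) {
  setMethod(name,
            signature(x = "Column"),
            function(x) {
              jc <- callJStatic("org.apache.spark.sql.functions", name, x@jc)
              column(jc)
            })
}
invisible(lapply(functions1, createFunction1))

# After: each function is written out statically with its own roxygen block,
# so it can carry its own @rdname and @examples.

#' ascii
#'
#' Computes the numeric value of the first character of the string column.
#'
#' @rdname ascii
setMethod("ascii",
          signature(x = "Column"),
          function(x) {
            jc <- callJStatic("org.apache.spark.sql.functions", "ascii", x@jc)
            column(jc)
          })
```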

## Generated PDF File
https://drive.google.com/file/d/0B9biIZIU47lLX2t6ZjRoRnBTSEU/view?usp=sharing

## JIRA
[[SPARK-10118] Improve SparkR API docs for 1.5 release - ASF 
JIRA](https://issues.apache.org/jira/browse/SPARK-10118)

Author: Yu ISHIKAWA yuu.ishik...@gmail.com
Author: Yuu ISHIKAWA yuu.ishik...@gmail.com

Closes #8386 from yu-iskw/SPARK-10118.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6511bf55
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6511bf55
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6511bf55

Branch: refs/heads/master
Commit: 6511bf559b736d8e23ae398901c8d78938e66869
Parents: 2bf338c
Author: Yu ISHIKAWA yuu.ishik...@gmail.com
Authored: Mon Aug 24 18:17:51 2015 -0700
Committer: Shivaram Venkataraman shiva...@cs.berkeley.edu
Committed: Mon Aug 24 18:17:51 2015 -0700

--
 R/create-docs.sh|2 +-
 R/pkg/R/column.R|5 +-
 R/pkg/R/functions.R | 1603 ++
 R/pkg/R/generics.R  |  214 +++
 4 files changed, 1596 insertions(+), 228 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/6511bf55/R/create-docs.sh
--
diff --git a/R/create-docs.sh b/R/create-docs.sh
index 6a4687b..d2ae160 100755
--- a/R/create-docs.sh
+++ b/R/create-docs.sh
@@ -39,7 +39,7 @@ pushd $FWDIR
 mkdir -p pkg/html
 pushd pkg/html
 
-Rscript -e 'library(SparkR, lib.loc="../../lib"); library(knitr); knit_rd("SparkR")'
+Rscript -e 'libDir <- "../../lib"; library(SparkR, lib.loc=libDir); library(knitr); knit_rd("SparkR", links = tools::findHTMLlinks(paste(libDir, "SparkR", sep="/")))'
 
 popd
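
For context on the change above: `tools::findHTMLlinks()` builds a lookup table from help topics to installed HTML help pages, and knitr's `knit_rd()` accepts that table through its `links` argument, so cross-references in the generated SparkR docs resolve to real pages. A sketch of the same call written out step by step, using the paths the script assumes (relative to `pkg/html`):

```r
# Run from R/pkg/html, as create-docs.sh does; ../../lib is where SparkR was installed.
libDir <- "../../lib"
library(SparkR, lib.loc = libDir)
library(knitr)

# Map help topics to the HTML files of the installed SparkR package,
# then render the Rd files with those links wired in.
links <- tools::findHTMLlinks(file.path(libDir, "SparkR"))
knit_rd("SparkR", links = links)
```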
 

http://git-wip-us.apache.org/repos/asf/spark/blob/6511bf55/R/pkg/R/column.R
--
diff --git a/R/pkg/R/column.R b/R/pkg/R/column.R
index 5a07ebd..a1f50c3 100644
--- a/R/pkg/R/column.R
+++ b/R/pkg/R/column.R
@@ -169,8 +169,7 @@ setMethod("between", signature(x = "Column"),
 #'
 #' @rdname column
 #'
-#' @examples
-#' \dontrun{
+#' @examples \dontrun{
 #'   cast(df$age, "string")
 #'   cast(df$name, list(type="array", elementType="byte", containsNull = TRUE))
 #' }
@@ -192,7 +191,7 @@ setMethod("cast",
 #'
 #' @rdname column
 #' @return a matched values as a result of comparing with given values.
-#' \dontrun{
+#' @examples \dontrun{
 #'   filter(df, "age in (10, 30)")
 #'   where(df, df$age %in% c(10, 30))
 #' }

http://git-wip-us.apache.org/repos/asf/spark/blob/6511bf55/R/pkg/R/functions.R
--
diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R
index b5879bd..d848730 100644
--- a/R/pkg/R/functions.R
+++ b/R/pkg/R/functions.R
@@ -18,69 +18,1298 @@
 #' @include generics.R column.R
 NULL
 
-#' @title S4 expression functions for DataFrame column(s)
-#' @description These are expression functions on DataFrame columns
-
-functions1 <- c(
-  "abs", "acos", "approxCountDistinct", "ascii", "asin", "atan",
-  "avg", "base64", "bin", "bitwiseNOT", "cbrt", "ceil", "cos", "cosh", "count",
-  "crc32", "dayofmonth", "dayofyear", "exp", "explode", "expm1", "factorial",
-  "first", "floor", "hex", "hour", "initcap", "isNaN", "last", "last_day",
-  "length", "log", "log10", "log1p", "log2", "lower", "ltrim", "max", "md5",
-  "mean", "min", "minute", "month", "negate", "quarter", "reverse",
-  "rint", "round", "rtrim", "second", "sha1", "signum", "sin", "sinh", "size",
-  "soundex", "sqrt", "sum", "sumDistinct", "tan", "tanh", "toDegrees",
-  "toRadians", "to_date", "trim", "unbase64", "unhex", "upper", "weekofyear",
-  "year")
-functions2 <- c(
-  "atan2", "datediff", "hypot", "levenshtein", "months_between", "nanvl", "pmod")
-
-createFunction1 <- function(name) {
-  setMethod(name,
-            signature(x = "Column"),
-            function(x) {
-              jc <- callJStatic("org.apache.spark.sql.functions", name, x@jc)
-              column(jc)
-            })
-}
-
-createFunction2 <- function(name) {
-  setMethod(name,
-            signature(y = "Column"),
-            function(y, x) {
-              if (class(x) == "Column") {
-                x <- x@jc
-              }
-              jc <- callJStatic("org.apache.spark.sql.functions", name, y@jc, x)
-              column(jc)
-            })
-}
+#' Creates a \code{Column} of literal value.
+#'
+#' The passed in object is returned directly if it is already a \linkS4class{Column}.
+#' If the object is a Scala Symbol, it is converted into a \linkS4class{Column} also.
+#' Otherwise, a new \linkS4class{Column} is created to

spark git commit: [SPARK-10118] [SPARKR] [DOCS] Improve SparkR API docs for 1.5 release

2015-08-24 Thread shivaram
Repository: spark
Updated Branches:
  refs/heads/branch-1.5 228e429eb -> ec5d09c0f


[SPARK-10118] [SPARKR] [DOCS] Improve SparkR API docs for 1.5 release

cc: shivaram

## Summary

- Modify the `rdname` of the expression functions, e.g. for `ascii`: `rdname functions` => `rdname ascii`
- Replace the dynamic function definitions with static ones so that each function gets its own documentation.

## Generated PDF File
https://drive.google.com/file/d/0B9biIZIU47lLX2t6ZjRoRnBTSEU/view?usp=sharing

## JIRA
[[SPARK-10118] Improve SparkR API docs for 1.5 release - ASF 
JIRA](https://issues.apache.org/jira/browse/SPARK-10118)

Author: Yu ISHIKAWA yuu.ishik...@gmail.com
Author: Yuu ISHIKAWA yuu.ishik...@gmail.com

Closes #8386 from yu-iskw/SPARK-10118.

(cherry picked from commit 6511bf559b736d8e23ae398901c8d78938e66869)
Signed-off-by: Shivaram Venkataraman shiva...@cs.berkeley.edu


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ec5d09c0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ec5d09c0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ec5d09c0

Branch: refs/heads/branch-1.5
Commit: ec5d09c0f0f1f61d6d80a35adaba3a8102184740
Parents: 228e429
Author: Yu ISHIKAWA yuu.ishik...@gmail.com
Authored: Mon Aug 24 18:17:51 2015 -0700
Committer: Shivaram Venkataraman shiva...@cs.berkeley.edu
Committed: Mon Aug 24 18:17:58 2015 -0700

--
 R/create-docs.sh|2 +-
 R/pkg/R/column.R|5 +-
 R/pkg/R/functions.R | 1603 ++
 R/pkg/R/generics.R  |  214 +++
 4 files changed, 1596 insertions(+), 228 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/ec5d09c0/R/create-docs.sh
--
diff --git a/R/create-docs.sh b/R/create-docs.sh
index 6a4687b..d2ae160 100755
--- a/R/create-docs.sh
+++ b/R/create-docs.sh
@@ -39,7 +39,7 @@ pushd $FWDIR
 mkdir -p pkg/html
 pushd pkg/html
 
-Rscript -e 'library(SparkR, lib.loc="../../lib"); library(knitr); knit_rd("SparkR")'
+Rscript -e 'libDir <- "../../lib"; library(SparkR, lib.loc=libDir); library(knitr); knit_rd("SparkR", links = tools::findHTMLlinks(paste(libDir, "SparkR", sep="/")))'
 
 popd
 

http://git-wip-us.apache.org/repos/asf/spark/blob/ec5d09c0/R/pkg/R/column.R
--
diff --git a/R/pkg/R/column.R b/R/pkg/R/column.R
index 5a07ebd..a1f50c3 100644
--- a/R/pkg/R/column.R
+++ b/R/pkg/R/column.R
@@ -169,8 +169,7 @@ setMethod("between", signature(x = "Column"),
 #'
 #' @rdname column
 #'
-#' @examples
-#' \dontrun{
+#' @examples \dontrun{
 #'   cast(df$age, "string")
 #'   cast(df$name, list(type="array", elementType="byte", containsNull = TRUE))
 #' }
@@ -192,7 +191,7 @@ setMethod("cast",
 #'
 #' @rdname column
 #' @return a matched values as a result of comparing with given values.
-#' \dontrun{
+#' @examples \dontrun{
 #'   filter(df, "age in (10, 30)")
 #'   where(df, df$age %in% c(10, 30))
 #' }

http://git-wip-us.apache.org/repos/asf/spark/blob/ec5d09c0/R/pkg/R/functions.R
--
diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R
index b5879bd..d848730 100644
--- a/R/pkg/R/functions.R
+++ b/R/pkg/R/functions.R
@@ -18,69 +18,1298 @@
 #' @include generics.R column.R
 NULL
 
-#' @title S4 expression functions for DataFrame column(s)
-#' @description These are expression functions on DataFrame columns
-
-functions1 <- c(
-  "abs", "acos", "approxCountDistinct", "ascii", "asin", "atan",
-  "avg", "base64", "bin", "bitwiseNOT", "cbrt", "ceil", "cos", "cosh", "count",
-  "crc32", "dayofmonth", "dayofyear", "exp", "explode", "expm1", "factorial",
-  "first", "floor", "hex", "hour", "initcap", "isNaN", "last", "last_day",
-  "length", "log", "log10", "log1p", "log2", "lower", "ltrim", "max", "md5",
-  "mean", "min", "minute", "month", "negate", "quarter", "reverse",
-  "rint", "round", "rtrim", "second", "sha1", "signum", "sin", "sinh", "size",
-  "soundex", "sqrt", "sum", "sumDistinct", "tan", "tanh", "toDegrees",
-  "toRadians", "to_date", "trim", "unbase64", "unhex", "upper", "weekofyear",
-  "year")
-functions2 <- c(
-  "atan2", "datediff", "hypot", "levenshtein", "months_between", "nanvl", "pmod")
-
-createFunction1 <- function(name) {
-  setMethod(name,
-            signature(x = "Column"),
-            function(x) {
-              jc <- callJStatic("org.apache.spark.sql.functions", name, x@jc)
-              column(jc)
-            })
-}
-
-createFunction2 <- function(name) {
-  setMethod(name,
-            signature(y = "Column"),
-            function(y, x) {
-              if (class(x) == "Column") {
-                x <- x@jc
-              }
-              jc <- callJStatic("org.apache.spark.sql.functions", name, y@jc, x)
-              column(jc)
-            })
-}
+#' Creates a \code{Column} of literal value.
+#'
+#' The passed in object is returned directly if it is already a \linkS4class{Column}.
+#' If