spark git commit: [SPARK-15159][SPARKR] SparkSession roxygen2 doc, programming guide, example updates

2016-06-20 Thread shivaram
Repository: spark
Updated Branches:
  refs/heads/branch-2.0 45c41aa33 -> f90b2ea1d


[SPARK-15159][SPARKR] SparkSession roxygen2 doc, programming guide, example updates

## What changes were proposed in this pull request?

Update the roxygen2 docs, the SparkR programming guide, and the R examples to use the Spark 2.0 `sparkR.session()` API in place of `sparkR.init()` / `sparkRSQL.init()`.

## How was this patch tested?

manual checks
cc shivaram

Author: Felix Cheung 

Closes #13751 from felixcheung/rsparksessiondoc.
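
The common thread in these updates is the Spark 2.0 session API: the examples no longer create a SparkContext and SQLContext by hand, but start a single session with `sparkR.session()`. A minimal sketch of the migrated pattern, assuming SparkR 2.0 is attached (the `appName` value is illustrative, not part of this patch):

```r
library(SparkR)

# Spark < 2.0 pattern, removed from the updated examples:
#   sc <- sparkR.init()
#   sqlContext <- sparkRSQL.init(sc)

# Spark 2.0 pattern used throughout the updated docs and examples:
sparkR.session(appName = "SparkR-example")  # appName is optional

df <- createDataFrame(faithful)  # no sqlContext argument needed any more
printSchema(df)

sparkR.session.stop()
```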

(cherry picked from commit 359c2e827d5682249c009e83379a5ee8e5aa4e89)
Signed-off-by: Shivaram Venkataraman 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f90b2ea1
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f90b2ea1
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f90b2ea1

Branch: refs/heads/branch-2.0
Commit: f90b2ea1d96bba4650b8d1ce37a60c81c89bca96
Parents: 45c41aa
Author: Felix Cheung 
Authored: Mon Jun 20 13:46:24 2016 -0700
Committer: Shivaram Venkataraman 
Committed: Mon Jun 20 13:46:32 2016 -0700

----------------------------------------------------------------------
 R/pkg/R/DataFrame.R                      | 169 +--
 R/pkg/R/SQLContext.R                     |  47 +++-
 R/pkg/R/mllib.R                          |   6 +-
 R/pkg/R/schema.R                         |  24 ++--
 R/pkg/R/sparkR.R                         |   7 +-
 docs/sparkr.md                           |  99
 examples/src/main/r/data-manipulation.R  |  15 +--
 examples/src/main/r/dataframe.R          |  13 +--
 examples/src/main/r/ml.R                 |  21 ++--
 9 files changed, 162 insertions(+), 239 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f90b2ea1/R/pkg/R/DataFrame.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/DataFrame.R b/R/pkg/R/DataFrame.R
index f3a3eff..583d3ae 100644
--- a/R/pkg/R/DataFrame.R
+++ b/R/pkg/R/DataFrame.R
@@ -35,12 +35,11 @@ setOldClass("structType")
 #' @slot env An R environment that stores bookkeeping states of the SparkDataFrame
 #' @slot sdf A Java object reference to the backing Scala DataFrame
 #' @seealso \link{createDataFrame}, \link{read.json}, \link{table}
-#' @seealso \url{https://spark.apache.org/docs/latest/sparkr.html#sparkr-dataframes}
+#' @seealso \url{https://spark.apache.org/docs/latest/sparkr.html#sparkdataframe}
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' df <- createDataFrame(faithful)
 #'}
 setClass("SparkDataFrame",
@@ -77,8 +76,7 @@ dataFrame <- function(sdf, isCached = FALSE) {
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' printSchema(df)
@@ -102,8 +100,7 @@ setMethod("printSchema",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' dfSchema <- schema(df)
@@ -126,8 +123,7 @@ setMethod("schema",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' explain(df, TRUE)
@@ -157,8 +153,7 @@ setMethod("explain",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' isLocal(df)
@@ -182,8 +177,7 @@ setMethod("isLocal",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' showDF(df)
@@ -207,8 +201,7 @@ setMethod("showDF",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' df
@@ -234,8 +227,7 @@ setMethod("show", "SparkDataFrame",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' dtypes(df)
@@ -261,8 +253,7 @@ setMethod("dtypes",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' columns(df)
@@ -396,8 +387,7 @@ setMethod("coltypes",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' coltypes(df) <- c("character", "integer")
@@ -432,7 +422,7 @@ setMethod("coltypes<-",
 
 #' Creates a temporary view using the given name.
 #'
-#' Creates a new temporary view using a SparkDataFrame in the SQLContext. If a
+#' Creates a new temporary

spark git commit: [SPARK-15159][SPARKR] SparkSession roxygen2 doc, programming guide, example updates

2016-06-20 Thread shivaram
Repository: spark
Updated Branches:
  refs/heads/master b0f2fb5b9 -> 359c2e827


[SPARK-15159][SPARKR] SparkSession roxygen2 doc, programming guide, example updates

## What changes were proposed in this pull request?

Update the roxygen2 docs, the SparkR programming guide, and the R examples to use the Spark 2.0 `sparkR.session()` API in place of `sparkR.init()` / `sparkRSQL.init()`.

## How was this patch tested?

manual checks
cc shivaram

Author: Felix Cheung 

Closes #13751 from felixcheung/rsparksessiondoc.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/359c2e82
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/359c2e82
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/359c2e82

Branch: refs/heads/master
Commit: 359c2e827d5682249c009e83379a5ee8e5aa4e89
Parents: b0f2fb5
Author: Felix Cheung 
Authored: Mon Jun 20 13:46:24 2016 -0700
Committer: Shivaram Venkataraman 
Committed: Mon Jun 20 13:46:24 2016 -0700

----------------------------------------------------------------------
 R/pkg/R/DataFrame.R                      | 169 +--
 R/pkg/R/SQLContext.R                     |  47 +++-
 R/pkg/R/mllib.R                          |   6 +-
 R/pkg/R/schema.R                         |  24 ++--
 R/pkg/R/sparkR.R                         |   7 +-
 docs/sparkr.md                           |  99
 examples/src/main/r/data-manipulation.R  |  15 +--
 examples/src/main/r/dataframe.R          |  13 +--
 examples/src/main/r/ml.R                 |  21 ++--
 9 files changed, 162 insertions(+), 239 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/359c2e82/R/pkg/R/DataFrame.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/DataFrame.R b/R/pkg/R/DataFrame.R
index f3a3eff..583d3ae 100644
--- a/R/pkg/R/DataFrame.R
+++ b/R/pkg/R/DataFrame.R
@@ -35,12 +35,11 @@ setOldClass("structType")
 #' @slot env An R environment that stores bookkeeping states of the SparkDataFrame
 #' @slot sdf A Java object reference to the backing Scala DataFrame
 #' @seealso \link{createDataFrame}, \link{read.json}, \link{table}
-#' @seealso \url{https://spark.apache.org/docs/latest/sparkr.html#sparkr-dataframes}
+#' @seealso \url{https://spark.apache.org/docs/latest/sparkr.html#sparkdataframe}
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' df <- createDataFrame(faithful)
 #'}
 setClass("SparkDataFrame",
@@ -77,8 +76,7 @@ dataFrame <- function(sdf, isCached = FALSE) {
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' printSchema(df)
@@ -102,8 +100,7 @@ setMethod("printSchema",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' dfSchema <- schema(df)
@@ -126,8 +123,7 @@ setMethod("schema",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' explain(df, TRUE)
@@ -157,8 +153,7 @@ setMethod("explain",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' isLocal(df)
@@ -182,8 +177,7 @@ setMethod("isLocal",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' showDF(df)
@@ -207,8 +201,7 @@ setMethod("showDF",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' df
@@ -234,8 +227,7 @@ setMethod("show", "SparkDataFrame",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' dtypes(df)
@@ -261,8 +253,7 @@ setMethod("dtypes",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' columns(df)
@@ -396,8 +387,7 @@ setMethod("coltypes",
 #' @export
 #' @examples
 #'\dontrun{
-#' sc <- sparkR.init()
-#' sqlContext <- sparkRSQL.init(sc)
+#' sparkR.session()
 #' path <- "path/to/file.json"
 #' df <- read.json(path)
 #' coltypes(df) <- c("character", "integer")
@@ -432,7 +422,7 @@ setMethod("coltypes<-",
 
 #' Creates a temporary view using the given name.
 #'
-#' Creates a new temporary view using a SparkDataFrame in the SQLContext. If a
+#' Creates a new temporary