Repository: spark
Updated Branches:
  refs/heads/master ab9872db1 -> a3626ca33


[SPARK-19387][SPARKR] Tests do not run with SparkR source package in CRAN check

## What changes were proposed in this pull request?

- This is caused by the changes in SPARK-18444 and SPARK-18643: we no longer install Spark when `master = ""` (the default). It is also related to SPARK-18449, since the real `master` value is not known at the time the R code in `sparkR.session` is run. (`master` cannot default to "local" since it could be overridden by the spark-submit command line or the Spark config.)
- As a result, running SparkR as a package in an IDE works fine, but the CRAN check does not, because it launches the tests via a non-interactive script.
- The fix is to add a check to the beginning of each test and of the vignettes (see the sketch below). The same would also work by changing `sparkR.session()` to `sparkR.session(master = "local")` in the tests, but I think being more explicit is better.

## How was this patch tested?

Tested by reverting the package version to 2.1, since the check needs to download a release jar with a matching version. However, because there are changes in 2.2 (specifically around SparkR ML) that are incompatible with 2.1, some tests fail in this configuration. This will need to be ported to branch-2.1 and retested with the 2.1 release jar.

Manually, as follows:
```
# modify DESCRIPTION to revert the version to 2.1.0
SPARK_HOME=/usr/spark R CMD build pkg
# run the CRAN check without SPARK_HOME set, so the tests exercise the
# download path in install.spark()
R CMD check --as-cran SparkR_2.1.0.tar.gz
```
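
As a sanity check on the new guard in `install.spark()`, the same check could also be run with SPARK_HOME set; in that case `install.spark()` in `tests/run-all.R` should reuse the existing installation instead of downloading (a hedged variant, not one of the steps actually run above):

```
# hypothetical variant: CRAN check with an existing installation available,
# which the new guard in install.spark() should pick up and return
SPARK_HOME=/usr/spark R CMD check --as-cran SparkR_2.1.0.tar.gz
```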

Author: Felix Cheung <felixcheun...@hotmail.com>

Closes #16720 from felixcheung/rcranchecktest.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a3626ca3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a3626ca3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a3626ca3

Branch: refs/heads/master
Commit: a3626ca333e6e1881e2f09ccae0fa8fa7243223e
Parents: ab9872d
Author: Felix Cheung <felixcheun...@hotmail.com>
Authored: Tue Feb 14 13:51:27 2017 -0800
Committer: Shivaram Venkataraman <shiva...@cs.berkeley.edu>
Committed: Tue Feb 14 13:51:27 2017 -0800

----------------------------------------------------------------------
 R/pkg/R/install.R                    | 16 +++++++++++++---
 R/pkg/R/sparkR.R                     |  6 ++----
 R/pkg/tests/run-all.R                |  3 +++
 R/pkg/vignettes/sparkr-vignettes.Rmd |  3 +++
 4 files changed, 21 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/a3626ca3/R/pkg/R/install.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/install.R b/R/pkg/R/install.R
index 72386e6..4ca7aa6 100644
--- a/R/pkg/R/install.R
+++ b/R/pkg/R/install.R
@@ -21,9 +21,9 @@
 #' Download and Install Apache Spark to a Local Directory
 #'
 #' \code{install.spark} downloads and installs Spark to a local directory if
-#' it is not found. The Spark version we use is the same as the SparkR version.
-#' Users can specify a desired Hadoop version, the remote mirror site, and
-#' the directory where the package is installed locally.
+#' it is not found. If SPARK_HOME is set in the environment, and that directory is found, that is
+#' returned. The Spark version we use is the same as the SparkR version. Users can specify a desired
+#' Hadoop version, the remote mirror site, and the directory where the package is installed locally.
 #'
 #' The full url of remote file is inferred from \code{mirrorUrl} and \code{hadoopVersion}.
 #' \code{mirrorUrl} specifies the remote path to a Spark folder. It is followed by a subfolder
@@ -68,6 +68,16 @@
 #'          \href{http://spark.apache.org/downloads.html}{Apache Spark}
 install.spark <- function(hadoopVersion = "2.7", mirrorUrl = NULL,
                           localDir = NULL, overwrite = FALSE) {
+  sparkHome <- Sys.getenv("SPARK_HOME")
+  if (isSparkRShell()) {
+    stopifnot(nchar(sparkHome) > 0)
+    message("Spark is already running in sparkR shell.")
+    return(invisible(sparkHome))
+  } else if (!is.na(file.info(sparkHome)$isdir)) {
+    message("Spark package found in SPARK_HOME: ", sparkHome)
+    return(invisible(sparkHome))
+  }
+
   version <- paste0("spark-", packageVersion("SparkR"))
   hadoopVersion <- tolower(hadoopVersion)
   hadoopVersionName <- hadoopVersionName(hadoopVersion)
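
For illustration, a hedged sketch of the new early-return behavior outside the sparkR shell (the /usr/spark path is hypothetical):

```r
library(SparkR)

# When SPARK_HOME points at an existing directory, install.spark() now
# returns it (invisibly) instead of downloading a release jar:
Sys.setenv(SPARK_HOME = "/usr/spark")  # hypothetical existing installation
sparkHome <- install.spark()
#> Spark package found in SPARK_HOME: /usr/spark

# Without SPARK_HOME (e.g. during the CRAN check), execution falls through
# to the original download path for the matching SparkR release.
Sys.unsetenv("SPARK_HOME")
```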

http://git-wip-us.apache.org/repos/asf/spark/blob/a3626ca3/R/pkg/R/sparkR.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/sparkR.R b/R/pkg/R/sparkR.R
index 870e76b..61773ed 100644
--- a/R/pkg/R/sparkR.R
+++ b/R/pkg/R/sparkR.R
@@ -588,13 +588,11 @@ processSparkPackages <- function(packages) {
 sparkCheckInstall <- function(sparkHome, master, deployMode) {
   if (!isSparkRShell()) {
     if (!is.na(file.info(sparkHome)$isdir)) {
-      msg <- paste0("Spark package found in SPARK_HOME: ", sparkHome)
-      message(msg)
+      message("Spark package found in SPARK_HOME: ", sparkHome)
       NULL
     } else {
       if (interactive() || isMasterLocal(master)) {
-        msg <- paste0("Spark not found in SPARK_HOME: ", sparkHome)
-        message(msg)
+        message("Spark not found in SPARK_HOME: ", sparkHome)
         packageLocalDir <- install.spark()
         packageLocalDir
       } else if (isClientMode(master) || deployMode == "client") {

http://git-wip-us.apache.org/repos/asf/spark/blob/a3626ca3/R/pkg/tests/run-all.R
----------------------------------------------------------------------
diff --git a/R/pkg/tests/run-all.R b/R/pkg/tests/run-all.R
index 1d04656..ab8d1ca 100644
--- a/R/pkg/tests/run-all.R
+++ b/R/pkg/tests/run-all.R
@@ -21,4 +21,7 @@ library(SparkR)
 # Turn all warnings into errors
 options("warn" = 2)
 
+# Setup global test environment
+install.spark()
+
 test_package("SparkR")

http://git-wip-us.apache.org/repos/asf/spark/blob/a3626ca3/R/pkg/vignettes/sparkr-vignettes.Rmd
----------------------------------------------------------------------
diff --git a/R/pkg/vignettes/sparkr-vignettes.Rmd b/R/pkg/vignettes/sparkr-vignettes.Rmd
index f13e0b3..a742484 100644
--- a/R/pkg/vignettes/sparkr-vignettes.Rmd
+++ b/R/pkg/vignettes/sparkr-vignettes.Rmd
@@ -44,6 +44,9 @@ library(SparkR)
 
 We use default settings in which it runs in local mode. It auto downloads Spark package in the background if no previous installation is found. For more details about setup, see [Spark Session](#SetupSparkSession).
 
+```{r, include=FALSE}
+install.spark()
+```
 ```{r, message=FALSE, results="hide"}
 sparkR.session()
 ```

