Github user felixcheung commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22589#discussion_r221440760
  
    --- Diff: R/pkg/tests/run-all.R ---
    @@ -18,50 +18,55 @@
     library(testthat)
     library(SparkR)
     
    -# Turn all warnings into errors
    -options("warn" = 2)
    +# SPARK-25572
    +if (identical(Sys.getenv("NOT_CRAN"), "true")) {
     
    -if (.Platform$OS.type == "windows") {
    -  Sys.setenv(TZ = "GMT")
    -}
    +  # Turn all warnings into errors
    +  options("warn" = 2)
     
    -# Setup global test environment
    -# Install Spark first to set SPARK_HOME
    +  if (.Platform$OS.type == "windows") {
    +    Sys.setenv(TZ = "GMT")
    +  }
     
    -# NOTE(shivaram): We set overwrite to handle any old tar.gz files or 
directories left behind on
    -# CRAN machines. For Jenkins we should already have SPARK_HOME set.
    -install.spark(overwrite = TRUE)
    +  # Setup global test environment
    +  # Install Spark first to set SPARK_HOME
     
    -sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
    -sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
    -invisible(lapply(sparkRWhitelistSQLDirs,
    -                 function(x) { unlink(file.path(sparkRDir, x), recursive = 
TRUE, force = TRUE)}))
    -sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)
    +  # NOTE(shivaram): We set overwrite to handle any old tar.gz files or 
directories left behind on
    +  # CRAN machines. For Jenkins we should already have SPARK_HOME set.
    +  install.spark(overwrite = TRUE)
     
    -sparkRTestMaster <- "local[1]"
    -sparkRTestConfig <- list()
    -if (identical(Sys.getenv("NOT_CRAN"), "true")) {
    -  sparkRTestMaster <- ""
    -} else {
    -  # Disable hsperfdata on CRAN
    -  old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
    -  Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
    -  tmpDir <- tempdir()
    -  tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
    -  sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
    -                           spark.executor.extraJavaOptions = tmpArg)
    -}
    +  sparkRDir <- file.path(Sys.getenv("SPARK_HOME"), "R")
    +  sparkRWhitelistSQLDirs <- c("spark-warehouse", "metastore_db")
    +  invisible(lapply(sparkRWhitelistSQLDirs,
    +                   function(x) { unlink(file.path(sparkRDir, x), recursive 
= TRUE, force = TRUE)}))
    +  sparkRFilesBefore <- list.files(path = sparkRDir, all.files = TRUE)
     
    -test_package("SparkR")
    +  sparkRTestMaster <- "local[1]"
    +  sparkRTestConfig <- list()
    +  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
    +    sparkRTestMaster <- ""
    +  } else {
    +    # Disable hsperfdata on CRAN
    +    old_java_opt <- Sys.getenv("_JAVA_OPTIONS")
    +    Sys.setenv("_JAVA_OPTIONS" = paste("-XX:-UsePerfData", old_java_opt))
    +    tmpDir <- tempdir()
    +    tmpArg <- paste0("-Djava.io.tmpdir=", tmpDir)
    +    sparkRTestConfig <- list(spark.driver.extraJavaOptions = tmpArg,
    +                             spark.executor.extraJavaOptions = tmpArg)
    +  }
     
    -if (identical(Sys.getenv("NOT_CRAN"), "true")) {
    -  # set random seed for predictable results. mostly for base's sample() in 
tree and classification
    -  set.seed(42)
    -  # for testthat 1.0.2 later, change reporter from "summary" to 
default_reporter()
    -  testthat:::run_tests("SparkR",
    -                       file.path(sparkRDir, "pkg", "tests", "fulltests"),
    -                       NULL,
    -                       "summary")
    -}
    +  test_package("SparkR")
    +
    +  if (identical(Sys.getenv("NOT_CRAN"), "true")) {
    --- End diff ---
    
    We are trying this now — we can clean it up if this works.



---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to