Repository: spark
Updated Branches:
  refs/heads/master f6ff6329e -> 243ce319a


[SPARKR] found some extra whitespace in the R tests

## What changes were proposed in this pull request?

During my Ubuntu-port testing, I found some extra whitespace that, for some 
reason, wasn't being caught by the CentOS lint-r build step.
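
For anyone who wants to reproduce the check locally, the lintr package's
trailing_whitespace_linter can flag these lines directly. This is only a
minimal sketch (lintr and the call shown below are assumptions on my part,
not the exact dev/lint-r invocation):

# R: flag lines that end in stray spaces or tabs in the touched test file
library(lintr)
lint("R/pkg/tests/fulltests/test_sparkSQL_eager.R",
     linters = trailing_whitespace_linter())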

## How was this patch tested?

The build system will test this! I used one of my Ubuntu testing builds and 
scp'ed over the modified file.

before my fix:
https://amplab.cs.berkeley.edu/jenkins/job/spark-master-test-sbt-hadoop-2.7-ubuntu-testing/22/console

after my fix:
https://amplab.cs.berkeley.edu/jenkins/job/spark-master-test-sbt-hadoop-2.7-ubuntu-testing/23/console

Closes #22896 from shaneknapp/remove-extra-whitespace.

Authored-by: shane knapp <incompl...@gmail.com>
Signed-off-by: hyukjinkwon <gurwls...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/243ce319
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/243ce319
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/243ce319

Branch: refs/heads/master
Commit: 243ce319a06f20365d5b08d479642d75748645d9
Parents: f6ff632
Author: shane knapp <incompl...@gmail.com>
Authored: Wed Oct 31 10:32:26 2018 +0800
Committer: hyukjinkwon <gurwls...@apache.org>
Committed: Wed Oct 31 10:32:26 2018 +0800

----------------------------------------------------------------------
 R/pkg/tests/fulltests/test_sparkSQL_eager.R | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/243ce319/R/pkg/tests/fulltests/test_sparkSQL_eager.R
----------------------------------------------------------------------
diff --git a/R/pkg/tests/fulltests/test_sparkSQL_eager.R b/R/pkg/tests/fulltests/test_sparkSQL_eager.R
index df7354f..9b4489a 100644
--- a/R/pkg/tests/fulltests/test_sparkSQL_eager.R
+++ b/R/pkg/tests/fulltests/test_sparkSQL_eager.R
@@ -22,12 +22,12 @@ context("test show SparkDataFrame when eager execution is enabled.")
 test_that("eager execution is not enabled", {
   # Start Spark session without eager execution enabled
   sparkR.session(master = sparkRTestMaster, enableHiveSupport = FALSE)
-  
+
   df <- createDataFrame(faithful)
   expect_is(df, "SparkDataFrame")
   expected <- "eruptions:double, waiting:double"
   expect_output(show(df), expected)
-  
+
   # Stop Spark session
   sparkR.session.stop()
 })
@@ -35,9 +35,9 @@ test_that("eager execution is not enabled", {
 test_that("eager execution is enabled", {
   # Start Spark session with eager execution enabled
   sparkConfig <- list(spark.sql.repl.eagerEval.enabled = "true")
-  
+
   sparkR.session(master = sparkRTestMaster, enableHiveSupport = FALSE, sparkConfig = sparkConfig)
-  
+
   df <- createDataFrame(faithful)
   expect_is(df, "SparkDataFrame")
   expected <- paste0("(+---------+-------+\n",
@@ -45,7 +45,7 @@ test_that("eager execution is enabled", {
                      "+---------+-------+\n)*",
                      "(only showing top 20 rows)")
   expect_output(show(df), expected)
-  
+
   # Stop Spark session
   sparkR.session.stop()
 })
@@ -55,9 +55,9 @@ test_that("eager execution is enabled with maxNumRows and truncate set", {
   sparkConfig <- list(spark.sql.repl.eagerEval.enabled = "true",
                       spark.sql.repl.eagerEval.maxNumRows = as.integer(5),
                       spark.sql.repl.eagerEval.truncate = as.integer(2))
-  
+
   sparkR.session(master = sparkRTestMaster, enableHiveSupport = FALSE, sparkConfig = sparkConfig)
-  
+
   df <- arrange(createDataFrame(faithful), "waiting")
   expect_is(df, "SparkDataFrame")
   expected <- paste0("(+---------+-------+\n",
@@ -66,7 +66,7 @@ test_that("eager execution is enabled with maxNumRows and truncate set", {
                      "|       1.|     43|\n)*",
                      "(only showing top 5 rows)")
   expect_output(show(df), expected)
-  
+
   # Stop Spark session
   sparkR.session.stop()
 })
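
----------------------------------------------------------------------

For context, the eager-evaluation settings exercised above are ordinary Spark
SQL configs and can be set on any SparkR session. A minimal standalone sketch,
assuming a local SparkR install recent enough to support these configs (this
snippet is illustrative and not part of the patch):

library(SparkR)

# Enable eager evaluation so show() renders the DataFrame as an ASCII table,
# capping output at 5 rows and truncating cell values to 2 characters.
sparkR.session(sparkConfig = list(spark.sql.repl.eagerEval.enabled = "true",
                                  spark.sql.repl.eagerEval.maxNumRows = as.integer(5),
                                  spark.sql.repl.eagerEval.truncate = as.integer(2)))

df <- createDataFrame(faithful)
show(df)   # prints the rendered table rather than the usual object summary

sparkR.session.stop()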

