[spark] branch branch-3.0 updated: [SPARK-32020][SQL] Better error message when SPARK_HOME or spark.test.home is not set

2020-06-18 Thread gurwls223
This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
 new 70f3ce6  [SPARK-32020][SQL] Better error message when SPARK_HOME or 
spark.test.home is not set
70f3ce6 is described below

commit 70f3ce69ee6ae01393e853ccea16a259804b5d85
Author: Dilip Biswal 
AuthorDate: Thu Jun 18 22:45:55 2020 +0900

[SPARK-32020][SQL] Better error message when SPARK_HOME or spark.test.home 
is not set

### What changes were proposed in this pull request?
Better error message when SPARK_HOME or spark.test.home is not set.

### Why are the changes needed?
Currently the error message is not easily consumable, as it prints (see
below) the real error after printing the current environment, which is rather
long.

**Old output**
`
 time.name" -> "Java(TM) SE Runtime Environment", "sun.boot.library.path" 
-> "/Library/Java/JavaVirtualMachines/jdk1.8.0_221.jdk/Contents/Home/jre/lib",
 "java.vm.version" -> "25.221-b11",
 . . .
 . . .
 . . .
) did not contain key "SPARK_HOME" spark.test.home or SPARK_HOME is not set.
at org.scalatest.Assertions.newAssertionFailedExceptio
`

**New output**
`
An exception or error caused a run to abort: spark.test.home or SPARK_HOME
is not set.
org.scalatest.exceptions.TestFailedException: spark.test.home or SPARK_HOME
is not set
`

### Does this PR introduce any user-facing change?
No.

### How was this patch tested?
Ran the tests in IntelliJ manually to see the new error.

Closes #28825 from dilipbiswal/minor-spark-31950-followup.

Authored-by: Dilip Biswal 
Signed-off-by: HyukjinKwon 
(cherry picked from commit e4f503614625f81aa86455ad5f6bec3b61d525f1)
Signed-off-by: HyukjinKwon 
---
 .../sql/catalyst/parser/TableIdentifierParserSuite.scala |  6 --
 .../org/apache/spark/sql/catalyst/plans/SQLHelper.scala  |  9 +
 .../scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala  |  6 --
 .../scala/org/apache/spark/sql/IntegratedUDFTestUtils.scala  |  9 +
 .../test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala  | 12 +++-
 5 files changed, 13 insertions(+), 29 deletions(-)

diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
index 04c427d..a721e17 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
@@ -18,7 +18,6 @@ package org.apache.spark.sql.catalyst.parser
 
 import java.io.File
 import java.nio.file.Files
-import java.util.Locale
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable
@@ -295,11 +294,6 @@ class TableIdentifierParserSuite extends SparkFunSuite 
with SQLHelper {
 
   private val sqlSyntaxDefs = {
 val sqlBasePath = {
-  val sparkHome = {
-assert(sys.props.contains("spark.test.home") ||
-  sys.env.contains("SPARK_HOME"), "spark.test.home or SPARK_HOME is 
not set.")
-sys.props.getOrElse("spark.test.home", sys.env("SPARK_HOME"))
-  }
   java.nio.file.Paths.get(sparkHome, "sql", "catalyst", "src", "main", 
"antlr4", "org",
 "apache", "spark", "sql", "catalyst", "parser", "SqlBase.g4").toFile
 }
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
index d213743..5deab79 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
@@ -21,6 +21,8 @@ import java.time.ZoneId
 
 import scala.util.control.NonFatal
 
+import org.scalatest.Assertions.fail
+
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils
 import org.apache.spark.sql.catalyst.util.DateTimeUtils.getZoneId
@@ -83,4 +85,11 @@ trait SQLHelper {
   }
 }
   }
+
+  protected lazy val sparkHome: String = {
+if (!(sys.props.contains("spark.test.home") || 
sys.env.contains("SPARK_HOME"))) {
+  fail("spark.test.home or SPARK_HOME is not set.")
+}
+sys.props.getOrElse("spark.test.home", sys.env("SPARK_HOME"))
+  }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala
index d69ecd7..81c09d1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala
+++ 

[spark] branch branch-3.0 updated: [SPARK-32020][SQL] Better error message when SPARK_HOME or spark.test.home is not set

2020-06-18 Thread gurwls223
This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
 new 70f3ce6  [SPARK-32020][SQL] Better error message when SPARK_HOME or 
spark.test.home is not set
70f3ce6 is described below

commit 70f3ce69ee6ae01393e853ccea16a259804b5d85
Author: Dilip Biswal 
AuthorDate: Thu Jun 18 22:45:55 2020 +0900

[SPARK-32020][SQL] Better error message when SPARK_HOME or spark.test.home 
is not set

### What changes were proposed in this pull request?
Better error message when SPARK_HOME or spark.test.home is not set.

### Why are the changes needed?
Currently the error message is not easily consumable, as it prints (see
below) the real error after printing the current environment, which is rather
long.

**Old output**
`
 time.name" -> "Java(TM) SE Runtime Environment", "sun.boot.library.path" 
-> "/Library/Java/JavaVirtualMachines/jdk1.8.0_221.jdk/Contents/Home/jre/lib",
 "java.vm.version" -> "25.221-b11",
 . . .
 . . .
 . . .
) did not contain key "SPARK_HOME" spark.test.home or SPARK_HOME is not set.
at org.scalatest.Assertions.newAssertionFailedExceptio
`

**New output**
`
An exception or error caused a run to abort: spark.test.home or SPARK_HOME
is not set.
org.scalatest.exceptions.TestFailedException: spark.test.home or SPARK_HOME
is not set
`

### Does this PR introduce any user-facing change?
No.

### How was this patch tested?
Ran the tests in IntelliJ manually to see the new error.

Closes #28825 from dilipbiswal/minor-spark-31950-followup.

Authored-by: Dilip Biswal 
Signed-off-by: HyukjinKwon 
(cherry picked from commit e4f503614625f81aa86455ad5f6bec3b61d525f1)
Signed-off-by: HyukjinKwon 
---
 .../sql/catalyst/parser/TableIdentifierParserSuite.scala |  6 --
 .../org/apache/spark/sql/catalyst/plans/SQLHelper.scala  |  9 +
 .../scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala  |  6 --
 .../scala/org/apache/spark/sql/IntegratedUDFTestUtils.scala  |  9 +
 .../test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala  | 12 +++-
 5 files changed, 13 insertions(+), 29 deletions(-)

diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
index 04c427d..a721e17 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
@@ -18,7 +18,6 @@ package org.apache.spark.sql.catalyst.parser
 
 import java.io.File
 import java.nio.file.Files
-import java.util.Locale
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable
@@ -295,11 +294,6 @@ class TableIdentifierParserSuite extends SparkFunSuite 
with SQLHelper {
 
   private val sqlSyntaxDefs = {
 val sqlBasePath = {
-  val sparkHome = {
-assert(sys.props.contains("spark.test.home") ||
-  sys.env.contains("SPARK_HOME"), "spark.test.home or SPARK_HOME is 
not set.")
-sys.props.getOrElse("spark.test.home", sys.env("SPARK_HOME"))
-  }
   java.nio.file.Paths.get(sparkHome, "sql", "catalyst", "src", "main", 
"antlr4", "org",
 "apache", "spark", "sql", "catalyst", "parser", "SqlBase.g4").toFile
 }
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
index d213743..5deab79 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/plans/SQLHelper.scala
@@ -21,6 +21,8 @@ import java.time.ZoneId
 
 import scala.util.control.NonFatal
 
+import org.scalatest.Assertions.fail
+
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils
 import org.apache.spark.sql.catalyst.util.DateTimeUtils.getZoneId
@@ -83,4 +85,11 @@ trait SQLHelper {
   }
 }
   }
+
+  protected lazy val sparkHome: String = {
+if (!(sys.props.contains("spark.test.home") || 
sys.env.contains("SPARK_HOME"))) {
+  fail("spark.test.home or SPARK_HOME is not set.")
+}
+sys.props.getOrElse("spark.test.home", sys.env("SPARK_HOME"))
+  }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala
index d69ecd7..81c09d1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala
+++