This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 76b5ed4  [SPARK-31935][SQL][TESTS][FOLLOWUP] Fix the test case for Hadoop2/3
76b5ed4 is described below

commit 76b5ed4ffaa82241944aeae0a0238cf8ee86e44a
Author: Gengliang Wang <gengliang.w...@databricks.com>
AuthorDate: Wed Jun 10 20:59:48 2020 -0700

    [SPARK-31935][SQL][TESTS][FOLLOWUP] Fix the test case for Hadoop2/3
    
    ### What changes were proposed in this pull request?
    
    This PR updates the test case to accept the Hadoop 2/3 error messages correctly.
    
    ### Why are the changes needed?
    
    SPARK-31935 (#28760) breaks the Hadoop 3.2 UT because Hadoop 2 and Hadoop 3 have different exception messages.
    In https://github.com/apache/spark/pull/28791, two test suites missed the fix.
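
    For context, a minimal sketch (not part of the commit itself) of how the updated assertions tolerate both variants, assuming Hadoop 2 reports "No FileSystem for scheme: nonexistFS" while Hadoop 3 wraps the scheme name in quotes instead of using a colon:

        // Hypothetical illustration of the normalization used by the updated tests:
        // strip ':' and '"' so the Hadoop 2 and Hadoop 3 wordings compare equal.
        val hadoop2Message = "No FileSystem for scheme: nonexistFS"
        val hadoop3Message = "No FileSystem for scheme \"nonexistFS\"" // assumed Hadoop 3 wording
        val expectMessage  = "No FileSystem for scheme nonexistFS"
        assert(hadoop2Message.filterNot(Set(':', '"').contains) == expectMessage)
        assert(hadoop3Message.filterNot(Set(':', '"').contains) == expectMessage)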
    
    ### Does this PR introduce _any_ user-facing change?
    
    No

    ### How was this patch tested?
    
    Unit test
    
    Closes #28796 from gengliangwang/SPARK-31926-followup.
    
    Authored-by: Gengliang Wang <gengliang.w...@databricks.com>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../org/apache/spark/sql/execution/datasources/DataSourceSuite.scala  | 3 ++-
 .../scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala  | 4 ++--
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/DataSourceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/DataSourceSuite.scala
index 9345158..aa91791 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/DataSourceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/DataSourceSuite.scala
@@ -142,7 +142,8 @@ class DataSourceSuite extends SharedSparkSession with PrivateMethodTester {
     val message = intercept[java.io.IOException] {
       dataSource invokePrivate checkAndGlobPathIfNecessary(false, false)
     }.getMessage
-    assert(message.equals("No FileSystem for scheme: nonexistsFs"))
+    val expectMessage = "No FileSystem for scheme nonexistsFs"
+    assert(message.filterNot(Set(':', '"').contains) == expectMessage)
   }
 }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
index 32dceaa..7b16aeb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala
@@ -536,11 +536,11 @@ class FileStreamSourceSuite extends FileStreamSourceTest {
     withTempDir { dir =>
       val path = dir.getCanonicalPath
       val defaultFs = "nonexistFS://nonexistFS"
-      val expectMessage = "No FileSystem for scheme: nonexistFS"
+      val expectMessage = "No FileSystem for scheme nonexistFS"
       val message = intercept[java.io.IOException] {
         spark.readStream.option("fs.defaultFS", defaultFs).text(path)
       }.getMessage
-      assert(message == expectMessage)
+      assert(message.filterNot(Set(':', '"').contains) == expectMessage)
     }
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
