This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new 4a43b4d7dde [SPARK-36681][CORE][TESTS][FOLLOW-UP] Handle LinkageError 
when Snappy native library is not available in low Hadoop versions
4a43b4d7dde is described below

commit 4a43b4d7ddea96873095ddedae517268cbbe1663
Author: Peter Toth <pt...@cloudera.com>
AuthorDate: Thu May 26 10:35:05 2022 -0700

    [SPARK-36681][CORE][TESTS][FOLLOW-UP] Handle LinkageError when Snappy 
native library is not available in low Hadoop versions
    
    ### What changes were proposed in this pull request?
    
    This is a follow-up to https://github.com/apache/spark/pull/36136 to fix 
`LinkageError` handling in `FileSuite` to avoid a test suite abort when the Snappy 
native library is not available in older Hadoop versions:
    ```
    23:16:22 FileSuite:
    23:16:22 org.apache.spark.FileSuite *** ABORTED ***
    23:16:22   java.lang.RuntimeException: Unable to load a Suite class that 
was discovered in the runpath: org.apache.spark.FileSuite
    23:16:22   at 
org.scalatest.tools.DiscoverySuite$.getSuiteInstance(DiscoverySuite.scala:81)
    23:16:22   at 
org.scalatest.tools.DiscoverySuite.$anonfun$nestedSuites$1(DiscoverySuite.scala:38)
    23:16:22   at 
scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
    23:16:22   at scala.collection.Iterator.foreach(Iterator.scala:941)
    23:16:22   at scala.collection.Iterator.foreach$(Iterator.scala:941)
    23:16:22   at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
    23:16:22   at scala.collection.IterableLike.foreach(IterableLike.scala:74)
    23:16:22   at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
    23:16:22   at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
    23:16:22   at 
scala.collection.TraversableLike.map(TraversableLike.scala:238)
    23:16:22   ...
    23:16:22   Cause: java.lang.UnsatisfiedLinkError: 
org.apache.hadoop.util.NativeCodeLoader.buildSupportsSnappy()Z
    23:16:22   at 
org.apache.hadoop.util.NativeCodeLoader.buildSupportsSnappy(Native Method)
    23:16:22   at 
org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:63)
    23:16:22   at 
org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:136)
    23:16:22   at 
org.apache.spark.FileSuite.$anonfun$new$12(FileSuite.scala:145)
    23:16:22   at scala.util.Try$.apply(Try.scala:213)
    23:16:22   at org.apache.spark.FileSuite.<init>(FileSuite.scala:141)
    23:16:22   at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native 
Method)
    23:16:22   at 
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    23:16:22   at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    23:16:22   at 
java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    ```
    Scala's `Try` can handle only `NonFatal` throwables, so a fatal `LinkageError` 
escapes it and aborts the whole suite; an explicit `try`/`catch` on `LinkageError` 
is needed instead.
    
    ### Why are the changes needed?
    To make the tests more robust.
    
    ### Does this PR introduce _any_ user-facing change?
    Nope, this is test-only.
    
    ### How was this patch tested?
    Manual test.
    
    Closes #36687 from peter-toth/SPARK-36681-handle-linkageerror.
    
    Authored-by: Peter Toth <pt...@cloudera.com>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
    (cherry picked from commit dbde77856d2e51ff502a7fc1dba7f10316c2211b)
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 core/src/test/scala/org/apache/spark/FileSuite.scala | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala 
b/core/src/test/scala/org/apache/spark/FileSuite.scala
index 97795c55c82..9c22ee09d0b 100644
--- a/core/src/test/scala/org/apache/spark/FileSuite.scala
+++ b/core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -23,6 +23,7 @@ import java.nio.charset.StandardCharsets
 import java.util.zip.GZIPOutputStream
 
 import scala.io.Source
+import scala.util.control.NonFatal
 
 import com.google.common.io.Files
 import org.apache.hadoop.conf.Configuration
@@ -137,13 +138,16 @@ class FileSuite extends SparkFunSuite with 
LocalSparkContext {
 
   // Hadoop "gzip" and "zstd" codecs require native library installed for 
sequence files
   private val codecs = Seq((new DefaultCodec(), "default"), (new BZip2Codec(), 
"bzip2")) ++ {
-    scala.util.Try {
+    try {
       // See HADOOP-17125. Hadoop lower than 3.3.1 can throw an exception when 
its native
       // library for Snappy is unavailable. Here it calls 
`SnappyCodec.getCompressorType`
       // to indirectly test if the Snappy native library is available in lower 
Hadoop versions.
       new SnappyCodec().getCompressorType
-      (new SnappyCodec(), "snappy")
-    }.toOption
+      Some(new SnappyCodec(), "snappy")
+    } catch {
+      case _: LinkageError => None
+      case NonFatal(_) => None
+    }
   } ++ {
     if (VersionUtils.isHadoop3) Seq((new Lz4Codec(), "lz4")) else Seq.empty
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to