This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new d7b78a353f3 [SPARK-40648][YARN][TESTS][3.3] Add @ExtendedLevelDBTest 
to LevelDB relevant tests in the yarn module
d7b78a353f3 is described below

commit d7b78a353f39e85f9426218de495b1774ace4e22
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Tue Oct 4 13:40:37 2022 -0700

    [SPARK-40648][YARN][TESTS][3.3] Add @ExtendedLevelDBTest to LevelDB 
relevant tests in the yarn module
    
    ### What changes were proposed in this pull request?
    SPARK-40490 made the test cases related to `YarnShuffleIntegrationSuite` 
start verifying the `registeredExecFile` reload test scenario again, so this PR 
adds `ExtendedLevelDBTest` to the LevelDB-relevant tests in the `yarn` module so 
that `macOS/Apple Silicon` users can skip these tests through 
`-Dtest.exclude.tags=org.apache.spark.tags.ExtendedLevelDBTest`.
    
    ### Why are the changes needed?
    According to convention, add `ExtendedLevelDBTest` to the LevelDB-relevant 
tests so that the `yarn` module can skip these tests through 
`-Dtest.exclude.tags=org.apache.spark.tags.ExtendedLevelDBTest` on `macOS/Apple 
Silicon`.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    - Pass GitHub Actions
    - Manual test on `MacOs/Apple Silicon`
    
    ```
    build/sbt clean "yarn/testOnly *YarnShuffleIntegrationSuite*" -Pyarn 
-Dtest.exclude.tags=org.apache.spark.tags.ExtendedLevelDBTest
    build/sbt clean "yarn/testOnly *YarnShuffleAuthSuite*" -Pyarn 
-Dtest.exclude.tags=org.apache.spark.tags.ExtendedLevelDBTest
    build/sbt clean "yarn/testOnly *YarnShuffleAlternateNameConfigSuite*" 
-Pyarn -Dtest.exclude.tags=org.apache.spark.tags.ExtendedLevelDBTest
    ```
    
    **Before**
    
    All 3 cases aborted as follows:
    ```
    [info] YarnShuffleIntegrationSuite:
    [info] org.apache.spark.deploy.yarn.YarnShuffleIntegrationSuite *** ABORTED 
*** (1 second, 144 milliseconds)
    [info]   java.lang.UnsatisfiedLinkError: Could not load library. Reasons: 
[no leveldbjni64-1.8 in java.library.path, no leveldbjni-1.8 in 
java.library.path, no leveldbjni in java.library.path, 
/Users/yangjie01/SourceCode/git/spark-source/target/tmp/libleveldbjni-64-1-7065283280142546801.8:
 
dlopen(/Users/yangjie01/SourceCode/git/spark-source/target/tmp/libleveldbjni-64-1-7065283280142546801.8,
 1): no suitable image found.  Did find:
    [info]  
/Users/yangjie01/SourceCode/git/spark-source/target/tmp/libleveldbjni-64-1-7065283280142546801.8:
 no matching architecture in universal wrapper
    [info]  
/Users/yangjie01/SourceCode/git/spark-source/target/tmp/libleveldbjni-64-1-7065283280142546801.8:
 no matching architecture in universal wrapper]
    [info]   at org.fusesource.hawtjni.runtime.Library.doLoad(Library.java:182)
    [info]   at org.fusesource.hawtjni.runtime.Library.load(Library.java:140)
    [info]   at 
org.fusesource.leveldbjni.JniDBFactory.<clinit>(JniDBFactory.java:48)
    [info]   at 
org.apache.spark.network.util.LevelDBProvider.initLevelDB(LevelDBProvider.java:48)
    [info]   at 
org.apache.spark.network.shuffle.ExternalShuffleBlockResolver.<init>(ExternalShuffleBlockResolver.java:126)
    [info]   at 
org.apache.spark.network.shuffle.ExternalShuffleBlockResolver.<init>(ExternalShuffleBlockResolver.java:99)
    [info]   at 
org.apache.spark.network.shuffle.ExternalBlockHandler.<init>(ExternalBlockHandler.java:90)
    [info]   at 
org.apache.spark.network.yarn.YarnShuffleService.serviceInit(YarnShuffleService.java:247)
    [info]   at 
org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
    [info]   at 
org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices.initAuxService(AuxServices.java:475)
    [info]   at 
org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServices.serviceInit(AuxServices.java:758)
    [info]   at 
org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
    [info]   at 
org.apache.hadoop.service.CompositeService.serviceInit(CompositeService.java:109)
    [info]   at 
org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl.serviceInit(ContainerManagerImpl.java:327)
    [info]   at 
org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
    [info]   at 
org.apache.hadoop.service.CompositeService.serviceInit(CompositeService.java:109)
    [info]   at 
org.apache.hadoop.yarn.server.nodemanager.NodeManager.serviceInit(NodeManager.java:494)
    [info]   at 
org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
    [info]   at 
org.apache.hadoop.yarn.server.MiniYARNCluster$NodeManagerWrapper.serviceInit(MiniYARNCluster.java:597)
    [info]   at 
org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
    [info]   at 
org.apache.hadoop.service.CompositeService.serviceInit(CompositeService.java:109)
    [info]   at 
org.apache.hadoop.yarn.server.MiniYARNCluster.serviceInit(MiniYARNCluster.java:327)
    [info]   at 
org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
    [info]   at 
org.apache.spark.deploy.yarn.BaseYarnClusterSuite.beforeAll(BaseYarnClusterSuite.scala:111)
    [info]   at 
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
    [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
    [info]   at 
org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
    [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:64)
    [info]   at 
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:318)
    [info]   at 
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:513)
    [info]   at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413)
    [info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    [info]   at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    [info]   at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    [info]   at java.lang.Thread.run(Thread.java:750)
    ```
    
    **After**
    
    All 3 cases ran as follows:
    
    ```
    [info] YarnShuffleAlternateNameConfigSuite:
    [info] Run completed in 1 second, 288 milliseconds.
    [info] Total number of tests run: 0
    [info] Suites: completed 1, aborted 0
    [info] Tests: succeeded 0, failed 0,
    ```
    
    Closes #38096 from LuciferYang/SPARK-40648-33.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .../spark/deploy/yarn/YarnShuffleAlternateNameConfigSuite.scala       | 3 ++-
 .../org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala    | 4 +++-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git 
a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleAlternateNameConfigSuite.scala
 
b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleAlternateNameConfigSuite.scala
index db001a946fd..55ae7a4769b 100644
--- 
a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleAlternateNameConfigSuite.scala
+++ 
b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleAlternateNameConfigSuite.scala
@@ -24,12 +24,13 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration
 import org.apache.spark._
 import org.apache.spark.internal.config._
 import org.apache.spark.network.yarn.{YarnShuffleService, YarnTestAccessor}
-import org.apache.spark.tags.ExtendedYarnTest
+import org.apache.spark.tags.{ExtendedLevelDBTest, ExtendedYarnTest}
 
 /**
  * SPARK-34828: Integration test for the external shuffle service with an 
alternate name and
  * configs (by using a configuration overlay)
  */
+@ExtendedLevelDBTest
 @ExtendedYarnTest
 class YarnShuffleAlternateNameConfigSuite extends YarnShuffleIntegrationSuite {
 
diff --git 
a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala
 
b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala
index a3447d352bb..c559388de1d 100644
--- 
a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala
+++ 
b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnShuffleIntegrationSuite.scala
@@ -33,11 +33,12 @@ import org.apache.spark.internal.config._
 import org.apache.spark.internal.config.Network._
 import org.apache.spark.network.shuffle.ShuffleTestAccessor
 import org.apache.spark.network.yarn.{YarnShuffleService, YarnTestAccessor}
-import org.apache.spark.tags.ExtendedYarnTest
+import org.apache.spark.tags.{ExtendedLevelDBTest, ExtendedYarnTest}
 
 /**
  * Integration test for the external shuffle service with a yarn mini-cluster
  */
+@ExtendedLevelDBTest
 @ExtendedYarnTest
 class YarnShuffleIntegrationSuite extends BaseYarnClusterSuite {
 
@@ -86,6 +87,7 @@ class YarnShuffleIntegrationSuite extends 
BaseYarnClusterSuite {
 /**
  * Integration test for the external shuffle service with auth on.
  */
+@ExtendedLevelDBTest
 @ExtendedYarnTest
 class YarnShuffleAuthSuite extends YarnShuffleIntegrationSuite {
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to