This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new 2a66771b45a [SPARK-44968][BUILD] Downgrade ivy from 2.5.2 to 2.5.1
2a66771b45a is described below

commit 2a66771b45a7729143ddc45da5dcf095820fd80d
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Sat Aug 26 17:31:09 2023 +0800

    [SPARK-44968][BUILD] Downgrade ivy from 2.5.2 to 2.5.1
    
    ### What changes were proposed in this pull request?
    After upgrading Ivy from 2.5.1 to 2.5.2 in SPARK-44914, the daily tests for 
Java 11 and Java 17 began to be ABORTED in the 
`HiveExternalCatalogVersionsSuite` test.
    
    Java 11
    
    - https://github.com/apache/spark/actions/runs/5953716283/job/16148657660
    - https://github.com/apache/spark/actions/runs/5966131923/job/16185159550
    
    Java 17
    
    - https://github.com/apache/spark/actions/runs/5956925790/job/16158714165
    - https://github.com/apache/spark/actions/runs/5969348559/job/16195073478
    
    ```
    2023-08-23T23:00:49.6547573Z [info]   2023-08-23 16:00:48.209 - stdout> : 
java.lang.RuntimeException: problem during retrieve of 
org.apache.spark#spark-submit-parent-4c061f04-b951-4d06-8909-cde5452988d9: 
java.lang.RuntimeException: Multiple artifacts of the module log4j#log4j;1.2.17 
are retrieved to the same file! Update the retrieve pattern to fix this error.
    2023-08-23T23:00:49.6548745Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.ivy.core.retrieve.RetrieveEngine.retrieve(RetrieveEngine.java:238)
    2023-08-23T23:00:49.6549572Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.ivy.core.retrieve.RetrieveEngine.retrieve(RetrieveEngine.java:89)
    2023-08-23T23:00:49.6550334Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at org.apache.ivy.Ivy.retrieve(Ivy.java:551)
    2023-08-23T23:00:49.6551079Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.deploy.SparkSubmitUtils$.resolveMavenCoordinates(SparkSubmit.scala:1464)
    2023-08-23T23:00:49.6552024Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.client.IsolatedClientLoader$.$anonfun$downloadVersion$2(IsolatedClientLoader.scala:138)
    2023-08-23T23:00:49.6552884Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at org.apache.spark.sql.catalyst.util.package$.quietly(package.scala:42)
    2023-08-23T23:00:49.6553755Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.client.IsolatedClientLoader$.downloadVersion(IsolatedClientLoader.scala:138)
    2023-08-23T23:00:49.6554705Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.client.IsolatedClientLoader$.liftedTree1$1(IsolatedClientLoader.scala:65)
    2023-08-23T23:00:49.6555637Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.client.IsolatedClientLoader$.forVersion(IsolatedClientLoader.scala:64)
    2023-08-23T23:00:49.6556554Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:443)
    2023-08-23T23:00:49.6557340Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:356)
    2023-08-23T23:00:49.6558187Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:71)
    2023-08-23T23:00:49.6559061Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:70)
    2023-08-23T23:00:49.6559962Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.HiveExternalCatalog.$anonfun$databaseExists$1(HiveExternalCatalog.scala:224)
    2023-08-23T23:00:49.6560766Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
    2023-08-23T23:00:49.6561584Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:102)
    2023-08-23T23:00:49.6562510Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:224)
    2023-08-23T23:00:49.6563435Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:150)
    2023-08-23T23:00:49.6564323Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:140)
    2023-08-23T23:00:49.6565340Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:45)
    2023-08-23T23:00:49.6566321Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$1(HiveSessionStateBuilder.scala:60)
    2023-08-23T23:00:49.6567363Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog$lzycompute(SessionCatalog.scala:118)
    2023-08-23T23:00:49.6568372Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog(SessionCatalog.scala:118)
    2023-08-23T23:00:49.6569393Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.catalog.SessionCatalog.tableExists(SessionCatalog.scala:490)
    2023-08-23T23:00:49.6570685Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.command.CreateDataSourceTableAsSelectCommand.run(createDataSourceTables.scala:155)
    2023-08-23T23:00:49.6571842Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:113)
    2023-08-23T23:00:49.6572932Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:111)
    2023-08-23T23:00:49.6573996Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.command.DataWritingCommandExec.executeCollect(commands.scala:125)
    2023-08-23T23:00:49.6575045Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:97)
    2023-08-23T23:00:49.6576066Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
    2023-08-23T23:00:49.6576937Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
    2023-08-23T23:00:49.6577807Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
    2023-08-23T23:00:49.6578620Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
    2023-08-23T23:00:49.6579432Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
    2023-08-23T23:00:49.6580357Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:97)
    2023-08-23T23:00:49.6581331Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:93)
    2023-08-23T23:00:49.6582239Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:481)
    2023-08-23T23:00:49.6583101Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:82)
    2023-08-23T23:00:49.6584088Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:481)
    2023-08-23T23:00:49.6585236Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
    2023-08-23T23:00:49.6586519Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
    2023-08-23T23:00:49.6587686Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
    2023-08-23T23:00:49.6588898Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
    2023-08-23T23:00:49.6590014Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
    2023-08-23T23:00:49.6590993Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:457)
    2023-08-23T23:00:49.6591930Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:93)
    2023-08-23T23:00:49.6592914Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:80)
    2023-08-23T23:00:49.6593856Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:78)
    2023-08-23T23:00:49.6594687Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:219)
    2023-08-23T23:00:49.6595379Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
    2023-08-23T23:00:49.6596103Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
    2023-08-23T23:00:49.6596807Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
    2023-08-23T23:00:49.6597520Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:618)
    2023-08-23T23:00:49.6598276Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
    2023-08-23T23:00:49.6599022Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:613)
    2023-08-23T23:00:49.6599819Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native 
Method)
    2023-08-23T23:00:49.6600723Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
    2023-08-23T23:00:49.6601707Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    2023-08-23T23:00:49.6602513Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at java.base/java.lang.reflect.Method.invoke(Method.java:568)
    2023-08-23T23:00:49.6603272Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    2023-08-23T23:00:49.6604007Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
    2023-08-23T23:00:49.6604724Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at py4j.Gateway.invoke(Gateway.java:282)
    2023-08-23T23:00:49.6605416Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    2023-08-23T23:00:49.6606209Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    2023-08-23T23:00:49.6606969Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:182)
    2023-08-23T23:00:49.6607743Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at py4j.ClientServerConnection.run(ClientServerConnection.java:106)
    2023-08-23T23:00:49.6608415Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at java.base/java.lang.Thread.run(Thread.java:833)
    2023-08-23T23:00:49.6609288Z [info]   2023-08-23 16:00:48.209 - stdout> 
Caused by: java.lang.RuntimeException: Multiple artifacts of the module 
log4j#log4j;1.2.17 are retrieved to the same file! Update the retrieve pattern 
to fix this error.
    2023-08-23T23:00:49.6610288Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.ivy.core.retrieve.RetrieveEngine.determineArtifactsToCopy(RetrieveEngine.java:426)
    2023-08-23T23:00:49.6611332Z [info]   2023-08-23 16:00:48.209 - stdout>     
    at 
org.apache.ivy.core.retrieve.RetrieveEngine.retrieve(RetrieveEngine.java:122)
    2023-08-23T23:00:49.6612046Z [info]   2023-08-23 16:00:48.209 - stdout>     
    ... 66 more
    2023-08-23T23:00:49.6612498Z [info]   2023-08-23 16:00:48.209 - stdout>
    ```
    
    So this PR downgrades Ivy from 2.5.2 to 2.5.1 to restore the Java 11/17 daily 
tests.
    
    ### Why are the changes needed?
    To restore Java 11/17 daily tests.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    By changing the default Java version in `build_and_test.yml` to 17 for 
verification, the tests succeed after downgrading Ivy to 2.5.1.
    
    - 
https://github.com/LuciferYang/spark/actions/runs/5972232677/job/16209970934
    
    <img width="1116" alt="image" 
src="https://github.com/apache/spark/assets/1475305/cd4002d8-893d-4845-8b2e-c01ff3106f7f";>
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #42668 from LuciferYang/test-java17.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: yangjie01 <yangji...@baidu.com>
    (cherry picked from commit 4f8a1991e793bba2a6620760b6ee2cdc8f3ff21d)
    Signed-off-by: yangjie01 <yangji...@baidu.com>
---
 dev/deps/spark-deps-hadoop-3-hive-2.3 | 2 +-
 pom.xml                               | 6 +++++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/dev/deps/spark-deps-hadoop-3-hive-2.3 
b/dev/deps/spark-deps-hadoop-3-hive-2.3
index 8f898fc1ef5..b6aba589d5f 100644
--- a/dev/deps/spark-deps-hadoop-3-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-3-hive-2.3
@@ -98,7 +98,7 @@ httpclient/4.5.14//httpclient-4.5.14.jar
 httpcore/4.4.16//httpcore-4.4.16.jar
 ini4j/0.5.4//ini4j-0.5.4.jar
 istack-commons-runtime/3.0.8//istack-commons-runtime-3.0.8.jar
-ivy/2.5.2//ivy-2.5.2.jar
+ivy/2.5.1//ivy-2.5.1.jar
 jackson-annotations/2.15.2//jackson-annotations-2.15.2.jar
 jackson-core-asl/1.9.13//jackson-core-asl-1.9.13.jar
 jackson-core/2.15.2//jackson-core-2.15.2.jar
diff --git a/pom.xml b/pom.xml
index f94b3b729c2..154ca4005f6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -146,7 +146,11 @@
     <jetty.version>9.4.51.v20230217</jetty.version>
     <jakartaservlet.version>4.0.3</jakartaservlet.version>
     <chill.version>0.10.0</chill.version>
-    <ivy.version>2.5.2</ivy.version>
+    <!--
+      SPARK-44968: don't upgrade Ivy to version 2.5.2 until the test aborted of
+      `HiveExternalCatalogVersionsSuite` in Java 11/17 daily tests is resolved.
+    -->
+    <ivy.version>2.5.1</ivy.version>
     <oro.version>2.0.8</oro.version>
     <!--
     If you changes codahale.metrics.version, you also need to change


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to