pan3793 commented on PR #7274:
URL: https://github.com/apache/kyuubi/pull/7274#issuecomment-3654879837

   Unfortunately, `FinalStageResourceManagerSuite` breaks for Spark 4.0 because the `serialVersionUID` of `scala.collection.immutable.ArraySeq` changed in Scala 2.13.17:
   ```
   25/12/15 18:05:25 WARN TransportChannelHandler: Exception in connection from /10.242.159.140:62128
   java.io.InvalidClassException: scala.collection.immutable.ArraySeq; local class incompatible: stream classdesc serialVersionUID = -1506415583798956640, local class serialVersionUID = -8615987390676041167
        at java.base/java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:597)
        at java.base/java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:2051)
        at java.base/java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1898)
        at java.base/java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:2051)
        at java.base/java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1898)
        at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2224)
        at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1733)
        at java.base/java.io.ObjectInputStream$FieldValues.<init>(ObjectInputStream.java:2606)
        at java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2457)
        at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2257)
        at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1733)
        at java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:509)
        at java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:467)
        at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:88)
        at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:130)
        at org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$deserialize$2(NettyRpcEnv.scala:302)
        at scala.util.DynamicVariable.withValue(DynamicVariable.scala:59)
        at org.apache.spark.rpc.netty.NettyRpcEnv.deserialize(NettyRpcEnv.scala:355)
        at org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$deserialize$1(NettyRpcEnv.scala:301)
        at scala.util.DynamicVariable.withValue(DynamicVariable.scala:59)
        at org.apache.spark.rpc.netty.NettyRpcEnv.deserialize(NettyRpcEnv.scala:301)
        at org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$7(NettyRpcEnv.scala:249)
        at org.apache.spark.rpc.netty.NettyRpcEnv.$anonfun$askAbortable$7$adapted(NettyRpcEnv.scala:249)
        at org.apache.spark.rpc.netty.RpcOutboxMessage.onSuccess(Outbox.scala:90)
        at org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:203)
        at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:145)
        at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:55)
        at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:99)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
        at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
        at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:289)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
        at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
        at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:107)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
        at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
        at org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:102)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
        at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
        at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1357)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440)
        at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
        at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:868)
        at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166)
        at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:796)
        at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:732)
        at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:658)
        at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562)
        at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:998)
        at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
        at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
        at java.base/java.lang.Thread.run(Thread.java:840)
   Exception in thread "main" java.lang.reflect.UndeclaredThrowableException
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1969)
        at org.apache.spark.deploy.SparkHadoopUtil.runAsSparkUser(SparkHadoopUtil.scala:62)
        at org.apache.spark.executor.CoarseGrainedExecutorBackend$.run(CoarseGrainedExecutorBackend.scala:429)
        at org.apache.spark.executor.CoarseGrainedExecutorBackend$.main(CoarseGrainedExecutorBackend.scala:417)
        at org.apache.spark.executor.CoarseGrainedExecutorBackend.main(CoarseGrainedExecutorBackend.scala)
   Caused by: org.apache.spark.rpc.RpcTimeoutException: Future timed out after [120 seconds]. This timeout is controlled by spark.rpc.askTimeout
        at org.apache.spark.rpc.RpcTimeout.org$apache$spark$rpc$RpcTimeout$$createRpcTimeoutException(RpcTimeout.scala:47)
        at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:62)
        at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:58)
        at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:35)
        at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:76)
        at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:101)
        at org.apache.spark.rpc.RpcEndpointRef.askSync(RpcEndpointRef.scala:85)
        at org.apache.spark.executor.CoarseGrainedExecutorBackend$.$anonfun$run$7(CoarseGrainedExecutorBackend.scala:457)
        at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:63)
        at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:62)
        at java.base/java.security.AccessController.doPrivileged(AccessController.java:712)
        at java.base/javax.security.auth.Subject.doAs(Subject.java:439)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
        ... 4 more
   Caused by: java.util.concurrent.TimeoutException: Future timed out after [120 seconds]
        at scala.concurrent.impl.Promise$DefaultPromise.tryAwait0(Promise.scala:248)
        at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:261)
        at org.apache.spark.util.SparkThreadUtils$.awaitResultNoSparkExceptionConversion(SparkThreadUtils.scala:61)
        at org.apache.spark.util.SparkThreadUtils$.awaitResult(SparkThreadUtils.scala:45)
        at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:342)
        at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:75)
        ... 12 more
   ```
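
   The `InvalidClassException` is Java serialization's built-in version check: the executor computes a `serialVersionUID` for `scala.collection.immutable.ArraySeq` from its local `scala-library` jar, and it no longer matches the UID written into the RPC message stream, presumably because the driver and executor classpaths resolve different Scala 2.13 patch versions. As a quick way to confirm which UID a given classpath produces, here is a minimal sketch (the object name `CheckArraySeqUid` is just for illustration) that prints the locally computed value via `java.io.ObjectStreamClass`; running it once against 2.13.16 and once against 2.13.17 should show the two different values reported in the log above.

   ```scala
   import java.io.ObjectStreamClass

   import scala.collection.immutable.ArraySeq

   object CheckArraySeqUid {
     def main(args: Array[String]): Unit = {
       // ObjectStreamClass.lookup returns the serialization descriptor for a
       // Serializable class; getSerialVersionUID is the value the JVM compares
       // against the stream's classdesc during deserialization.
       val desc = ObjectStreamClass.lookup(classOf[ArraySeq[_]])
       println(s"${desc.getName} serialVersionUID = ${desc.getSerialVersionUID}")
       println(s"scala-library version: ${scala.util.Properties.versionNumberString}")
     }
   }
   ```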

