Yuming Wang created SPARK-47441:
-----------------------------------

             Summary: Do not add log link for unmanaged AM in Spark UI
                 Key: SPARK-47441
                 URL: https://issues.apache.org/jira/browse/SPARK-47441
             Project: Spark
          Issue Type: Bug
          Components: YARN
    Affects Versions: 3.5.1, 3.5.0
            Reporter: Yuming Wang


{noformat}
24/03/18 04:58:25,022 ERROR [spark-listener-group-appStatus] scheduler.AsyncEventQueue:97 : Listener AppStatusListener threw an exception
java.lang.NumberFormatException: For input string: "null"
        at java.lang.NumberFormatException.forInputString(NumberFormatException.java:67) ~[?:?]
        at java.lang.Integer.parseInt(Integer.java:668) ~[?:?]
        at java.lang.Integer.parseInt(Integer.java:786) ~[?:?]
        at scala.collection.immutable.StringLike.toInt(StringLike.scala:310) ~[scala-library-2.12.18.jar:?]
        at scala.collection.immutable.StringLike.toInt$(StringLike.scala:310) ~[scala-library-2.12.18.jar:?]
        at scala.collection.immutable.StringOps.toInt(StringOps.scala:33) ~[scala-library-2.12.18.jar:?]
        at org.apache.spark.util.Utils$.parseHostPort(Utils.scala:1105) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.status.ProcessSummaryWrapper.<init>(storeTypes.scala:609) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.status.LiveMiscellaneousProcess.doUpdate(LiveEntity.scala:1045) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.status.LiveEntity.write(LiveEntity.scala:50) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.status.AppStatusListener.update(AppStatusListener.scala:1233) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.status.AppStatusListener.onMiscellaneousProcessAdded(AppStatusListener.scala:1445) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.status.AppStatusListener.onOtherEvent(AppStatusListener.scala:113) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.scheduler.SparkListenerBus.doPostEvent(SparkListenerBus.scala:100) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.scheduler.SparkListenerBus.doPostEvent$(SparkListenerBus.scala:28) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.scheduler.AsyncEventQueue.doPostEvent(AsyncEventQueue.scala:37) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.scheduler.AsyncEventQueue.doPostEvent(AsyncEventQueue.scala:37) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.util.ListenerBus.postToAll(ListenerBus.scala:117) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.util.ListenerBus.postToAll$(ListenerBus.scala:101) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.scheduler.AsyncEventQueue.super$postToAll(AsyncEventQueue.scala:105) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.scheduler.AsyncEventQueue.$anonfun$dispatch$1(AsyncEventQueue.scala:105) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at scala.runtime.java8.JFunction0$mcJ$sp.apply(JFunction0$mcJ$sp.java:23) ~[scala-library-2.12.18.jar:?]
        at scala.util.DynamicVariable.withValue(DynamicVariable.scala:62) ~[scala-library-2.12.18.jar:?]
        at org.apache.spark.scheduler.AsyncEventQueue.org$apache$spark$scheduler$AsyncEventQueue$$dispatch(AsyncEventQueue.scala:100) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.scheduler.AsyncEventQueue$$anon$2.$anonfun$run$1(AsyncEventQueue.scala:96) ~[spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.util.Utils$.tryOrStopSparkContext(Utils.scala:1356) [spark-core_2.12-3.5.1.jar:3.5.1]
        at org.apache.spark.scheduler.AsyncEventQueue$$anon$2.run(AsyncEventQueue.scala:96) [spark-core_2.12-3.5.1.jar:3.5.1]
{noformat}
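
For context, a minimal standalone sketch (not Spark's actual implementation) of how this failure arises: the listener parses the process's host:port with Utils.parseHostPort, and when the port component arrives as the literal string "null", as can happen for an unmanaged AM that has no container-backed log address, the toInt call throws the exception above. The object name, simplified parsing logic, and the "null:null" input here are illustrative assumptions.

{noformat}
// Minimal sketch, not Spark's actual code: a simplified stand-in for
// org.apache.spark.util.Utils.parseHostPort, shown only to illustrate why a
// port component of the literal string "null" triggers the
// NumberFormatException in the stack trace above.
object UnmanagedAmHostPortSketch {

  // Split "host:port" on the last ':' and parse the port as an Int.
  def parseHostPort(hostPort: String): (String, Int) = {
    val idx = hostPort.lastIndexOf(':')
    if (idx == -1) {
      (hostPort, 0)
    } else {
      // "null".toInt throws java.lang.NumberFormatException: For input string: "null"
      (hostPort.substring(0, idx), hostPort.substring(idx + 1).toInt)
    }
  }

  def main(args: Array[String]): Unit = {
    // Hypothetical value: an unmanaged AM reports no real log address,
    // so the host:port fed to the listener can come through as "null:null".
    parseHostPort("null:null")
  }
}
{noformat}

Not adding the log link for an unmanaged AM, as the summary proposes, would keep such a value from ever reaching AppStatusListener.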



