[ 
https://issues.apache.org/jira/browse/SPARK-32024?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17154051#comment-17154051
 ] 

Apache Spark commented on SPARK-32024:
--------------------------------------

User 'HeartSaVioR' has created a pull request for this issue:
https://github.com/apache/spark/pull/29046

> Disk usage tracker went negative in HistoryServerDiskManager
> ------------------------------------------------------------
>
>                 Key: SPARK-32024
>                 URL: https://issues.apache.org/jira/browse/SPARK-32024
>             Project: Spark
>          Issue Type: Bug
>          Components: Web UI
>    Affects Versions: 2.4.4, 3.0.0, 3.1.0
>         Environment: System: Windows, Linux.
> Config:
> spark.history.retainedApplications 200
> spark.history.store.maxDiskUsage 10g
> spark.history.store.path /cache_hs
>            Reporter: Zhen Li
>            Assignee: Zhen Li
>            Priority: Minor
>             Fix For: 2.4.7, 3.0.1, 3.1.0
>
>
> After restarting the history server, we would randomly see the error below.
> h2. HTTP ERROR 500 java.lang.IllegalStateException: Disk usage tracker went 
> negative (now = -****, delta = -****)
> ||URI:|/history/********/*/stages/|
> ||STATUS:|500|
> ||MESSAGE:|java.lang.IllegalStateException: Disk usage tracker went negative 
> (now = -****, delta = -****)|
> ||SERVLET:|org.apache.spark.deploy.history.HistoryServer$$anon$1-6ce1f601|
> ||CAUSED BY:|java.lang.IllegalStateException: Disk usage tracker went 
> negative (now = -****, delta = -****)|
> h3. Caused by:
> java.lang.IllegalStateException: Disk usage tracker went negative (now = 
> -633925, delta = -38947) at 
> org.apache.spark.deploy.history.HistoryServerDiskManager.org$apache$spark$deploy$history$HistoryServerDiskManager$$updateUsage(HistoryServerDiskManager.scala:258)
>  at 
> org.apache.spark.deploy.history.HistoryServerDiskManager$Lease.rollback(HistoryServerDiskManager.scala:316)
>  at 
> org.apache.spark.deploy.history.FsHistoryProvider.loadDiskStore(FsHistoryProvider.scala:1192)
>  at 
> org.apache.spark.deploy.history.FsHistoryProvider.getAppUI(FsHistoryProvider.scala:363)
>  at 
> org.apache.spark.deploy.history.HistoryServer.getAppUI(HistoryServer.scala:191)
>  at 
> org.apache.spark.deploy.history.ApplicationCache.$anonfun$loadApplicationEntry$2(ApplicationCache.scala:163)
>  at 
> org.apache.spark.deploy.history.ApplicationCache.time(ApplicationCache.scala:135)
>  at 
> org.apache.spark.deploy.history.ApplicationCache.org$apache$spark$deploy$history$ApplicationCache$$loadApplicationEntry(ApplicationCache.scala:161)
>  at 
> org.apache.spark.deploy.history.ApplicationCache$$anon$1.load(ApplicationCache.scala:56)
>  at 
> org.apache.spark.deploy.history.ApplicationCache$$anon$1.load(ApplicationCache.scala:52)
>  at 
> org.sparkproject.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>  at 
> org.sparkproject.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>  at 
> org.sparkproject.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>  at org.sparkproject.guava.cache.LocalCache$Segment.get(LocalCache.java:2257) 
> at org.sparkproject.guava.cache.LocalCache.get(LocalCache.java:4000) at 
> org.sparkproject.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004) at 
> org.sparkproject.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>  at 
> org.apache.spark.deploy.history.ApplicationCache.get(ApplicationCache.scala:89)
>  at 
> org.apache.spark.deploy.history.ApplicationCache.withSparkUI(ApplicationCache.scala:101)
>  at 
> org.apache.spark.deploy.history.HistoryServer.org$apache$spark$deploy$history$HistoryServer$$loadAppUi(HistoryServer.scala:248)
>  at 
> org.apache.spark.deploy.history.HistoryServer$$anon$1.doGet(HistoryServer.scala:101)
>  at javax.servlet.http.HttpServlet.service(HttpServlet.java:687) at 
> javax.servlet.http.HttpServlet.service(HttpServlet.java:790) at 
> org.sparkproject.jetty.servlet.ServletHolder.handle(ServletHolder.java:763) 
> at 
> org.sparkproject.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1631)
>  at 
> org.apache.spark.ui.HttpSecurityFilter.doFilter(HttpSecurityFilter.scala:95) 
> at 
> org.sparkproject.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1618)
>  at 
> org.sparkproject.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:549)
>  at 
> org.sparkproject.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:233)
>  at 
> org.sparkproject.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1363)
>  at 
> org.sparkproject.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:188)
>  at 
> org.sparkproject.jetty.servlet.ServletHandler.doScope(ServletHandler.java:489)
>  at 
> org.sparkproject.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:186)
>  at 
> org.sparkproject.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1278)
>  at 
> org.sparkproject.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:141)
>  at 
> org.sparkproject.jetty.server.handler.gzip.GzipHandler.handle(GzipHandler.java:767)
>  at 
> org.sparkproject.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:221)
>  at 
> org.sparkproject.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:127)
>  at org.sparkproject.jetty.server.Server.handle(Server.java:500) at 
> org.sparkproject.jetty.server.HttpChannel.lambda$handle$1(HttpChannel.java:383)
>  at org.sparkproject.jetty.server.HttpChannel.dispatch(HttpChannel.java:547) 
> at org.sparkproject.jetty.server.HttpChannel.handle(HttpChannel.java:375) at 
> org.sparkproject.jetty.server.HttpConnection.onFillable(HttpConnection.java:273)
>  at 
> org.sparkproject.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:311)
>  at org.sparkproject.jetty.io.FillInterest.fillable(FillInterest.java:103) at 
> org.sparkproject.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117) at 
> org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:336)
>  at 
> org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:313)
>  at 
> org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:171)
>  at 
> org.sparkproject.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:129)
>  at 
> org.sparkproject.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:375)
>  at 
> org.sparkproject.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:806)
>  at 
> org.sparkproject.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:938)
>  at java.lang.Thread.run(Thread.java:748)



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org

Reply via email to