Github user andrewor14 commented on a diff in the pull request: https://github.com/apache/spark/pull/4525#discussion_r25490585 --- Diff: core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala --- @@ -187,47 +200,74 @@ private[history] class FsHistoryProvider(conf: SparkConf) extends ApplicationHis } .flatMap { entry => try { - Some(replay(entry, new ReplayListenerBus())) + Some((getModificationTime(entry).get, entry)) } catch { case e: Exception => logError(s"Failed to load application log data from $entry.", e) None } } - .sortWith(compareAppInfo) + .sortWith(_._1 >= _._1) + .map { case (_, file) => file } + + logInfos.sliding(20, 20).foreach { batch => + replayExecutor.submit(new Runnable { + override def run(): Unit = mergeApplicationListing(batch) + }) + } lastModifiedTime = newLastModifiedTime + } catch { + case e: Exception => logError("Exception in checking for event log updates", e) + } + } - // When there are new logs, merge the new list with the existing one, maintaining - // the expected ordering (descending end time). Maintaining the order is important - // to avoid having to sort the list every time there is a request for the log list. 
- if (!logInfos.isEmpty) { - val newApps = new mutable.LinkedHashMap[String, FsApplicationHistoryInfo]() - def addIfAbsent(info: FsApplicationHistoryInfo) = { - if (!newApps.contains(info.id) || - newApps(info.id).logPath.endsWith(EventLoggingListener.IN_PROGRESS) && - !info.logPath.endsWith(EventLoggingListener.IN_PROGRESS)) { - newApps += (info.id -> info) - } - } + /** + * Replay the log files in the list and merge the list of old applications with new ones + */ + private def mergeApplicationListing(logs: Seq[FileStatus]): Unit = { + def addIfAbsent( + newApps: mutable.LinkedHashMap[String, FsApplicationHistoryInfo], + info: FsApplicationHistoryInfo): Unit = { + if (!newApps.contains(info.id) || + newApps(info.id).logPath.endsWith(EventLoggingListener.IN_PROGRESS) && + !info.logPath.endsWith(EventLoggingListener.IN_PROGRESS)) { --- End diff -- I personally find this `a || b && c` a little hard to read. I believe it always evaluates to `a || (b && c)`, so maybe it makes sense to rewrite it in a way that makes the precedence explicit: ``` val appJustFinished = newApps(info.id).logPath.endsWith(...) && !info.logPath.endsWith(...) if (!newApps.contains(info.id) || appJustFinished) { ... } ```
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastruct...@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org