Github user ankuriitg commented on a diff in the pull request: https://github.com/apache/spark/pull/22504#discussion_r224189470 --- Diff: core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala --- @@ -806,6 +806,22 @@ private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock) } // Clean the blacklist from the expired entries. clearBlacklist(CLEAN_INTERVAL_S) + + // Delete driver logs from the configured spark dfs dir that exceed the configured max age + try { + val hdfsDir = conf.get("spark.driver.log.dfsDir") + val appDirs = fs.listLocatedStatus(new Path(hdfsDir)) + while (appDirs.hasNext()) { + val appDirStatus = appDirs.next() + if (appDirStatus.getModificationTime() < maxTime) { + logInfo(s"Deleting expired driver log for: ${appDirStatus.getPath().getName()}") + deleteLog(appDirStatus.getPath()) --- End diff -- Added the new configurations, with a fallback option to the existing ones.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org