ron8hu commented on a change in pull request #31204: URL: https://github.com/apache/spark/pull/31204#discussion_r571753258
########## File path: core/src/main/scala/org/apache/spark/status/AppStatusStore.scala ########## @@ -104,19 +104,92 @@ private[spark] class AppStatusStore( listener.map(_.activeStages()).getOrElse(Nil) } - def stageList(statuses: JList[v1.StageStatus]): Seq[v1.StageData] = { + def stageList( + statuses: JList[v1.StageStatus], + details: Boolean = false, + withSummary: Boolean = false, + unsortedQuantiles: Option[Array[Double]], + taskStatus: JList[v1.TaskStatus]): Seq[v1.StageData] = { + val quantiles = unsortedQuantiles.getOrElse(Array(0, 0.25, 0.5, 0.75, 1.0)).sorted val it = store.view(classOf[StageDataWrapper]).reverse().asScala.map(_.info) - if (statuses != null && !statuses.isEmpty()) { + val withStatus = if (statuses != null && !statuses.isEmpty()) { it.filter { s => statuses.contains(s.status) }.toSeq } else { it.toSeq } + val withDetails = if (details) { + withStatus.map(stage => stageWithDetails(stage, taskStatus)) + } else { + withStatus + } + if (withSummary) { + withDetails.map { stage => + new v1.StageData( Review comment: @xkrogen has a suggestion: here and in stageWithDetails there is a massive block that copies one StageData to another with minor changes. It would be better to implement a copy method on StageData itself that allows overriding certain properties, similar to how case classes work in Scala. This would reduce duplicated code and make it clearer which fields are being changed. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org