Github user jodersky commented on a diff in the pull request: https://github.com/apache/spark/pull/11544#discussion_r55438396 --- Diff: core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala --- @@ -374,28 +374,29 @@ class JobProgressListener(conf: SparkConf) extends SparkListener with Logging { execSummary.taskTime += info.duration stageData.numActiveTasks -= 1 - val (errorMessage, metrics): (Option[String], Option[TaskMetrics]) = + val (errorMessage, metrics): (Option[String], Seq[AccumulableInfo]) = taskEnd.reason match { case org.apache.spark.Success => stageData.completedIndices.add(info.index) stageData.numCompleteTasks += 1 - (None, Option(taskEnd.taskMetrics)) - case e: ExceptionFailure => // Handle ExceptionFailure because we might have metrics + (None, taskEnd.taskMetrics.accumulatorUpdates()) + case e: ExceptionFailure => // Handle ExceptionFailure because we might have metrics stageData.numFailedTasks += 1 - (Some(e.toErrorString), e.metrics) - case e: TaskFailedReason => // All other failure cases + (Some(e.toErrorString), e.accumUpdates) + case e: TaskFailedReason => // All other failure cases stageData.numFailedTasks += 1 - (Some(e.toErrorString), None) + (Some(e.toErrorString), Seq.empty[AccumulableInfo]) } - metrics.foreach { m => + if (metrics.nonEmpty) { val oldMetrics = stageData.taskData.get(info.taskId).flatMap(_.taskMetrics) - updateAggregateMetrics(stageData, info.executorId, m, oldMetrics) + updateAggregateMetrics(stageData, info.executorId, + TaskMetrics.fromAccumulatorUpdates(metrics), oldMetrics) } - val taskData = stageData.taskData.getOrElseUpdate(info.taskId, new TaskUIData(info)) taskData.taskInfo = info - taskData.taskMetrics = metrics + taskData.taskMetrics = if (metrics.nonEmpty) Option(TaskMetrics. 
+ fromAccumulatorUpdates(metrics)) else None --- End diff -- I was thinking of something like this: ```scala val (errorMessage, metrics): (Option[String], Seq[AccumulableInfo]) = //as before val taskMetrics: Option[TaskMetrics] = if (metrics.nonEmpty) Some(TaskMetrics.fromAccumulatorUpdates(metrics)) else None taskMetrics.foreach { m => val oldMetrics = //as before updateAggregateMetrics(stageData, info.executorId, m, oldMetrics) } taskData.taskMetrics = taskMetrics ``` That way you avoid calling TaskMetrics.fromAccumulatorUpdates twice. Maybe it's a nitpick, depending on the complexity of creating task metrics from accumulators.
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastruct...@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org