GitHub user JoshRosen commented on a diff in the pull request:

    https://github.com/apache/spark/pull/10835#discussion_r50802530
  
    --- Diff: core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala ---
    @@ -230,86 +297,119 @@ class TaskMetrics extends Serializable {
        */
       def shuffleWriteMetrics: Option[ShuffleWriteMetrics] = _shuffleWriteMetrics
     
    -  @deprecated("setting ShuffleWriteMetrics is for internal use only", "2.0.0")
    -  def shuffleWriteMetrics_=(swm: Option[ShuffleWriteMetrics]): Unit = {
    -    _shuffleWriteMetrics = swm
    -  }
    -
       /**
        * Get or create a new [[ShuffleWriteMetrics]] associated with this task.
        */
       private[spark] def registerShuffleWriteMetrics(): ShuffleWriteMetrics = synchronized {
         _shuffleWriteMetrics.getOrElse {
    -      val metrics = new ShuffleWriteMetrics
    +      val metrics = new ShuffleWriteMetrics(initialAccumsMap)
           _shuffleWriteMetrics = Some(metrics)
           metrics
         }
       }
     
    -  private var _updatedBlockStatuses: Seq[(BlockId, BlockStatus)] =
    -    Seq.empty[(BlockId, BlockStatus)]
    -
    -  /**
    -   * Storage statuses of any blocks that have been updated as a result of this task.
    -   */
    -  def updatedBlockStatuses: Seq[(BlockId, BlockStatus)] = _updatedBlockStatuses
     
    -  @deprecated("setting updated blocks is for internal use only", "2.0.0")
    -  def updatedBlocks_=(ub: Option[Seq[(BlockId, BlockStatus)]]): Unit = {
    -    _updatedBlockStatuses = ub.getOrElse(Seq.empty[(BlockId, BlockStatus)])
    -  }
    +  /* ========================== *
    +   |        OTHER THINGS        |
    +   * ========================== */
     
    -  private[spark] def incUpdatedBlockStatuses(v: Seq[(BlockId, BlockStatus)]): Unit = {
    -    _updatedBlockStatuses ++= v
    +  private[spark] def registerAccumulator(a: Accumulable[_, _]): Unit = {
    +    accums += a
       }
     
    -  private[spark] def setUpdatedBlockStatuses(v: Seq[(BlockId, BlockStatus)]): Unit = {
    -    _updatedBlockStatuses = v
    +  /**
    +   * Return the latest updates of accumulators in this task.
    +   */
    +  def accumulatorUpdates(): Seq[AccumulableInfo] = accums.map { a =>
    +    new AccumulableInfo(
    +      a.id, a.name.orNull, Some(a.localValue), None, a.isInternal, a.countFailedValues)
    --- End diff --
    
    This `.orNull` call looks a little suspect to me. If it's going to be legal for `name` to be null, would you mind either marking that field of `AccumulableInfo` with the `@Nullable` annotation or converting it to an `Option`? Either of these options is fine by me; I just want it to be clear to readers of `AccumulableInfo` that `name` might not be defined.
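
    For the sake of illustration, here's a rough sketch of the two alternatives I mean. The constructor shape below is a guess from the call site in this diff, not the actual `AccumulableInfo` definition, and it assumes `javax.annotation.Nullable` is on the classpath (e.g. via the jsr305 dependency):

    ```scala
    import javax.annotation.Nullable

    // Alternative 1: keep the String field, but document that null is legal.
    // (Field names are guesses from the call site above, not the real definition.)
    class AccumulableInfo(
        val id: Long,
        @Nullable val name: String,  // null when the accumulator is unnamed
        val update: Option[Any],
        val value: Option[Any],
        val internal: Boolean,
        val countFailedValues: Boolean)

    // Alternative 2: make the absence explicit in the type; the call site above
    // would then pass a.name directly instead of a.name.orNull. (Renamed here
    // only so this sketch compiles alongside Alternative 1.)
    class AccumulableInfoWithOption(
        val id: Long,
        val name: Option[String],  // None when the accumulator is unnamed
        val update: Option[Any],
        val value: Option[Any],
        val internal: Boolean,
        val countFailedValues: Boolean)
    ```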

