Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19788#discussion_r169229650
  
    --- Diff: core/src/main/scala/org/apache/spark/TaskEndReason.scala ---
    @@ -81,16 +81,17 @@ case object Resubmitted extends TaskFailedReason {
      */
     @DeveloperApi
     case class FetchFailed(
    -    bmAddress: BlockManagerId,  // Note that bmAddress can be null
    +    bmAddress: BlockManagerId, // Note that bmAddress can be null
         shuffleId: Int,
         mapId: Int,
         reduceId: Int,
    -    message: String)
    +    message: String,
    +    numBlocks: Int = 1)
       extends TaskFailedReason {
       override def toErrorString: String = {
         val bmAddressString = if (bmAddress == null) "null" else bmAddress.toString
         s"FetchFailed($bmAddressString, shuffleId=$shuffleId, mapId=$mapId, reduceId=$reduceId, " +
     -      s"message=\n$message\n)"
     +      s"numBlocks=$numBlocks, message=\n$message\n)"
    --- End diff ---
    
    do we really care about `numBlocks` when a shuffle fetch fails?

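    For context, a minimal sketch (made-up values, assuming the proposed signature with `numBlocks` defaulting to 1) of how the new field would surface in `toErrorString`:
    
    ```scala
    import org.apache.spark.FetchFailed
    import org.apache.spark.storage.BlockManagerId
    
    // Illustrative values only; assumes the numBlocks field added by this diff.
    val reason = FetchFailed(
      BlockManagerId("exec-1", "host-1", 7337),
      shuffleId = 0,
      mapId = 3,
      reduceId = 5,
      message = "Connection reset by peer",
      numBlocks = 2)
    
    // Prints something like:
    // FetchFailed(BlockManagerId(exec-1, host-1, 7337, ...), shuffleId=0, mapId=3, reduceId=5, numBlocks=2, message=
    // Connection reset by peer
    // )
    println(reason.toErrorString)
    ```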

---
