ivoson commented on code in PR #39459:
URL: https://github.com/apache/spark/pull/39459#discussion_r1096527017


##########
core/src/main/scala/org/apache/spark/storage/BlockManager.scala:
##########
@@ -1325,14 +1325,47 @@ private[spark] class BlockManager(
     blockInfoManager.releaseAllLocksForTask(taskAttemptId)
   }
 
+  /**
+   * Retrieve the given rdd block if it exists and is visible, otherwise call the provided
+   * `makeIterator` method to compute the block, persist it, and return its values.
+   *
+   * @return either a BlockResult if the block was successfully cached, or an iterator if the block
+   *         could not be cached.
+   */
+  def getOrElseUpdateRDDBlock[T](
+      taskId: Long,
+      blockId: RDDBlockId,
+      level: StorageLevel,
+      classTag: ClassTag[T],
+      makeIterator: () => Iterator[T]): Either[BlockResult, Iterator[T]] = {
+    val isCacheVisible = isRDDBlockVisible(blockId)
+    var computed: Boolean = false
+    val getIterator = () => {
+      computed = true
+      makeIterator()
+    }
+
+    val res = getOrElseUpdate(blockId, level, classTag, getIterator)
+    if (res.isLeft && !isCacheVisible) {

Review Comment:
   Updated.


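For context, here is a minimal sketch of how a compute path could call the new `getOrElseUpdateRDDBlock` API shown in the hunk above. This is illustrative only, not the PR's code: the helper name `readOrCompute`, the object name, and the `MEMORY_AND_DISK` storage level are assumptions; the call shape follows the signature in the diff.

```scala
package org.apache.spark // BlockManager is private[spark], so a caller must live in this package

import scala.reflect.ClassTag

import org.apache.spark.storage.{BlockManager, RDDBlockId, StorageLevel}

object RddCacheReadSketch {
  // Hypothetical helper (name and storage level are assumptions, not the PR's code):
  // serve the block from cache when available and visible, otherwise compute it.
  def readOrCompute[T: ClassTag](
      bm: BlockManager,
      context: TaskContext,
      blockId: RDDBlockId,
      makeIterator: () => Iterator[T]): Iterator[T] = {
    bm.getOrElseUpdateRDDBlock(
      context.taskAttemptId(), blockId, StorageLevel.MEMORY_AND_DISK,
      implicitly[ClassTag[T]], makeIterator) match {
      case Left(blockResult) => blockResult.data.asInstanceOf[Iterator[T]] // cached values
      case Right(iter) => iter // block could not be cached; fall back to the computed iterator
    }
  }
}
```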

##########
core/src/test/scala/org/apache/spark/AccumulatorSuite.scala:
##########
@@ -89,6 +89,38 @@ class AccumulatorSuite extends SparkFunSuite with Matchers with LocalSparkContex
     assert(AccumulatorContext.get(100000).isEmpty)
   }
 
+
+  test("SPARK-41497: accumulators should be reported in the case of task retry 
with rdd cache") {
+    // Set up a cluster with 2 executors
+    val conf = new SparkConf()
+      .setMaster("local-cluster[2, 1, 
1024]").setAppName("TaskSchedulerImplSuite")

Review Comment:
   Done.


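As a rough illustration of the scenario this test targets, here is a minimal, self-contained sketch of accumulator updates flowing through a cached RDD on a `local-cluster` master. This is not the PR's test body; the app name, the data, and the assertion are assumptions, and only the standard `SparkContext` accumulator and caching APIs are used.

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Illustrative sketch: an accumulator updated while a cached RDD is materialized.
val conf = new SparkConf()
  .setMaster("local-cluster[2, 1, 1024]") // 2 executors, 1 core and 1024 MB each
  .setAppName("AccumulatorWithRddCacheSketch") // hypothetical app name
val sc = new SparkContext(conf)
try {
  val acc = sc.longAccumulator("sum")          // named accumulator (Spark 2.x+ API)
  val rdd = sc.parallelize(1 to 10, 2).cache() // cached RDD: a retried task may hit this cache
  rdd.foreach(x => acc.add(x))                 // accumulator updated while filling the cache
  assert(acc.value == 55L)                     // updates must be reported exactly once
} finally {
  sc.stop()
}
```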

-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

