ivoson commented on code in PR #39459: URL: https://github.com/apache/spark/pull/39459#discussion_r1118028223
########## core/src/main/scala/org/apache/spark/storage/BlockManagerStorageEndpoint.scala: ########## @@ -81,6 +81,8 @@ class BlockManagerStorageEndpoint( case ReplicateBlock(blockId, replicas, maxReplicas) => context.reply(blockManager.replicateBlock(blockId, replicas.toSet, maxReplicas)) + case MarkRDDBlockAsVisible(blockId) => Review Comment: Of course. Updated. ########## core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala: ########## @@ -2266,6 +2270,160 @@ class BlockManagerSuite extends SparkFunSuite with Matchers with PrivateMethodTester } } + test("SPARK-41497: getOrElseUpdateRDDBlock do compute based on cache visibility statue") + val store = makeBlockManager(8000, "executor1") + val blockId = RDDBlockId(rddId = 1, splitIndex = 1) + var computed: Boolean = false + val data = Seq(1, 2, 3) + val makeIterator = () => { + computed = true + data.iterator + } + + // Cache doesn't exist and is not visible. + assert(store.getStatus(blockId).isEmpty && !store.isRDDBlockVisible(blockId)) + val res1 = store.getOrElseUpdateRDDBlock( + 1, blockId, StorageLevel.MEMORY_ONLY, classTag[Int], makeIterator) + // Put cache successfully and reported block task info. + assert(res1.isLeft && computed) + verify(master, times(1)).updateRDDBlockTaskInfo(blockId, 1) + + // Cache exists but not visible. + computed = false + assert(store.getStatus(blockId).nonEmpty && !store.isRDDBlockVisible(blockId)) Review Comment: Thanks, done. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org