holdenk commented on a change in pull request #30046: URL: https://github.com/apache/spark/pull/30046#discussion_r505742266
########## File path: core/src/main/scala/org/apache/spark/storage/BlockManagerDecommissioner.scala ########## @@ -82,23 +83,35 @@ private[storage] class BlockManagerDecommissioner( Thread.sleep(SLEEP_TIME_SECS * 1000L) case Some((shuffleBlockInfo, retryCount)) => if (retryCount < maxReplicationFailuresForDecommission) { - logInfo(s"Trying to migrate shuffle ${shuffleBlockInfo} to ${peer}") - val blocks = - bm.migratableResolver.getMigrationBlocks(shuffleBlockInfo) + logDebug(s"Trying to migrate shuffle ${shuffleBlockInfo} to ${peer}") + val blocks = bm.migratableResolver.getMigrationBlocks(shuffleBlockInfo) logDebug(s"Got migration sub-blocks ${blocks}") - blocks.foreach { case (blockId, buffer) => - logDebug(s"Migrating sub-block ${blockId}") - bm.blockTransferService.uploadBlockSync( - peer.host, - peer.port, - peer.executorId, - blockId, - buffer, - StorageLevel.DISK_ONLY, - null)// class tag, we don't need for shuffle - logDebug(s"Migrated sub block ${blockId}") + + // Migrate the components of the blocks. + try { + blocks.foreach { case (blockId, buffer) => + logDebug(s"Migrating sub-block ${blockId}") + bm.blockTransferService.uploadBlockSync( + peer.host, + peer.port, + peer.executorId, + blockId, + buffer, + StorageLevel.DISK_ONLY, + null)// class tag, we don't need for shuffle + logDebug(s"Migrated sub block ${blockId}") + } + logDebug(s"Migrated ${shuffleBlockInfo} to ${peer}") + } catch { + case e: IOException => + // If a block got deleted before netty opened the file handle, then trying to Review comment: sure :) ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org