GitHub user ConeyLiu commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19285#discussion_r140149449
  
    --- Diff: core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala ---
    @@ -354,63 +401,30 @@ private[spark] class MemoryStore(
           ser.serializeStream(serializerManager.wrapForCompression(blockId, redirectableStream))
         }
     
    -    // Request enough memory to begin unrolling
    -    keepUnrolling = reserveUnrollMemoryForThisTask(blockId, initialMemoryThreshold, memoryMode)
    -
    -    if (!keepUnrolling) {
    -      logWarning(s"Failed to reserve initial memory threshold of " +
    -        s"${Utils.bytesToString(initialMemoryThreshold)} for computing 
block $blockId in memory.")
    -    } else {
    -      unrollMemoryUsedByThisBlock += initialMemoryThreshold
    +    def storeValue(value: T): Unit = {
    +      serializationStream.writeObject(value)(classTag)
         }
     
    -    def reserveAdditionalMemoryIfNecessary(): Unit = {
    -      if (bbos.size > unrollMemoryUsedByThisBlock) {
    -        val amountToRequest = (bbos.size * memoryGrowthFactor - unrollMemoryUsedByThisBlock).toLong
    -        keepUnrolling = reserveUnrollMemoryForThisTask(blockId, amountToRequest, memoryMode)
    -        if (keepUnrolling) {
    -          unrollMemoryUsedByThisBlock += amountToRequest
    -        }
    -      }
    -    }
    -
    -    // Unroll this block safely, checking whether we have exceeded our threshold
    -    while (values.hasNext && keepUnrolling) {
    -      serializationStream.writeObject(values.next())(classTag)
    -      elementsUnrolled += 1
    -      if (elementsUnrolled % memoryCheckPeriod == 0) {
    -        reserveAdditionalMemoryIfNecessary()
    +    def estimateSize(precise: Boolean): Long = {
    +      if (precise) {
    +        serializationStream.flush()
    --- End diff --
    
    OK, I'll do it tomorrow.
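
    For context, a minimal, self-contained sketch of the unroll pattern the diff above factors out: the caller supplies a `storeValue` callback plus an `estimateSize(precise)` callback, and the loop periodically re-checks the estimated size against the unroll memory reserved so far. All names and defaults below (`reserveUnrollMemory`, `memoryCheckPeriod`, `memoryGrowthFactor`, etc.) are illustrative stand-ins, not the actual MemoryStore internals.

    // Illustrative sketch only: signatures and defaults here are assumptions,
    // not the real MemoryStore API. It shows the shape of the refactoring in
    // the diff above, where element storage (storeValue) and size estimation
    // (estimateSize) are passed in as callbacks so the same unroll loop can be
    // shared by different storage formats.
    object UnrollSketch {

      // Returns Right(finalSize) if the whole iterator fit within the reserved
      // unroll memory, or Left(memoryReservedSoFar) if a reservation failed.
      def putIteratorSketch[T](
          values: Iterator[T],
          storeValue: T => Unit,
          estimateSize: Boolean => Long,
          reserveUnrollMemory: Long => Boolean,
          initialMemoryThreshold: Long = 1024L * 1024,
          memoryCheckPeriod: Int = 16,
          memoryGrowthFactor: Double = 1.5): Either[Long, Long] = {

        var elementsUnrolled = 0L
        var unrollMemoryUsed = 0L

        // Reserve an initial chunk before unrolling anything.
        var keepUnrolling = reserveUnrollMemory(initialMemoryThreshold)
        if (keepUnrolling) unrollMemoryUsed += initialMemoryThreshold

        // Unroll the block, periodically comparing the (cheap) size estimate
        // against the memory reserved so far and growing the reservation.
        while (values.hasNext && keepUnrolling) {
          storeValue(values.next())
          elementsUnrolled += 1
          if (elementsUnrolled % memoryCheckPeriod == 0) {
            val currentSize = estimateSize(false) // approximate, no flush
            if (currentSize > unrollMemoryUsed) {
              val amountToRequest =
                (currentSize * memoryGrowthFactor - unrollMemoryUsed).toLong
              keepUnrolling = reserveUnrollMemory(amountToRequest)
              if (keepUnrolling) unrollMemoryUsed += amountToRequest
            }
          }
        }

        if (keepUnrolling) Right(estimateSize(true)) // precise size at the end
        else Left(unrollMemoryUsed)
      }
    }

    A serialized-path caller could then pass `v => serializationStream.writeObject(v)(classTag)` as `storeValue` and a function that flushes the stream only when a precise size is requested, which is what the new `storeValue`/`estimateSize` definitions in the diff do.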


---
