Github user squito commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20422#discussion_r165256701
  
    --- Diff: core/src/test/scala/org/apache/spark/shuffle/sort/IndexShuffleBlockResolverSuite.scala ---
    @@ -89,26 +96,39 @@ class IndexShuffleBlockResolverSuite extends SparkFunSuite with BeforeAndAfterEa
         } {
           out2.close()
         }
    -    resolver.writeIndexFileAndCommit(1, 2, lengths2, dataTmp2)
    +    resolver.writeIndexFileAndCommit(shuffleId, mapId, lengths2, dataTmp2)
    +
    +    assert(indexFile.length() === (lengths.length + 1) * 8)
         assert(lengths2.toSeq === lengths.toSeq)
         assert(dataFile.exists())
         assert(dataFile.length() === 30)
         assert(!dataTmp2.exists())
     
         // The dataFile should be the previous one
         val firstByte = new Array[Byte](1)
    -    val in = new FileInputStream(dataFile)
    +    val dataIn = new FileInputStream(dataFile)
         Utils.tryWithSafeFinally {
    -      in.read(firstByte)
    +      dataIn.read(firstByte)
         } {
    -      in.close()
    +      dataIn.close()
         }
         assert(firstByte(0) === 0)
     
    +    // The index file should not change
    +    val secondValueOffset = new Array[Byte](8)
    +    val indexIn = new FileInputStream(indexFile)
    +    Utils.tryWithSafeFinally {
    +      indexIn.read(secondValueOffset)
    +      indexIn.read(secondValueOffset)
    +    } {
    +      indexIn.close()
    +    }
    +    assert(secondValueOffset(7) === 10, "The index file should not change")
    --- End diff ---
    
    minor: here and below, this would be clearer if you used `DataInputStream.readLong()` (no magic offset of 7, and you check all of the bytes rather than just one):
    ```scala
    val indexIn = new DataInputStream(new FileInputStream(indexFile))
    Utils.tryWithSafeFinally {
      indexIn.readLong()  // first offset is always 0
      assert(10 === indexIn.readLong(), "The index file should not change")
    } {
      indexIn.close()
    }
    ```
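
    Or, if you want to check every offset, something like this would work (just a rough sketch: it assumes the index file is a leading 0 followed by the cumulative sums of `lengths`, and that `java.io.DataInputStream` is imported):
    ```scala
    // Sketch only: walk the whole index file and compare each offset
    // against the running sum of `lengths` from the test above.
    val allOffsetsIn = new DataInputStream(new FileInputStream(indexFile))
    Utils.tryWithSafeFinally {
      var expected = 0L
      assert(allOffsetsIn.readLong() === expected)  // first offset is always 0
      for (length <- lengths) {
        expected += length
        assert(allOffsetsIn.readLong() === expected, "The index file should not change")
      }
    } {
      allOffsetsIn.close()
    }
    ```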

