Ngone51 commented on a change in pull request #30062:
URL: https://github.com/apache/spark/pull/30062#discussion_r509980252



##########
File path: common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/RemoteBlockPushResolverSuite.java
##########
@@ -0,0 +1,528 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.network.shuffle;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.Arrays;
+
+import com.google.common.base.Preconditions;
+import com.google.common.base.Throwables;
+import com.google.common.collect.ImmutableMap;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.roaringbitmap.RoaringBitmap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.Assert.*;
+
+import org.apache.spark.network.buffer.FileSegmentManagedBuffer;
+import org.apache.spark.network.client.StreamCallbackWithID;
+import org.apache.spark.network.shuffle.protocol.FinalizeShuffleMerge;
+import org.apache.spark.network.shuffle.protocol.PushBlockStream;
+import org.apache.spark.network.util.MapConfigProvider;
+import org.apache.spark.network.util.TransportConf;
+
+/**
+ * Tests for {@link RemoteBlockPushResolver}.
+ */
+public class RemoteBlockPushResolverSuite {
+
+  private static final Logger log = LoggerFactory.getLogger(RemoteBlockPushResolverSuite.class);
+  private final String MERGE_DIR_RELATIVE_PATH = "usercache/%s/appcache/%s/";
+  private final String TEST_USER = "testUser";
+  private final String TEST_APP = "testApp";
+  private final String BLOCK_MANAGER_DIR = "blockmgr-193d8401";
+
+  private TransportConf conf;
+  private RemoteBlockPushResolver pushResolver;
+  private String[] localDirs;
+
+  @Before
+  public void before() throws IOException {
+    localDirs = new String[]{Paths.get("target/l1").toAbsolutePath().toString(),
+      Paths.get("target/l2").toAbsolutePath().toString()};
+    cleanupLocalDirs();
+    MapConfigProvider provider = new MapConfigProvider(
+      ImmutableMap.of("spark.shuffle.server.minChunkSizeInMergedShuffleFile", "4"));
+    conf = new TransportConf("shuffle", provider);
+    pushResolver = new RemoteBlockPushResolver(conf, MERGE_DIR_RELATIVE_PATH);
+  }
+
+  @After
+  public void after() {
+    try {
+      cleanupLocalDirs();
+    } catch (IOException e) {
+      // don't fail if clean up doesn't succeed.
+      log.warn("Error deleting test local dirs", e);
+    }
+  }
+
+  private void cleanupLocalDirs() throws IOException {
+    for (String local : localDirs) {
+      FileUtils.deleteDirectory(new File(local));
+    }
+  }
+
+  @Test(expected = RuntimeException.class)
+  public void testNoIndexFile() {
+    try {
+      registerApplication(TEST_APP, TEST_USER);
+      registerExecutor(TEST_APP, prepareBlockManagerLocalDirs(TEST_APP, TEST_USER, localDirs));
+      pushResolver.getMergedBlockMeta(TEST_APP, 0, 0);
+      removeApplication(TEST_APP);
+    } catch (Throwable t) {
+      assertTrue(t.getMessage().startsWith("Application merged shuffle index file is not found"));
+      Throwables.propagate(t);
+    }
+  }
+
+  @Test
+  public void testBasicBlockMerge() throws IOException {
+    registerApplication(TEST_APP, TEST_USER);
+    registerExecutor(TEST_APP, prepareBlockManagerLocalDirs(TEST_APP, TEST_USER, localDirs));
+    PushBlockStream[] pushBlocks = new PushBlockStream[] {
+      new PushBlockStream(TEST_APP, "shuffle_0_0_0", 0),
+      new PushBlockStream(TEST_APP, "shuffle_0_1_0", 0),
+    };
+    ByteBuffer[] blocks = new ByteBuffer[]{
+      ByteBuffer.wrap(new byte[4]),
+      ByteBuffer.wrap(new byte[5])
+    };
+    pushBlockHelper(TEST_APP, pushBlocks, blocks);
+    MergedBlockMeta blockMeta = pushResolver.getMergedBlockMeta(TEST_APP, 0, 0);
+    validateChunks(TEST_APP, 0, 0, blockMeta, new int[]{4, 5}, new int[][]{{0}, {1}});
+    removeApplication(TEST_APP);
+  }
+
+  @Test
+  public void testDividingMergedBlocksIntoChunks() throws IOException {
+    registerApplication(TEST_APP, TEST_USER);
+    registerExecutor(TEST_APP, prepareBlockManagerLocalDirs(TEST_APP, TEST_USER, localDirs));
+    PushBlockStream[] pushBlocks = new PushBlockStream[] {
+      new PushBlockStream(TEST_APP, "shuffle_0_0_0", 0),
+      new PushBlockStream(TEST_APP, "shuffle_0_1_0", 0),
+      new PushBlockStream(TEST_APP, "shuffle_0_2_0", 0),
+      new PushBlockStream(TEST_APP, "shuffle_0_3_0", 0),
+    };
+    ByteBuffer[] buffers = new ByteBuffer[]{
+      ByteBuffer.wrap(new byte[2]),
+      ByteBuffer.wrap(new byte[3]),
+      ByteBuffer.wrap(new byte[5]),
+      ByteBuffer.wrap(new byte[3])
+    };
+    pushBlockHelper(TEST_APP, pushBlocks, buffers);
+    MergedBlockMeta meta = pushResolver.getMergedBlockMeta(TEST_APP, 0, 0);
+    validateChunks(TEST_APP, 0, 0, meta, new int[]{5, 5, 3}, new int[][]{{0, 1}, {2}, {3}});
+    removeApplication(TEST_APP);
+  }
+
+  @Test
+  public void testDeferredBufsAreWrittenDuringOnData() throws IOException {
+    registerApplication(TEST_APP, TEST_USER);
+    registerExecutor(TEST_APP, prepareBlockManagerLocalDirs(TEST_APP, TEST_USER, localDirs));
+
+    PushBlockStream pbStream1 = new PushBlockStream(TEST_APP, "shuffle_0_0_0", 0);
+    StreamCallbackWithID stream1 =
+      pushResolver.receiveBlockDataAsStream(new PushBlockStream(TEST_APP, pbStream1.blockId, 0));
+    stream1.onData(stream1.getID(), ByteBuffer.wrap(new byte[2]));
+
+    PushBlockStream pbStream2 = new PushBlockStream(TEST_APP, "shuffle_0_1_0", 0);
+    StreamCallbackWithID stream2 =
+      pushResolver.receiveBlockDataAsStream(new PushBlockStream(TEST_APP, pbStream2.blockId, 0));
+    // This should be deferred
+    stream2.onData(stream2.getID(), ByteBuffer.wrap(new byte[3]));

Review comment:
       Maybe add `spy()` on `stream2`, so that we can verify `writeAnyDeferredBlocks` is really invoked later?
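       For illustration, a rough sketch of what that could look like with Mockito. This is a sketch only, under a few assumptions: `writeAnyDeferredBlocks` (method name per the review) must be visible and overridable for a spy to record it, since Mockito cannot verify private methods; the concrete callback type used in the cast below is a hypothetical name; and the deferred flush must happen on the instance the test drives, not on an internal reference the resolver keeps to the original callback.

       ```java
       import static org.mockito.Mockito.spy;
       import static org.mockito.Mockito.verify;

       // Wrap the callback in a spy and drive the test through it, so that
       // Mockito records the internal call when the deferred bufs are flushed.
       StreamCallbackWithID spiedStream2 = spy(
         pushResolver.receiveBlockDataAsStream(
           new PushBlockStream(TEST_APP, pbStream2.blockId, 0)));
       // This should be deferred, as before.
       spiedStream2.onData(spiedStream2.getID(), ByteBuffer.wrap(new byte[3]));
       // Completing stream1 releases the merged file; completing the spied
       // stream should then flush its deferred bufs.
       stream1.onComplete(stream1.getID());
       spiedStream2.onComplete(spiedStream2.getID());
       // Cast to the concrete callback type (hypothetical name) so the
       // compiler can see the method, then assert it was really invoked.
       verify((RemoteBlockPushResolver.PushBlockStreamCallback) spiedStream2)
         .writeAnyDeferredBlocks();
       ```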


----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org
