kfaraz commented on code in PR #16521: URL: https://github.com/apache/druid/pull/16521#discussion_r1623310787
########## extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureStorageAccountInputSourceTest.java: ########## @@ -224,7 +244,11 @@ public void test_getPrefixesSplitStream_withObjectGlob_successfullyCreatesCloudL ); EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_LISTING_LENGTH); - EasyMock.expect(azureCloudBlobIterableFactory.create(EasyMock.eq(prefixes), EasyMock.eq(MAX_LISTING_LENGTH), EasyMock.anyObject(AzureStorage.class))).andReturn( + EasyMock.expect(azureCloudBlobIterableFactory.create( Review Comment: same comment as above. ########## extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureStorageAccountInputSourceTest.java: ########## @@ -174,7 +189,11 @@ public void test_createSplits_successfullyCreatesCloudLocation_returnsExpectedLo List<CloudBlobHolder> expectedCloudBlobs = ImmutableList.of(cloudBlobDruid1); Iterator<CloudBlobHolder> expectedCloudBlobsIterator = expectedCloudBlobs.iterator(); EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_LISTING_LENGTH); - EasyMock.expect(azureCloudBlobIterableFactory.create(EasyMock.eq(prefixes), EasyMock.eq(MAX_LISTING_LENGTH), EasyMock.anyObject(AzureStorage.class))).andReturn( + EasyMock.expect(azureCloudBlobIterableFactory.create( Review Comment: same comment as above. 
########## extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureInputSourceTest.java: ########## @@ -364,29 +370,34 @@ public void test_systemFields() (containerName, blobPath, storage) -> null ); - Assert.assertEquals("azure://foo/bar", azureInputSource.getSystemFieldValue(entity, SystemField.URI)); - Assert.assertEquals("foo", azureInputSource.getSystemFieldValue(entity, SystemField.BUCKET)); - Assert.assertEquals("bar", azureInputSource.getSystemFieldValue(entity, SystemField.PATH)); + assertEquals("azure://foo/bar", azureInputSource.getSystemFieldValue(entity, SystemField.URI)); + assertEquals("foo", azureInputSource.getSystemFieldValue(entity, SystemField.BUCKET)); + assertEquals("bar", azureInputSource.getSystemFieldValue(entity, SystemField.PATH)); } @Test public void abidesEqualsContract() { - EqualsVerifier.forClass(AzureInputSource.class) - .usingGetClass() - .withPrefabValues(Logger.class, new Logger(AzureStorage.class), new Logger(AzureStorage.class)) - .withPrefabValues(BlobContainerClient.class, new BlobContainerClientBuilder().buildClient(), new BlobContainerClientBuilder().buildClient()) - .withPrefabValues(AzureStorage.class, new AzureStorage(null, null), new AzureStorage(null, null)) - .withNonnullFields("storage") - .withNonnullFields("entityFactory") - .withNonnullFields("azureCloudBlobIterableFactory") - .withNonnullFields("inputDataConfig") - .withNonnullFields("objectGlob") - .withNonnullFields("scheme") - .verify(); + EqualsVerifier + .forClass(AzureInputSource.class) + .usingGetClass() + .withPrefabValues(Logger.class, new Logger(AzureStorage.class), new Logger(AzureStorage.class)) + .withPrefabValues( + BlobContainerClient.class, + new BlobContainerClientBuilder().buildClient(), + new BlobContainerClientBuilder().buildClient() + ) + .withPrefabValues(AzureStorage.class, new AzureStorage(null, null), new AzureStorage(null, null)) + .withNonnullFields("storage") + .withNonnullFields("entityFactory") + 
.withNonnullFields("azureCloudBlobIterableFactory") + .withNonnullFields("inputDataConfig") + .withNonnullFields("objectGlob") + .withNonnullFields("scheme") + .verify(); Review Comment: any specific reason to change this? ########## extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureStorageAccountInputSourceTest.java: ########## @@ -121,33 +127,42 @@ public void setup() EasyMock.expect(azureAccountConfig.getAccount()).andReturn(DEFAULT_STORAGE_ACCOUNT).anyTimes(); } - @Test(expected = IllegalArgumentException.class) + @Test public void test_constructor_emptyUrisEmptyPrefixesEmptyObjects_throwsIllegalArgumentException() { replayAll(); - azureInputSource = new AzureStorageAccountInputSource( - entityFactory, - azureCloudBlobIterableFactory, - inputDataConfig, - azureAccountConfig, - EMPTY_URIS, - EMPTY_PREFIXES, - EMPTY_OBJECTS, - null, - azureStorageAccountInputSourceConfig, - null + + //noinspection ResultOfObjectAllocationIgnored + assertThrows( + IllegalArgumentException.class, + () -> new AzureStorageAccountInputSource( + entityFactory, + azureCloudBlobIterableFactory, + inputDataConfig, + azureAccountConfig, + EMPTY_URIS, + EMPTY_PREFIXES, + EMPTY_OBJECTS, + null, + azureStorageAccountInputSourceConfig, + null + ) ); } @Test public void test_createEntity_returnsExpectedEntity() { - EasyMock.expect(entityFactory.create(EasyMock.eq(CLOUD_OBJECT_LOCATION_1), EasyMock.anyObject(AzureStorage.class), EasyMock.eq(AzureStorageAccountInputSource.SCHEME))).andReturn(azureEntity1); + EasyMock.expect(entityFactory.create( + EasyMock.eq(CLOUD_OBJECT_LOCATION_1), + EasyMock.anyObject(AzureStorage.class), + EasyMock.eq(AzureStorageAccountInputSource.SCHEME) + )).andReturn(azureEntity1); Review Comment: Nit: more readable this way. 
```suggestion EasyMock.expect( entityFactory.create( EasyMock.eq(CLOUD_OBJECT_LOCATION_1), EasyMock.anyObject(AzureStorage.class), EasyMock.eq(AzureStorageAccountInputSource.SCHEME) ) ).andReturn(azureEntity1); ``` ########## extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentKillerTest.java: ########## @@ -121,27 +124,35 @@ public void killTest() throws SegmentLoadingException, BlobStorageException verifyAll(); } - @Test(expected = SegmentLoadingException.class) + @Test public void test_kill_StorageExceptionExtendedErrorInformationNull_throwsException() - throws SegmentLoadingException, BlobStorageException { + String dirPath = Paths.get(BLOB_PATH).getParent().toString(); - common_test_kill_StorageExceptionExtendedError_throwsException(); - } + EasyMock.expect(azureStorage.emptyCloudBlobDirectory(CONTAINER_NAME, dirPath)).andThrow( + new BlobStorageException( + "", + null, + null + ) Review Comment: can be in a single line. ########## extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentKillerTest.java: ########## @@ -321,58 +314,69 @@ public void killBatchTest() throws SegmentLoadingException, BlobStorageException verifyAll(); - Assert.assertEquals( - ImmutableSet.of(BLOB_PATH, BLOB_PATH_2), - new HashSet<>(deletedFilesCapture.getValue()) - ); + assertEquals(ImmutableSet.of(BLOB_PATH, BLOB_PATH_2), new HashSet<>(deletedFilesCapture.getValue())); } - @Test(expected = RuntimeException.class) + @Test public void test_killBatch_runtimeException() - throws SegmentLoadingException, BlobStorageException { - EasyMock.expect(azureStorage.batchDeleteFiles(CONTAINER_NAME, ImmutableList.of(BLOB_PATH, BLOB_PATH_2), null)) .andThrow(new RuntimeException("")); replayAll(); - AzureDataSegmentKiller killer = new AzureDataSegmentKiller(segmentConfig, inputDataConfig, accountConfig, azureStorage, azureCloudBlobIterableFactory); - - killer.kill(ImmutableList.of(DATA_SEGMENT, DATA_SEGMENT_2)); + 
assertThrows( + RuntimeException.class, + () -> { + AzureDataSegmentKiller killer = new AzureDataSegmentKiller( + segmentConfig, + inputDataConfig, + accountConfig, + azureStorage, + azureCloudBlobIterableFactory + ); + killer.kill(ImmutableList.of(DATA_SEGMENT, DATA_SEGMENT_2)); + } + ); verifyAll(); } - @Test(expected = SegmentLoadingException.class) + @Test public void test_killBatch_SegmentLoadingExceptionOnError() - throws SegmentLoadingException, BlobStorageException { - EasyMock.expect(azureStorage.batchDeleteFiles(CONTAINER_NAME, ImmutableList.of(BLOB_PATH, BLOB_PATH_2), null)) .andReturn(false); replayAll(); - AzureDataSegmentKiller killer = new AzureDataSegmentKiller(segmentConfig, inputDataConfig, accountConfig, azureStorage, azureCloudBlobIterableFactory); - - killer.kill(ImmutableList.of(DATA_SEGMENT, DATA_SEGMENT_2)); + assertThrows( + SegmentLoadingException.class, + () -> { + AzureDataSegmentKiller killer = new AzureDataSegmentKiller( + segmentConfig, + inputDataConfig, + accountConfig, + azureStorage, + azureCloudBlobIterableFactory + ); + killer.kill(ImmutableList.of(DATA_SEGMENT, DATA_SEGMENT_2)); Review Comment: Including both the statements in the lambda makes it difficult to verify that it is `.kill()` that has to throw the exception. 
########## extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureStorageAccountInputSourceTest.java: ########## @@ -224,7 +244,11 @@ public void test_getPrefixesSplitStream_withObjectGlob_successfullyCreatesCloudL ); EasyMock.expect(inputDataConfig.getMaxListingLength()).andReturn(MAX_LISTING_LENGTH); - EasyMock.expect(azureCloudBlobIterableFactory.create(EasyMock.eq(prefixes), EasyMock.eq(MAX_LISTING_LENGTH), EasyMock.anyObject(AzureStorage.class))).andReturn( + EasyMock.expect(azureCloudBlobIterableFactory.create( + EasyMock.eq(prefixes), + EasyMock.eq(MAX_LISTING_LENGTH), + EasyMock.anyObject(AzureStorage.class) + )).andReturn( azureCloudBlobIterable); Review Comment: this could be in the previous line itself. ########## extensions-core/azure-extensions/src/test/java/org/apache/druid/data/input/azure/AzureStorageAccountInputSourceTest.java: ########## @@ -385,19 +413,31 @@ public void test_systemFields() (containerName, blobPath, storage) -> null ); - Assert.assertEquals("azureStorage://foo/container/bar", azureInputSource.getSystemFieldValue(entity, SystemField.URI)); - Assert.assertEquals("foo", azureInputSource.getSystemFieldValue(entity, SystemField.BUCKET)); - Assert.assertEquals("container/bar", azureInputSource.getSystemFieldValue(entity, SystemField.PATH)); + assertEquals( + "azureStorage://foo/container/bar", + azureInputSource.getSystemFieldValue(entity, SystemField.URI) + ); + assertEquals("foo", azureInputSource.getSystemFieldValue(entity, SystemField.BUCKET)); + assertEquals("container/bar", azureInputSource.getSystemFieldValue(entity, SystemField.PATH)); } @Test public void abidesEqualsContract() { - EqualsVerifier.forClass(AzureStorageAccountInputSource.class) + EqualsVerifier + .forClass(AzureStorageAccountInputSource.class) .usingGetClass() .withPrefabValues(Logger.class, new Logger(AzureStorage.class), new Logger(AzureStorage.class)) - .withPrefabValues(BlobContainerClient.class, new 
BlobContainerClientBuilder().buildClient(), new BlobContainerClientBuilder().buildClient()) - .withPrefabValues(AzureIngestClientFactory.class, new AzureIngestClientFactory(null, null), new AzureIngestClientFactory(null, null)) + .withPrefabValues( + BlobContainerClient.class, + new BlobContainerClientBuilder().buildClient(), + new BlobContainerClientBuilder().buildClient() + ) + .withPrefabValues( + AzureIngestClientFactory.class, + new AzureIngestClientFactory(null, null), + new AzureIngestClientFactory(null, null) + ) Review Comment: thanks, much better now. ########## extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java: ########## @@ -121,28 +121,29 @@ public void test_push_nonUniquePathNoPrefix_succeeds() throws Exception replayAll(); - DataSegment segment = pusher.push(tempFolder.getRoot(), SEGMENT_TO_PUSH, useUniquePath); + DataSegment segment = pusher.push(tempPath.toFile(), SEGMENT_TO_PUSH, useUniquePath); - Assert.assertTrue( - segment.getLoadSpec().get("blobPath").toString(), - Pattern.compile(NON_UNIQUE_NO_PREFIX_MATCHER).matcher(segment.getLoadSpec().get("blobPath").toString()).matches() + assertTrue( + Pattern.compile(NON_UNIQUE_NO_PREFIX_MATCHER) + .matcher(segment.getLoadSpec().get("blobPath").toString()) + .matches(), + segment.getLoadSpec().get("blobPath").toString() Review Comment: much more readable now, thanks! 
########## extensions-core/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java: ########## @@ -25,163 +25,166 @@ import org.apache.druid.segment.loading.SegmentLoadingException; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; import java.io.File; -import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; public class AzureDataSegmentPullerTest extends EasyMockSupport { - - private static final String SEGMENT_FILE_NAME = "segment"; private static final String CONTAINER_NAME = "container"; private static final String BLOB_PATH = "path/to/storage/index.zip"; - private static final String BLOB_PATH_HADOOP = AzureUtils.AZURE_STORAGE_HOST_ADDRESS + "/path/to/storage/index.zip"; private AzureStorage azureStorage; private AzureByteSourceFactory byteSourceFactory; - @Before + @BeforeEach public void before() { azureStorage = createMock(AzureStorage.class); byteSourceFactory = createMock(AzureByteSourceFactory.class); } @Test - public void test_getSegmentFiles_success() - throws SegmentLoadingException, BlobStorageException, IOException + public void test_getSegmentFiles_success(@TempDir Path sourcePath, @TempDir Path targetPath) + throws IOException, SegmentLoadingException { + final String segmentFileName = "segment"; final String value = "bucket"; - final File pulledFile = 
AzureTestUtils.createZipTempFile(SEGMENT_FILE_NAME, value); - final File toDir = FileUtils.createTempDir(); - try { - final InputStream zipStream = new FileInputStream(pulledFile); - final AzureAccountConfig config = new AzureAccountConfig(); - EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)).andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); - EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andReturn(zipStream); + final File pulledFile = createZipTempFile(sourcePath, segmentFileName, value); - replayAll(); + final InputStream zipStream = Files.newInputStream(pulledFile.toPath()); + final AzureAccountConfig config = new AzureAccountConfig(); - AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); + EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)) + .andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); + EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andReturn(zipStream); - FileUtils.FileCopyResult result = puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, toDir); + replayAll(); - File expected = new File(toDir, SEGMENT_FILE_NAME); - Assert.assertEquals(value.length(), result.size()); - Assert.assertTrue(expected.exists()); - Assert.assertEquals(value.length(), expected.length()); + AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); - verifyAll(); - } - finally { - pulledFile.delete(); - FileUtils.deleteDirectory(toDir); - } + FileUtils.FileCopyResult result = puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, targetPath.toFile()); + + File expected = new File(targetPath.toFile(), segmentFileName); + assertEquals(value.length(), result.size()); + assertTrue(expected.exists()); + assertEquals(value.length(), expected.length()); + + verifyAll(); } @Test - public void 
test_getSegmentFiles_blobPathIsHadoop_success() - throws SegmentLoadingException, BlobStorageException, IOException + public void test_getSegmentFiles_blobPathIsHadoop_success(@TempDir Path sourcePath, @TempDir Path targetPath) + throws IOException, SegmentLoadingException { + final String segmentFileName = "segment"; final String value = "bucket"; - final File pulledFile = AzureTestUtils.createZipTempFile(SEGMENT_FILE_NAME, value); - final File toDir = FileUtils.createTempDir(); - try { - final InputStream zipStream = new FileInputStream(pulledFile); - final AzureAccountConfig config = new AzureAccountConfig(); - EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)).andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); - EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andReturn(zipStream); + final File pulledFile = createZipTempFile(sourcePath, segmentFileName, value); - replayAll(); + final InputStream zipStream = Files.newInputStream(pulledFile.toPath()); + final AzureAccountConfig config = new AzureAccountConfig(); - AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); + EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)) + .andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); + EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andReturn(zipStream); - FileUtils.FileCopyResult result = puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH_HADOOP, toDir); + replayAll(); - File expected = new File(toDir, SEGMENT_FILE_NAME); - Assert.assertEquals(value.length(), result.size()); - Assert.assertTrue(expected.exists()); - Assert.assertEquals(value.length(), expected.length()); + AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); - verifyAll(); - } - finally { - pulledFile.delete(); - 
FileUtils.deleteDirectory(toDir); - } + final String blobPathHadoop = AzureUtils.AZURE_STORAGE_HOST_ADDRESS + "/path/to/storage/index.zip"; + FileUtils.FileCopyResult result = puller.getSegmentFiles(CONTAINER_NAME, blobPathHadoop, targetPath.toFile()); + + File expected = new File(targetPath.toFile(), segmentFileName); + assertEquals(value.length(), result.size()); + assertTrue(expected.exists()); + assertEquals(value.length(), expected.length()); + + verifyAll(); } - @Test(expected = RuntimeException.class) - public void test_getSegmentFiles_nonRecoverableErrorRaisedWhenPullingSegmentFiles_doNotDeleteOutputDirectory() - throws IOException, BlobStorageException, SegmentLoadingException + @Test + public void test_getSegmentFiles_nonRecoverableErrorRaisedWhenPullingSegmentFiles_doNotDeleteOutputDirectory( + @TempDir Path tempPath + ) { final AzureAccountConfig config = new AzureAccountConfig(); - final File outDir = FileUtils.createTempDir(); - try { - EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)).andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); - EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andThrow( - new RuntimeException( - "error" - ) - ); + EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)) + .andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); + EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)) + .andThrow(new RuntimeException("error")); - replayAll(); + AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); - AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); + replayAll(); - puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, outDir); - } - catch (Exception e) { - Assert.assertTrue(outDir.exists()); - verifyAll(); - throw e; - } - finally { - FileUtils.deleteDirectory(outDir); - } 
+ assertThrows( + RuntimeException.class, + () -> puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, tempPath.toFile()) + ); + assertTrue(tempPath.toFile().exists()); + + verifyAll(); } - @Test(expected = SegmentLoadingException.class) - public void test_getSegmentFiles_recoverableErrorRaisedWhenPullingSegmentFiles_deleteOutputDirectory() - throws IOException, BlobStorageException, SegmentLoadingException + @Test + public void test_getSegmentFiles_recoverableErrorRaisedWhenPullingSegmentFiles_deleteOutputDirectory( + @TempDir Path tempPath + ) { final AzureAccountConfig config = new AzureAccountConfig(); - final File outDir = FileUtils.createTempDir(); - try { - HttpResponse httpResponse = createMock(HttpResponse.class); - EasyMock.expect(httpResponse.getStatusCode()).andReturn(500).anyTimes(); - EasyMock.replay(httpResponse); - EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)).andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); - EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andThrow( - new BlobStorageException("", httpResponse, null) - ).atLeastOnce(); + final HttpResponse httpResponse = createMock(HttpResponse.class); + EasyMock.expect(httpResponse.getStatusCode()).andReturn(500).anyTimes(); + EasyMock.replay(httpResponse); + EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH, azureStorage)) + .andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH)); + EasyMock.expect(azureStorage.getBlockBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andThrow( + new BlobStorageException("", httpResponse, null) + ).atLeastOnce(); - EasyMock.replay(azureStorage); - EasyMock.replay(byteSourceFactory); + EasyMock.replay(azureStorage); + EasyMock.replay(byteSourceFactory); - AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, azureStorage, config); + AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory, 
azureStorage, config); - puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, outDir); + assertThrows( + SegmentLoadingException.class, + () -> puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, tempPath.toFile()) + ); - Assert.assertFalse(outDir.exists()); + assertFalse(tempPath.toFile().exists()); + verifyAll(); - verifyAll(); - } - catch (Exception e) { - Assert.assertFalse(outDir.exists()); - verifyAll(); - throw e; - } - finally { - FileUtils.deleteDirectory(outDir); + assertFalse(tempPath.toFile().exists()); + verifyAll(); Review Comment: duplicate lines? -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: commits-unsubscribe@druid.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscribe@druid.apache.org For additional commands, e-mail: commits-help@druid.apache.org