This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch branch-3.4.2
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/branch-3.4.2 by this push:
     new a1c1511c2c2 HADOOP-19573. S3A: ITestS3AConfiguration.testDirectoryAllocatorDefval() failing (#7699)

a1c1511c2c2 is described below

commit a1c1511c2c20285e3c6ea0a1f51ce79f6bd67eea
Author: Steve Loughran <ste...@cloudera.com>
AuthorDate: Tue May 20 20:31:50 2025 +0100

    HADOOP-19573. S3A: ITestS3AConfiguration.testDirectoryAllocatorDefval() failing (#7699)

    * trim the buffer dir string before the probe (more robust code anyway)
    * tests to set this, and remove any instantiated context mappers

    Contributed by Steve Loughran
---
 .../apache/hadoop/fs/s3a/impl/S3AStoreImpl.java | 10 +++--
 .../hadoop/fs/s3a/ITestS3AConfiguration.java    | 43 ++++++++++++++++++----
 2 files changed, 41 insertions(+), 12 deletions(-)

diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AStoreImpl.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AStoreImpl.java
index 526c9b15bc3..96ab44a8597 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AStoreImpl.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AStoreImpl.java
@@ -57,6 +57,7 @@ import software.amazon.awssdk.transfer.s3.model.FileUpload;
 import software.amazon.awssdk.transfer.s3.model.UploadFileRequest;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalDirAllocator;
@@ -287,10 +288,11 @@ public boolean inputStreamHasCapability(final String capability) {
    * Initialize dir allocator if not already initialized.
    */
   private void initLocalDirAllocator() {
-    String bufferDir = getConfig().get(BUFFER_DIR) != null
-        ? BUFFER_DIR
-        : HADOOP_TMP_DIR;
-    directoryAllocator = new LocalDirAllocator(bufferDir);
+    String key = BUFFER_DIR;
+    if (StringUtils.isEmpty(getConfig().getTrimmed(key))) {
+      key = HADOOP_TMP_DIR;
+    }
+    directoryAllocator = new LocalDirAllocator(key);
   }
 
   /** Acquire write capacity for rate limiting {@inheritDoc}.
   */
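The production change above boils down to probing the trimmed value of fs.s3a.buffer.dir rather than its raw presence. A minimal, self-contained sketch of that key-selection logic follows, assuming the standard option names fs.s3a.buffer.dir and hadoop.tmp.dir; the class and helper method are illustrative stand-ins, not part of S3AStoreImpl:

    // Sketch of the fallback the patch introduces: an unset, empty or
    // whitespace-only fs.s3a.buffer.dir now falls back to hadoop.tmp.dir,
    // because the value is trimmed before the emptiness probe.
    import org.apache.commons.lang3.StringUtils;
    import org.apache.hadoop.conf.Configuration;

    public class BufferDirProbe {
      // Stand-ins for the constants referenced in S3AStoreImpl.
      static final String BUFFER_DIR = "fs.s3a.buffer.dir";
      static final String HADOOP_TMP_DIR = "hadoop.tmp.dir";

      static String chooseAllocatorKey(Configuration conf) {
        String key = BUFFER_DIR;
        if (StringUtils.isEmpty(conf.getTrimmed(key))) {
          // unset, "" and "   " all land here; the old get() != null probe
          // only caught the unset case.
          key = HADOOP_TMP_DIR;
        }
        return key;
      }

      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.set(BUFFER_DIR, "   ");                   // whitespace-only value
        System.out.println(chooseAllocatorKey(conf));  // hadoop.tmp.dir
        conf.set(BUFFER_DIR, "/tmp/s3a");
        System.out.println(chooseAllocatorKey(conf));  // fs.s3a.buffer.dir
      }
    }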
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
index e6dabc91bac..74b3b5d3075 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
@@ -49,6 +49,7 @@ import org.apache.commons.lang3.reflect.FieldUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.contract.ContractTestUtils;
 import org.apache.hadoop.fs.s3a.auth.STSClientFactory;
@@ -63,6 +64,7 @@ import org.apache.hadoop.util.VersionInfo;
 import org.apache.http.HttpStatus;
 
+import static java.lang.String.format;
 import static java.util.Objects.requireNonNull;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
 import static org.apache.hadoop.fs.s3a.Constants.*;
@@ -485,12 +487,29 @@ public void testCloseIdempotent() throws Throwable {
 
   @Test
   public void testDirectoryAllocatorDefval() throws Throwable {
+    removeAllocatorContexts();
     conf = new Configuration();
-    conf.unset(Constants.BUFFER_DIR);
-    fs = S3ATestUtils.createTestFileSystem(conf);
-    File tmp = createTemporaryFileForWriting();
-    assertTrue("not found: " + tmp, tmp.exists());
-    tmp.delete();
+    final String bucketName = getTestBucketName(conf);
+    final String blank = " ";
+    conf.set(Constants.BUFFER_DIR, blank);
+    conf.set(format("fs.s3a.bucket.%s.buffer.dir", bucketName), blank);
+    try {
+      fs = S3ATestUtils.createTestFileSystem(conf);
+      final Configuration fsConf = fs.getConf();
+      Assertions.assertThat(fsConf.get(Constants.BUFFER_DIR))
+          .describedAs("Config option %s", Constants.BUFFER_DIR)
+          .isEqualTo(blank);
+      File tmp = createTemporaryFileForWriting();
+      assertTrue("not found: " + tmp, tmp.exists());
+      tmp.delete();
+    } finally {
+      removeAllocatorContexts();
+    }
+  }
+
+  private static void removeAllocatorContexts() {
+    LocalDirAllocator.removeContext(BUFFER_DIR);
+    LocalDirAllocator.removeContext(HADOOP_TMP_DIR);
   }
 
   /**
@@ -504,13 +523,21 @@ private File createTemporaryFileForWriting() throws IOException {
 
   @Test
   public void testDirectoryAllocatorRR() throws Throwable {
+    removeAllocatorContexts();
     File dir1 = GenericTestUtils.getRandomizedTestDir();
     File dir2 = GenericTestUtils.getRandomizedTestDir();
     dir1.mkdirs();
     dir2.mkdirs();
     conf = new Configuration();
-    conf.set(Constants.BUFFER_DIR, dir1 + ", " + dir2);
+    final String bucketName = getTestBucketName(conf);
+    final String dirs = dir1 + ", " + dir2;
+    conf.set(Constants.BUFFER_DIR, dirs);
+    conf.set(format("fs.s3a.bucket.%s.buffer.dir", bucketName), dirs);
     fs = S3ATestUtils.createTestFileSystem(conf);
+    final Configuration fsConf = fs.getConf();
+    Assertions.assertThat(fsConf.get(Constants.BUFFER_DIR))
+        .describedAs("Config option %s", Constants.BUFFER_DIR)
+        .isEqualTo(dirs);
     File tmp1 = createTemporaryFileForWriting();
     tmp1.delete();
     File tmp2 = createTemporaryFileForWriting();
@@ -552,10 +579,10 @@ public S3AFileSystem run() throws Exception{
   private static <T> T getField(Object target, Class<T> fieldType,
       String fieldName) throws IllegalAccessException {
     Object obj = FieldUtils.readField(target, fieldName, true);
-    assertNotNull(String.format(
+    assertNotNull(format(
         "Could not read field named %s in object with class %s.",
         fieldName, target.getClass().getName()),
         obj);
-    assertTrue(String.format(
+    assertTrue(format(
         "Unexpected type found for field named %s, expected %s, actual %s.",
         fieldName, fieldType.getName(), obj.getClass().getName()),
         fieldType.isAssignableFrom(obj.getClass()));
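On the test side, two details are worth noting. The tests now set both fs.s3a.buffer.dir and the per-bucket variant fs.s3a.bucket.<bucket>.buffer.dir, because S3A propagates per-bucket options onto the base keys when the filesystem is created, so a per-bucket buffer dir lingering in the test configuration would silently replace the value under test. They also call LocalDirAllocator.removeContext() up front, since the allocator caches its state statically per configuration key and a context instantiated by an earlier test would otherwise be reused. The following is a hedged, self-contained sketch of the per-bucket propagation using S3AUtils.propagateBucketOptions(); the bucket name "example" and the paths are invented for illustration:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.s3a.S3AUtils;

    public class BufferDirOverrideDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // Base value under test: blank, so the store should fall back to hadoop.tmp.dir.
        conf.set("fs.s3a.buffer.dir", " ");
        // A stray per-bucket value, e.g. from a test configuration file (hypothetical bucket "example").
        conf.set("fs.s3a.bucket.example.buffer.dir", "/tmp/s3a-example");
        // S3A copies per-bucket options onto the base keys during initialization,
        // so the per-bucket value wins unless the test blanks it out too.
        Configuration patched = S3AUtils.propagateBucketOptions(conf, "example");
        System.out.println(patched.get("fs.s3a.buffer.dir"));  // /tmp/s3a-example
      }
    }

Running this prints /tmp/s3a-example, which is why testDirectoryAllocatorDefval() blanks the per-bucket key as well as the base one.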