This is an automated email from the ASF dual-hosted git repository.

mthakur pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 24f5f708df0 HADOOP-18778. Fixes failing tests when CSE is enabled. (#5763)
24f5f708df0 is described below

commit 24f5f708df0dff0ea16018b511a020559ac54230
Author: ahmarsuhail <ahmar.suh...@gmail.com>
AuthorDate: Wed Jul 26 17:26:49 2023 +0100

    HADOOP-18778. Fixes failing tests when CSE is enabled. (#5763)
    
    
    Contributed By: Ahmar Suhail <ahma...@amazon.co.uk>
---
 .../java/org/apache/hadoop/fs/s3a/S3AFileSystem.java  |  4 ++--
 .../org/apache/hadoop/fs/s3a/auth/RolePolicies.java   |  2 +-
 .../hadoop/fs/s3a/ITestS3APrefetchingCacheFiles.java  |  1 +
 .../hadoop/fs/s3a/ITestS3APrefetchingInputStream.java |  3 +++
 .../apache/hadoop/fs/s3a/ITestS3ARequesterPays.java   |  2 +-
 .../apache/hadoop/fs/s3a/auth/ITestAssumeRole.java    | 18 ++++++------------
 .../fs/s3a/auth/ITestAssumedRoleCommitOperations.java |  3 +--
 .../hadoop/fs/s3a/auth/ITestRestrictedReadAccess.java |  3 +--
 .../fs/s3a/impl/ITestPartialRenamesDeletes.java       | 19 ++++++++++++-------
 .../hadoop/fs/s3a/s3guard/ITestS3GuardTool.java       | 10 ++++++----
 10 files changed, 34 insertions(+), 31 deletions(-)
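
Background: several of the S3A integration tests touched below make assumptions (locally cached plaintext files, object sizes, bucket encryption settings) that do not hold once client-side encryption (CSE) is active, so the patch guards them with skipIfClientSideEncryption(). A minimal sketch of such a guard, assuming JUnit 4; the helper body and class name are illustrative, not the actual Hadoop implementation:

    import org.apache.hadoop.conf.Configuration;
    import org.junit.Assume;

    public class CseSkipSketch {
      /**
       * Skip the calling test when S3A client-side encryption is configured.
       * "fs.s3a.encryption.algorithm" is the real S3A option name; "CSE-KMS"
       * is the algorithm S3A uses for client-side encryption with AWS KMS.
       */
      static void skipIfClientSideEncryption(Configuration conf) {
        String algorithm = conf.getTrimmed("fs.s3a.encryption.algorithm", "");
        // Assume.assumeTrue() marks the test as skipped when the check fails.
        Assume.assumeTrue("Skipping: client-side encryption is enabled",
            !"CSE-KMS".equals(algorithm));
      }
    }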

diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
index 999186f8cd5..2c828a5ef35 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
@@ -213,7 +213,7 @@ import static org.apache.hadoop.fs.s3a.Listing.toLocatedFileStatusIterator;
 import static org.apache.hadoop.fs.s3a.S3AUtils.*;
 import static org.apache.hadoop.fs.s3a.Statistic.*;
 import static org.apache.hadoop.fs.s3a.audit.S3AAuditConstants.INITIALIZE_SPAN;
-import static org.apache.hadoop.fs.s3a.auth.RolePolicies.STATEMENT_ALLOW_SSE_KMS_RW;
+import static org.apache.hadoop.fs.s3a.auth.RolePolicies.STATEMENT_ALLOW_KMS_RW;
 import static org.apache.hadoop.fs.s3a.auth.RolePolicies.allowS3Operations;
 import static org.apache.hadoop.fs.s3a.auth.delegation.S3ADelegationTokens.TokenIssuingPolicy.NoTokensAvailable;
 import static org.apache.hadoop.fs.s3a.auth.delegation.S3ADelegationTokens.hasDelegationTokenBinding;
@@ -4222,7 +4222,7 @@ public class S3AFileSystem extends FileSystem implements StreamCapabilities,
     // no attempt is made to qualify KMS access; there's no
     // way to predict read keys, and not worried about granting
     // too much encryption access.
-    statements.add(STATEMENT_ALLOW_SSE_KMS_RW);
+    statements.add(STATEMENT_ALLOW_KMS_RW);
 
     return statements;
   }
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/RolePolicies.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/RolePolicies.java
index 940742c11e2..b2da2c80094 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/RolePolicies.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/RolePolicies.java
@@ -80,7 +80,7 @@ public final class RolePolicies {
   * Statement to allow KMS R/W access, so full use of
    * SSE-KMS.
    */
-  public static final Statement STATEMENT_ALLOW_SSE_KMS_RW =
+  public static final Statement STATEMENT_ALLOW_KMS_RW =
       statement(true, KMS_ALL_KEYS, KMS_ALL_OPERATIONS);
 
   /**
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingCacheFiles.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingCacheFiles.java
index 6ad8ef58a7f..e678df700b8 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingCacheFiles.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingCacheFiles.java
@@ -105,6 +105,7 @@ public class ITestS3APrefetchingCacheFiles extends AbstractS3ACostTest {
   @Test
   public void testCacheFileExistence() throws Throwable {
     describe("Verify that FS cache files exist on local FS");
+    skipIfClientSideEncryption();
 
     try (FSDataInputStream in = fs.open(testFile)) {
       byte[] buffer = new byte[prefetchBlockSize];
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingInputStream.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingInputStream.java
index a7b59bb5d46..4998cbc946e 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingInputStream.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3APrefetchingInputStream.java
@@ -106,6 +106,7 @@ public class ITestS3APrefetchingInputStream extends AbstractS3ACostTest {
   @Test
   public void testReadLargeFileFully() throws Throwable {
     describe("read a large file fully, uses S3ACachingInputStream");
+    skipIfClientSideEncryption();
     IOStatistics ioStats;
     createLargeFile();
 
@@ -139,6 +140,7 @@ public class ITestS3APrefetchingInputStream extends AbstractS3ACostTest {
   public void testReadLargeFileFullyLazySeek() throws Throwable {
     describe("read a large file using 
readFully(position,buffer,offset,length),"
         + " uses S3ACachingInputStream");
+    skipIfClientSideEncryption();
     IOStatistics ioStats;
     createLargeFile();
 
@@ -170,6 +172,7 @@ public class ITestS3APrefetchingInputStream extends AbstractS3ACostTest {
   @Test
   public void testRandomReadLargeFile() throws Throwable {
     describe("random read on a large file, uses S3ACachingInputStream");
+    skipIfClientSideEncryption();
     IOStatistics ioStats;
     createLargeFile();
 
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ARequesterPays.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ARequesterPays.java
index d3925d35a99..c58f13efbf2 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ARequesterPays.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ARequesterPays.java
@@ -59,7 +59,7 @@ public class ITestS3ARequesterPays extends AbstractS3ATestBase {
   @Test
   public void testRequesterPaysOptionSuccess() throws Throwable {
     describe("Test requester pays enabled case by reading last then first 
byte");
-
+    skipIfClientSideEncryption();
     Configuration conf = this.createConfiguration();
     conf.setBoolean(ALLOW_REQUESTER_PAYS, true);
     // Enable bucket exists check, the first failure point people may encounter
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java
index 9fb09b4cede..658c81cd8f2 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java
@@ -426,8 +426,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     bindRolePolicy(conf,
         policy(
             statement(false, S3_ALL_BUCKETS, S3_GET_OBJECT_TORRENT),
-            ALLOW_S3_GET_BUCKET_LOCATION,
-            STATEMENT_ALLOW_SSE_KMS_RW));
+            ALLOW_S3_GET_BUCKET_LOCATION, STATEMENT_ALLOW_KMS_RW));
     Path path = path("testAssumeRoleStillIncludesRolePerms");
     roleFS = (S3AFileSystem) path.getFileSystem(conf);
     assertTouchForbidden(roleFS, path);
@@ -447,8 +446,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     bindRolePolicy(conf,
         policy(
             statement(false, S3_ALL_BUCKETS, S3_PATH_WRITE_OPERATIONS),
-            STATEMENT_ALL_S3,
-            STATEMENT_ALLOW_SSE_KMS_READ));
+            STATEMENT_ALL_S3, STATEMENT_ALLOW_KMS_RW));
     Path path = methodPath();
     roleFS = (S3AFileSystem) path.getFileSystem(conf);
     // list the root path, expect happy
@@ -495,8 +493,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     Configuration conf = createAssumedRoleConfig();
 
     bindRolePolicyStatements(conf,
-        STATEMENT_ALL_BUCKET_READ_ACCESS,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+        STATEMENT_ALL_BUCKET_READ_ACCESS, STATEMENT_ALLOW_KMS_RW,
         new Statement(Effects.Allow)
           .addActions(S3_ALL_OPERATIONS)
           .addResources(directory(restrictedDir)));
@@ -563,8 +560,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     fs.delete(basePath, true);
     fs.mkdirs(readOnlyDir);
 
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS,
         new Statement(Effects.Allow)
             .addActions(S3_PATH_RW_OPERATIONS)
@@ -714,8 +710,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     S3AFileSystem fs = getFileSystem();
     fs.delete(destDir, true);
 
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_ALL_OPERATIONS),
         new Statement(Effects.Deny)
             .addActions(S3_PATH_WRITE_OPERATIONS)
@@ -746,8 +741,7 @@ public class ITestAssumeRole extends AbstractS3ATestBase {
     describe("Restrict role to read only");
     Configuration conf = createAssumedRoleConfig();
 
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_ALL_OPERATIONS),
         statement(false, S3_ALL_BUCKETS, S3_GET_BUCKET_LOCATION));
     Path path = methodPath();
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumedRoleCommitOperations.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumedRoleCommitOperations.java
index dabc0abc2af..2dc8497d618 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumedRoleCommitOperations.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumedRoleCommitOperations.java
@@ -61,8 +61,7 @@ public class ITestAssumedRoleCommitOperations extends ITestCommitOperations {
     restrictedDir = super.path("restricted");
     Configuration conf = newAssumedRoleConfig(getConfiguration(),
         getAssumedRoleARN());
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_BUCKET_READ_OPERATIONS),
         new RoleModel.Statement(RoleModel.Effects.Allow)
             .addActions(S3_PATH_RW_OPERATIONS)
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestRestrictedReadAccess.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestRestrictedReadAccess.java
index a16e1b5e492..7151c38ad3e 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestRestrictedReadAccess.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestRestrictedReadAccess.java
@@ -260,8 +260,7 @@ public class ITestRestrictedReadAccess extends AbstractS3ATestBase {
     // it still has write access, which can be explored in the final
     // step to delete files and directories.
     roleConfig = createAssumedRoleConfig();
-    bindRolePolicyStatements(roleConfig,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(roleConfig, STATEMENT_ALLOW_KMS_RW,
         statement(true, S3_ALL_BUCKETS, S3_ALL_OPERATIONS),
         new Statement(Effects.Deny)
             .addActions(S3_ALL_GET)
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestPartialRenamesDeletes.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestPartialRenamesDeletes.java
index 378f4a70433..24f5ddf6d89 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestPartialRenamesDeletes.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestPartialRenamesDeletes.java
@@ -56,6 +56,7 @@ import static org.apache.hadoop.fs.s3a.Statistic.OBJECT_DELETE_REQUEST;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.Effects;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.Statement;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.directory;
+import static org.apache.hadoop.fs.s3a.auth.RoleModel.resource;
 import static org.apache.hadoop.fs.s3a.auth.RoleModel.statement;
 import static org.apache.hadoop.fs.s3a.auth.RolePolicies.*;
 import static org.apache.hadoop.fs.s3a.auth.RoleTestUtils.bindRolePolicyStatements;
@@ -144,6 +145,11 @@ public class ITestPartialRenamesDeletes extends AbstractS3ATestBase {
    */
   private Path writableDir;
 
+  /**
+   * Instruction file created when using CSE, required to be added to policies.
+   */
+  private Path writableDirInstructionFile;
+
   /**
    * A directory to which restricted roles have only read access.
    */
@@ -216,6 +222,7 @@ public class ITestPartialRenamesDeletes extends AbstractS3ATestBase {
     basePath = uniquePath();
     readOnlyDir = new Path(basePath, "readonlyDir");
     writableDir = new Path(basePath, "writableDir");
+    writableDirInstructionFile = new Path(basePath, "writableDir.instruction");
     readOnlyChild = new Path(readOnlyDir, "child");
     noReadDir = new Path(basePath, "noReadDir");
     // the full FS
@@ -225,8 +232,7 @@ public class ITestPartialRenamesDeletes extends AbstractS3ATestBase {
 
     // create the baseline assumed role
     assumedRoleConfig = createAssumedRoleConfig();
-    bindRolePolicyStatements(assumedRoleConfig,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(assumedRoleConfig, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS,  // root:     r-x
         new Statement(Effects.Allow)       // dest:     rwx
             .addActions(S3_PATH_RW_OPERATIONS)
@@ -365,13 +371,13 @@ public class ITestPartialRenamesDeletes extends AbstractS3ATestBase {
   public void testRenameParentPathNotWriteable() throws Throwable {
     describe("rename with parent paths not writeable; multi=%s", multiDelete);
     final Configuration conf = createAssumedRoleConfig();
-    bindRolePolicyStatements(conf,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(conf, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS,
         new Statement(Effects.Allow)
             .addActions(S3_PATH_RW_OPERATIONS)
             .addResources(directory(readOnlyDir))
-            .addResources(directory(writableDir)));
+            .addResources(directory(writableDir))
+            .addResources(resource(writableDirInstructionFile, false, false)));
     roleFS = (S3AFileSystem) readOnlyDir.getFileSystem(conf);
 
     S3AFileSystem fs = getFileSystem();
@@ -733,8 +739,7 @@ public class ITestPartialRenamesDeletes extends AbstractS3ATestBase {
     // s3:DeleteObjectVersion permission, and attempt rename
     // and then delete.
     Configuration roleConfig = createAssumedRoleConfig();
-    bindRolePolicyStatements(roleConfig,
-        STATEMENT_ALLOW_SSE_KMS_RW,
+    bindRolePolicyStatements(roleConfig, STATEMENT_ALLOW_KMS_RW,
         STATEMENT_ALL_BUCKET_READ_ACCESS,  // root:     r-x
         new Statement(Effects.Allow)       // dest:     rwx
             .addActions(S3_PATH_RW_OPERATIONS)
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestS3GuardTool.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestS3GuardTool.java
index 23b14fd3792..f7b9ad4f24b 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestS3GuardTool.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/s3guard/ITestS3GuardTool.java
@@ -70,6 +70,7 @@ public class ITestS3GuardTool extends AbstractS3GuardToolTestBase {
 
   @Test
   public void testLandsatBucketRequireUnencrypted() throws Throwable {
+    skipIfClientSideEncryption();
     run(BucketInfo.NAME,
         "-" + BucketInfo.ENCRYPTION_FLAG, "none",
         getLandsatCSVFile(getConfiguration()));
@@ -178,8 +179,9 @@ public class ITestS3GuardTool extends AbstractS3GuardToolTestBase {
       // least a second old
       describe("Sleeping 1 second then confirming upload still there");
       Thread.sleep(1000);
-      LambdaTestUtils.eventually(5000, 1000,
-          () -> { assertNumUploadsAge(path, 1, 1); });
+      LambdaTestUtils.eventually(5000, 1000, () -> {
+        assertNumUploadsAge(path, 1, 1);
+      });
 
       // 7. Assert deletion works when age filter matches
       describe("Doing aged deletion");
@@ -231,8 +233,8 @@ public class ITestS3GuardTool extends AbstractS3GuardToolTestBase {
    *                   search all parts
    * @throws Exception on failure
    */
-  private void uploadCommandAssertCount(S3AFileSystem fs, String options[],
-      Path path, int numUploads, int ageSeconds)
+  private void uploadCommandAssertCount(S3AFileSystem fs, String[] options, Path path,
+      int numUploads, int ageSeconds)
       throws Exception {
     List<String> allOptions = new ArrayList<>();
     List<String> output = new ArrayList<>();

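For anyone reproducing the CSE-enabled runs this commit fixes: client-side encryption is switched on purely through S3A configuration. A minimal sketch with the documented S3A option names; the KMS key ARN is a placeholder:

    import org.apache.hadoop.conf.Configuration;

    public class EnableCseSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Both option names are the documented S3A encryption settings;
        // the ARN below is a placeholder, not a working key.
        conf.set("fs.s3a.encryption.algorithm", "CSE-KMS");
        conf.set("fs.s3a.encryption.key",
            "arn:aws:kms:REGION:ACCOUNT:key/KEY-ID");
      }
    }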
