This is an automated email from the ASF dual-hosted git repository.

tasanuma pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 67e02a92e0b HADOOP-18666. A whitelist of endpoints to skip Kerberos authentication doesn't work for ResourceManager and Job History Server (#5480)
67e02a92e0b is described below

commit 67e02a92e0b9c4da3dcdd01f231a98f243f12d06
Author: Yubi Lee <eubn...@gmail.com>
AuthorDate: Wed Mar 22 10:54:41 2023 +0900

    HADOOP-18666. A whitelist of endpoints to skip Kerberos authentication doesn't work for ResourceManager and Job History Server (#5480)
---
 .../java/org/apache/hadoop/http/HttpServer2.java   | 13 ++++-
 .../hadoop/http/TestHttpServerWithSpnego.java      | 60 +++++++++++++++++++---
 2 files changed, 63 insertions(+), 10 deletions(-)
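
Context for the change: before this patch, HttpServer2#initSpnego passed only the Kerberos principal and keytab to the SPNEGO filter, so the remaining authentication-filter settings, including the kerberos.endpoint.whitelist key exercised by the new test, never reached daemons that initialize SPNEGO this way (the ResourceManager and Job History Server web UIs named in the JIRA title). A minimal sketch of such a configuration, assuming the test's PREFIX constant resolves to the conventional "hadoop.http.authentication." prefix:

    import org.apache.hadoop.conf.Configuration;

    public class SpnegoWhitelistExample {
      // Assumed value of the test's PREFIX constant; Hadoop's HTTP
      // authentication filter conventionally uses this prefix.
      private static final String PREFIX = "hadoop.http.authentication.";

      public static Configuration whitelistedConf() {
        Configuration conf = new Configuration();
        conf.set(PREFIX + "type", "kerberos");
        // Endpoints listed here are served without SPNEGO; everything else
        // still requires Kerberos authentication.
        conf.set(PREFIX + "kerberos.endpoint.whitelist", "/jmx,/prom");
        return conf;
      }
    }
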

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index 178f761191b..515148e9298 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -497,7 +497,12 @@ public final class HttpServer2 implements FilterContainer {
               prefix -> this.conf.get(prefix + "type")
                   .equals(PseudoAuthenticationHandler.TYPE))
       ) {
-        server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
+        server.initSpnego(
+            conf,
+            hostName,
+            getFilterProperties(conf, authFilterConfigurationPrefixes),
+            usernameConfKey,
+            keytabConfKey);
       }
 
       for (URI ep : endpoints) {
@@ -1340,8 +1345,12 @@ public final class HttpServer2 implements FilterContainer {
   }
 
   private void initSpnego(Configuration conf, String hostName,
-      String usernameConfKey, String keytabConfKey) throws IOException {
+      Properties authFilterConfigurationPrefixes, String usernameConfKey, String keytabConfKey)
+      throws IOException {
     Map<String, String> params = new HashMap<>();
+    for (Map.Entry<Object, Object> entry : authFilterConfigurationPrefixes.entrySet()) {
+      params.put(String.valueOf(entry.getKey()), String.valueOf(entry.getValue()));
+    }
     String principalInConf = conf.get(usernameConfKey);
     if (principalInConf != null && !principalInConf.isEmpty()) {
       params.put("kerberos.principal", SecurityUtil.getServerPrincipal(
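
In short, initSpnego now receives the full set of authentication-filter properties gathered by getFilterProperties and copies them into the filter's parameter map, so keys like kerberos.endpoint.whitelist travel alongside kerberos.principal and kerberos.keytab. A standalone sketch of that copy, with names taken from the hunk above (illustration only, not the complete method):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Properties;

    final class FilterParamSketch {
      static Map<String, String> toFilterParams(Properties authFilterConfigurationPrefixes) {
        Map<String, String> params = new HashMap<>();
        // Forward every configured auth-filter property, including
        // kerberos.endpoint.whitelist, to the SPNEGO filter.
        for (Map.Entry<Object, Object> entry : authFilterConfigurationPrefixes.entrySet()) {
          params.put(String.valueOf(entry.getKey()), String.valueOf(entry.getValue()));
        }
        return params;
      }
    }
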
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java
index dfcd98801de..cddbc2a1959 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpServerWithSpnego.java
@@ -19,8 +19,10 @@ package org.apache.hadoop.http;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.AuthenticationFilterInitializer;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
@@ -104,7 +106,9 @@ public class TestHttpServerWithSpnego {
    */
   @Test
   public void testAuthenticationWithProxyUser() throws Exception {
-    Configuration spengoConf = getSpengoConf(new Configuration());
+    Configuration spnegoConf = getSpnegoConf(new Configuration());
+    spnegoConf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
+        ProxyUserAuthenticationFilterInitializer.class.getName());
 
     //setup logs dir
     System.setProperty("hadoop.log.dir", testRootDir.getAbsolutePath());
@@ -118,15 +122,15 @@ public class TestHttpServerWithSpnego {
         new String[]{"groupC"});
 
     // Make userA impersonate users in groupB
-    spengoConf.set("hadoop.proxyuser.userA.hosts", "*");
-    spengoConf.set("hadoop.proxyuser.userA.groups", "groupB");
-    ProxyUsers.refreshSuperUserGroupsConfiguration(spengoConf);
+    spnegoConf.set("hadoop.proxyuser.userA.hosts", "*");
+    spnegoConf.set("hadoop.proxyuser.userA.groups", "groupB");
+    ProxyUsers.refreshSuperUserGroupsConfiguration(spnegoConf);
 
     HttpServer2 httpServer = null;
     try {
       // Create http server to test.
       httpServer = getCommonBuilder()
-          .setConf(spengoConf)
+          .setConf(spnegoConf)
           .setACL(new AccessControlList("userA groupA"))
           .build();
       httpServer.start();
@@ -191,6 +195,48 @@ public class TestHttpServerWithSpnego {
     }
   }
 
+  @Test
+  public void testAuthenticationToAllowList() throws Exception {
+    Configuration spnegoConf = getSpnegoConf(new Configuration());
+    String[] allowList = new String[] {"/jmx", "/prom"};
+    String[] denyList = new String[] {"/conf", "/stacks", "/logLevel"};
+    spnegoConf.set(PREFIX + "kerberos.endpoint.whitelist", String.join(",", allowList));
+    spnegoConf.set(CommonConfigurationKeysPublic.HADOOP_PROMETHEUS_ENABLED, "true");
+    spnegoConf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
+        AuthenticationFilterInitializer.class.getName());
+
+    //setup logs dir
+    System.setProperty("hadoop.log.dir", testRootDir.getAbsolutePath());
+
+    HttpServer2 httpServer = null;
+    try {
+      // Create http server to test.
+      httpServer = getCommonBuilder().setConf(spnegoConf).setSecurityEnabled(true)
+          .setUsernameConfKey(PREFIX + "kerberos.principal")
+          .setKeytabConfKey(PREFIX + "kerberos.keytab").build();
+      httpServer.start();
+
+      String serverURL = "http://" + NetUtils.getHostPortString(httpServer.getConnectorAddress(0));
+
+      // endpoints in whitelist should not require Kerberos authentication
+      for (String endpoint : allowList) {
+        HttpURLConnection conn = (HttpURLConnection) new URL(serverURL + endpoint).openConnection();
+        Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+      }
+
+      // endpoints not in whitelist should require Kerberos authentication
+      for (String endpoint : denyList) {
+        HttpURLConnection conn = (HttpURLConnection) new URL(serverURL + endpoint).openConnection();
+        Assert.assertEquals(HttpURLConnection.HTTP_UNAUTHORIZED, conn.getResponseCode());
+      }
+
+    } finally {
+      if (httpServer != null) {
+        httpServer.stop();
+      }
+    }
+  }
+
   private AuthenticatedURL.Token getEncryptedAuthToken(Signer signer,
       String user) throws Exception {
     AuthenticationToken token =
@@ -209,10 +255,8 @@ public class TestHttpServerWithSpnego {
     return new Signer(secretProvider);
   }
 
-  private Configuration getSpengoConf(Configuration conf) {
+  private Configuration getSpnegoConf(Configuration conf) {
     conf = new Configuration();
-    conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
-        ProxyUserAuthenticationFilterInitializer.class.getName());
     conf.set(PREFIX + "type", "kerberos");
     conf.setBoolean(PREFIX + "simple.anonymous.allowed", false);
     conf.set(PREFIX + "signature.secret.file",
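
For reference, a hypothetical probe mirroring the assertions in the new test: an unauthenticated request to a whitelisted endpoint such as /jmx is expected to return 200, while a non-whitelisted endpoint such as /conf should return 401. The serverURL argument is assumed to point at a secured HttpServer2 instance started with a whitelist configuration like the one sketched earlier.

    import java.net.HttpURLConnection;
    import java.net.URL;

    final class WhitelistProbe {
      // Returns the HTTP status code for an unauthenticated request:
      // expected 200 for whitelisted endpoints, 401 for everything else.
      static int probe(String serverURL, String endpoint) throws Exception {
        HttpURLConnection conn =
            (HttpURLConnection) new URL(serverURL + endpoint).openConnection();
        return conn.getResponseCode();
      }
    }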


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org
