Author: kihwal
Date: Mon Apr 28 14:01:39 2014
New Revision: 1590641

URL: http://svn.apache.org/r1590641
Log:
svn merge -c 1590640 merging from trunk to branch-2 to fix: HDFS-6218. Audit log
should use true client IP for proxied webhdfs operations. Contributed by Daryn
Sharp.

Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1590641&r1=1590640&r2=1590641&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Mon Apr 28 14:01:39 2014
@@ -157,6 +157,9 @@ Release 2.5.0 - UNRELEASED
     HDFS-6270. Secondary namenode status page shows transaction count in bytes.
     (Benoy Antony via wheat9)
 
+    HDFS-6218. Audit log should use true client IP for proxied webhdfs
+    operations. (daryn via kihwal)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java?rev=1590641&r1=1590640&r2=1590641&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java Mon Apr 28 14:01:39 2014
@@ -669,7 +669,7 @@ public class JspHelper {
   // honor the X-Forwarded-For header set by a configured set of trusted
   // proxy servers.  allows audit logging and proxy user checks to work
   // via an http proxy
-  static String getRemoteAddr(HttpServletRequest request) {
+  public static String getRemoteAddr(HttpServletRequest request) {
     String remoteAddr = request.getRemoteAddr();
     String proxyHeader = request.getHeader("X-Forwarded-For");
     if (proxyHeader != null && ProxyUsers.isProxyServer(remoteAddr)) {
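
For readers following along, the hunk above only shows the head of getRemoteAddr();
the visibility change (package-private to public) is the whole edit to JspHelper.
A minimal, self-contained sketch of the trusted-proxy lookup this method performs
is below. The set-based trust check and header parsing are illustrative stand-ins
for ProxyUsers.isProxyServer() and the real JspHelper logic, not a copy of the
patched code.

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class RemoteAddrSketch {
      // stand-in for the configured list of trusted proxy servers
      private static final Set<String> TRUSTED_PROXIES =
          new HashSet<>(Arrays.asList("127.0.0.1"));

      // Return the proxied client's address when the request arrived via a
      // trusted proxy that set X-Forwarded-For; otherwise the socket peer.
      static String effectiveClientAddr(String remoteAddr, String xForwardedFor) {
        if (xForwardedFor != null && TRUSTED_PROXIES.contains(remoteAddr)) {
          return xForwardedFor.split(",")[0].trim(); // first hop is the client
        }
        return remoteAddr;
      }

      public static void main(String[] args) {
        System.out.println(effectiveClientAddr("127.0.0.1", "1.1.1.1")); // 1.1.1.1
        System.out.println(effectiveClientAddr("10.0.0.5", "1.1.1.1"));  // 10.0.0.5
      }
    }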

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1590641&r1=1590640&r2=1590641&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Mon Apr 28 14:01:39 2014
@@ -162,8 +162,16 @@ public class NamenodeWebHdfsMethods {
 
     //clear content type
     response.setContentType(null);
+    
+    // set the remote address; if coming in via a trusted proxy server then
+    // the address will be that of the proxied client
+    REMOTE_ADDRESS.set(JspHelper.getRemoteAddr(request));
   }
 
+  private void reset() {
+    REMOTE_ADDRESS.set(null);
+  }
+  
   private static NamenodeProtocols getRPCServer(NameNode namenode)
       throws IOException {
      final NamenodeProtocols np = namenode.getRpcServer();
@@ -394,7 +402,6 @@ public class NamenodeWebHdfsMethods {
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
       public Response run() throws IOException, URISyntaxException {
-        REMOTE_ADDRESS.set(request.getRemoteAddr());
         try {
           return put(ugi, delegation, username, doAsUser,
               path.getAbsolutePath(), op, destination, owner, group,
@@ -402,7 +409,7 @@ public class NamenodeWebHdfsMethods {
               modificationTime, accessTime, renameOptions, createParent,
               delegationTokenArgument,aclPermission);
         } finally {
-          REMOTE_ADDRESS.set(null);
+          reset();
         }
       }
     });
@@ -583,12 +590,11 @@ public class NamenodeWebHdfsMethods {
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
       public Response run() throws IOException, URISyntaxException {
-        REMOTE_ADDRESS.set(request.getRemoteAddr());
         try {
           return post(ugi, delegation, username, doAsUser,
               path.getAbsolutePath(), op, concatSrcs, bufferSize);
         } finally {
-          REMOTE_ADDRESS.set(null);
+          reset();
         }
       }
     });
@@ -681,12 +687,11 @@ public class NamenodeWebHdfsMethods {
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
       public Response run() throws IOException, URISyntaxException {
-        REMOTE_ADDRESS.set(request.getRemoteAddr());
         try {
           return get(ugi, delegation, username, doAsUser,
               path.getAbsolutePath(), op, offset, length, renewer, bufferSize);
         } finally {
-          REMOTE_ADDRESS.set(null);
+          reset();
         }
       }
     });
@@ -889,12 +894,11 @@ public class NamenodeWebHdfsMethods {
     return ugi.doAs(new PrivilegedExceptionAction<Response>() {
       @Override
       public Response run() throws IOException {
-        REMOTE_ADDRESS.set(request.getRemoteAddr());
         try {
           return delete(ugi, delegation, username, doAsUser,
               path.getAbsolutePath(), op, recursive);
         } finally {
-          REMOTE_ADDRESS.set(null);
+          reset();
         }
       }
     });
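
The pattern above is worth calling out: instead of each HTTP handler setting
REMOTE_ADDRESS from request.getRemoteAddr(), the thread-local is now populated
once in init() (via JspHelper.getRemoteAddr(), so trusted proxies are honored)
and cleared in every handler's finally block through the new reset() helper. A
minimal sketch of that set-in-init / clear-in-finally thread-local pattern, with
hypothetical names standing in for the WebHDFS handler plumbing:

    public class ThreadLocalAddressSketch {
      // analogous to NamenodeWebHdfsMethods.REMOTE_ADDRESS
      private static final ThreadLocal<String> REMOTE_ADDRESS = new ThreadLocal<>();

      // analogous to init(): record the caller's address for this request thread
      static void init(String resolvedClientAddr) {
        REMOTE_ADDRESS.set(resolvedClientAddr);
      }

      // analogous to reset(): always clear, so a pooled thread never leaks the
      // previous request's address into the next request's audit record
      static void reset() {
        REMOTE_ADDRESS.set(null);
      }

      static String handleRequest(String resolvedClientAddr) {
        init(resolvedClientAddr);
        try {
          return "audit as " + REMOTE_ADDRESS.get();
        } finally {
          reset();
        }
      }

      public static void main(String[] args) {
        System.out.println(handleRequest("1.1.1.1")); // audit as 1.1.1.1
      }
    }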

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java?rev=1590641&r1=1590640&r2=1590641&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java Mon Apr 28 14:01:39 2014
@@ -24,7 +24,10 @@ import static org.junit.Assert.assertTru
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
+import java.net.HttpURLConnection;
 import java.net.InetAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -33,9 +36,11 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
+import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.junit.Before;
 import org.junit.Test;
 
 /**
@@ -45,6 +50,16 @@ public class TestAuditLogger {
 
   private static final short TEST_PERMISSION = (short) 0654;
 
+  @Before
+  public void setup() {
+    DummyAuditLogger.initialized = false;
+    DummyAuditLogger.logCount = 0;
+    DummyAuditLogger.remoteAddr = null;
+
+    Configuration conf = new HdfsConfiguration();
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);    
+  }
+
   /**
    * Tests that AuditLogger works as expected.
    */
@@ -69,6 +84,57 @@ public class TestAuditLogger {
     }
   }
 
+  @Test
+  public void testWebHdfsAuditLogger() throws IOException, URISyntaxException {
+    Configuration conf = new HdfsConfiguration();
+    conf.set(DFS_NAMENODE_AUDIT_LOGGERS_KEY,
+        DummyAuditLogger.class.getName());
+    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
+    
+    GetOpParam.Op op = GetOpParam.Op.GETFILESTATUS;
+    try {
+      cluster.waitClusterUp();
+      assertTrue(DummyAuditLogger.initialized);      
+      URI uri = new URI(
+          "http",
+          NetUtils.getHostPortString(cluster.getNameNode().getHttpAddress()),
+          "/webhdfs/v1/", op.toQueryString(), null);
+      
+      // non-proxy request
+      HttpURLConnection conn = (HttpURLConnection) uri.toURL().openConnection();
+      conn.setRequestMethod(op.getType().toString());
+      conn.connect();
+      assertEquals(200, conn.getResponseCode());
+      conn.disconnect();
+      assertEquals(1, DummyAuditLogger.logCount);
+      assertEquals("127.0.0.1", DummyAuditLogger.remoteAddr);
+      
+      // non-trusted proxied request
+      conn = (HttpURLConnection) uri.toURL().openConnection();
+      conn.setRequestMethod(op.getType().toString());
+      conn.setRequestProperty("X-Forwarded-For", "1.1.1.1");
+      conn.connect();
+      assertEquals(200, conn.getResponseCode());
+      conn.disconnect();
+      assertEquals(2, DummyAuditLogger.logCount);
+      assertEquals("127.0.0.1", DummyAuditLogger.remoteAddr);
+      
+      // trusted proxied request
+      conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, "127.0.0.1");
+      ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+      conn = (HttpURLConnection) uri.toURL().openConnection();
+      conn.setRequestMethod(op.getType().toString());
+      conn.setRequestProperty("X-Forwarded-For", "1.1.1.1");
+      conn.connect();
+      assertEquals(200, conn.getResponseCode());
+      conn.disconnect();
+      assertEquals(3, DummyAuditLogger.logCount);
+      assertEquals("1.1.1.1", DummyAuditLogger.remoteAddr);
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
   /**
    * Minor test related to HADOOP-9155. Verify that during a
    * FileSystem.setPermission() operation, the stat passed in during the
@@ -128,7 +194,8 @@ public class TestAuditLogger {
     static boolean initialized;
     static int logCount;
     static short foundPermission;
-
+    static String remoteAddr;
+    
     public void initialize(Configuration conf) {
       initialized = true;
     }
@@ -140,6 +207,7 @@ public class TestAuditLogger {
     public void logAuditEvent(boolean succeeded, String userName,
         InetAddress addr, String cmd, String src, String dst,
         FileStatus stat) {
+      remoteAddr = addr.getHostAddress();
       logCount++;
       if (stat != null) {
         foundPermission = stat.getPermission().toShort();
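
As the new test shows, X-Forwarded-For is only honored once the proxy host has
been registered as a trusted proxy server and the ProxyUsers cache refreshed.
Outside of a test, the same two calls (shown below with placeholder host names)
would be driven by cluster configuration rather than set programmatically; the
constant and refresh method are the ones the test above already uses.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.authorize.ProxyUsers;

    public class TrustedProxyConfigSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // comma-separated list of hosts allowed to set X-Forwarded-For
        conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS,
            "proxy1.example.com,proxy2.example.com");
        // make the new trusted-proxy list visible to ProxyUsers.isProxyServer()
        ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
      }
    }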

