Repository: hadoop
Updated Branches:
  refs/heads/trunk 0fd3980a1 -> c447efebd


HDFS-10579. HDFS web interfaces lack configs for X-FRAME-OPTIONS protection. Contributed by Anu Engineer.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c447efeb
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c447efeb
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c447efeb

Branch: refs/heads/trunk
Commit: c447efebdb92dcdf3d95e983036f53bfbed2c0b4
Parents: 0fd3980
Author: Jitendra Pandey <jiten...@apache.org>
Authored: Mon Jul 11 14:55:33 2016 -0700
Committer: Jitendra Pandey <jiten...@apache.org>
Committed: Mon Jul 11 14:55:33 2016 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |  6 ++
 .../server/datanode/web/DatanodeHttpServer.java | 10 ++
 .../server/namenode/NameNodeHttpServer.java     | 20 ++++
 .../src/main/resources/hdfs-default.xml         | 24 +++++
 .../datanode/web/TestDatanodeHttpXFrame.java    | 90 ++++++++++++++++++
 .../namenode/TestNameNodeHttpServerXFrame.java  | 97 ++++++++++++++++++++
 6 files changed, 247 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c447efeb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 08365cd..e734055 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -741,6 +741,12 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   // Security-related configs
   public static final String DFS_ENCRYPT_DATA_TRANSFER_KEY = "dfs.encrypt.data.transfer";
   public static final boolean DFS_ENCRYPT_DATA_TRANSFER_DEFAULT = false;
+  public static final String DFS_XFRAME_OPTION_ENABLED = "dfs.xframe.enabled";
+  public static final boolean DFS_XFRAME_OPTION_ENABLED_DEFAULT = true;
+
+  public static final String DFS_XFRAME_OPTION_VALUE = "dfs.xframe.value";
+  public static final String DFS_XFRAME_OPTION_VALUE_DEFAULT = "SAMEORIGIN";
+
   @Deprecated
   public static final String DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_KEY =
       HdfsClientConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_KEY;
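
For reference, here is a minimal sketch (not part of this commit) of how the new
keys could be set programmatically, assuming a standard HdfsConfiguration; the
same keys can also be set in hdfs-site.xml:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.HdfsConfiguration;

    // Hypothetical operator/test code: tighten the default SAMEORIGIN policy to DENY
    // before starting the HDFS daemons.
    Configuration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED, true);
    conf.set(DFSConfigKeys.DFS_XFRAME_OPTION_VALUE, "DENY");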

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c447efeb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
index f9bdbf6..07b779b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
@@ -105,6 +105,16 @@ public class DatanodeHttpServer implements Closeable {
         .addEndpoint(URI.create("http://localhost:0"))
         .setFindPort(true);
 
+    final boolean xFrameEnabled = conf.getBoolean(
+        DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED,
+        DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED_DEFAULT);
+
+    final String xFrameOptionValue = conf.getTrimmed(
+        DFSConfigKeys.DFS_XFRAME_OPTION_VALUE,
+        DFSConfigKeys.DFS_XFRAME_OPTION_VALUE_DEFAULT);
+
+    builder.configureXFrame(xFrameEnabled).setXFrameOption(xFrameOptionValue);
+
     this.infoServer = builder.build();
 
     this.infoServer.setAttribute("datanode", datanode);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c447efeb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index 84229e7..a1959e4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -29,6 +29,7 @@ import java.util.Map.Entry;
 
 import javax.servlet.ServletContext;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ha.HAServiceProtocol;
@@ -139,6 +140,16 @@ public class NameNodeHttpServer {
         DFSConfigKeys.DFS_NAMENODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY,
         DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);
 
+    final boolean xFrameEnabled = conf.getBoolean(
+        DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED,
+        DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED_DEFAULT);
+
+    final String xFrameOptionValue = conf.getTrimmed(
+        DFSConfigKeys.DFS_XFRAME_OPTION_VALUE,
+        DFSConfigKeys.DFS_XFRAME_OPTION_VALUE_DEFAULT);
+
+    builder.configureXFrame(xFrameEnabled).setXFrameOption(xFrameOptionValue);
+
     httpServer = builder.build();
 
     if (policy.isHttpsEnabled()) {
@@ -310,4 +321,13 @@ public class NameNodeHttpServer {
   public static HAServiceProtocol.HAServiceState getNameNodeStateFromContext(ServletContext context) {
     return getNameNodeFromContext(context).getServiceState();
   }
+
+  /**
+   * Returns the httpServer.
+   * @return HttpServer2
+   */
+  @VisibleForTesting
+  public HttpServer2 getHttpServer() {
+    return httpServer;
+  }
 }
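
The new tests below exercise this behavior end to end; as a rough standalone
sketch (not from this commit, with nnWebAddress as a hypothetical placeholder
for the NameNode web UI address), the header can be checked like this:

    import java.net.HttpURLConnection;
    import java.net.URL;

    // nnWebAddress is a placeholder, e.g. "http://<namenode-host>:<http-port>".
    URL url = new URL(nnWebAddress);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.connect();
    // Expected to be "SAMEORIGIN" with the defaults, or absent (null) when
    // dfs.xframe.enabled is set to false.
    String xfo = conn.getHeaderField("X-FRAME-OPTIONS");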

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c447efeb/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
index a198b71..c8dc66b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
@@ -2951,6 +2951,30 @@
   </description>
 </property>
 
+  <property>
+    <name>dfs.xframe.enabled</name>
+    <value>true</value>
+    <description>
+      If true, enables protection against clickjacking by returning an
+      X-FRAME-OPTIONS header whose value is set to SAMEORIGIN.
+      Clickjacking protection prevents an attacker from using transparent
+      or opaque layers to trick a user into clicking on a button or link
+      on another page.
+    </description>
+  </property>
+
+  <property>
+    <name>dfs.xframe.value</name>
+    <value>SAMEORIGIN</value>
+    <description>
+      This configuration value allows the user to specify the value for
+      the X-FRAME-OPTIONS header. The possible values for this field are
+      DENY, SAMEORIGIN and ALLOW-FROM. Any other value will cause an
+      exception to be thrown when the NameNode and DataNodes start up.
+    </description>
+  </property>
+
+
 <property>
   <name>dfs.balancer.keytab.enabled</name>
   <value>false</value>
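
As a usage illustration (not part of this commit), an operator who wants to
disallow framing entirely could override the default value in hdfs-site.xml,
relying on the validation described above to reject anything other than DENY,
SAMEORIGIN or ALLOW-FROM:

  <property>
    <name>dfs.xframe.value</name>
    <value>DENY</value>
  </property>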

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c447efeb/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java
new file mode 100644
index 0000000..9ecd8ea
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/web/TestDatanodeHttpXFrame.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode.web;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.http.HttpServer2;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+/**
+ * Test that X-Frame-Options works correctly with DatanodeHTTPServer.
+ */
+public class TestDatanodeHttpXFrame {
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  @Test
+  public void testDataNodeXFrameOptionsEnabled() throws Exception {
+    boolean xFrameEnabled = true;
+    MiniDFSCluster cluster = createCluster(xFrameEnabled, null);
+    HttpURLConnection conn = getConn(cluster);
+    String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
+    Assert.assertTrue("X-FRAME-OPTIONS is absent in the header",
+        xfoHeader != null);
+    Assert.assertTrue(xfoHeader.endsWith(HttpServer2.XFrameOption
+        .SAMEORIGIN.toString()));
+  }
+
+  @Test
+  public void testDataNodeXFrameOptionsDisabled() throws Exception {
+    boolean xFrameEnabled = false;
+    MiniDFSCluster cluster = createCluster(xFrameEnabled, null);
+    HttpURLConnection conn = getConn(cluster);
+    String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
+    Assert.assertTrue("unexpected X-FRAME-OPTION in header", xfoHeader == null);
+  }
+
+  @Test
+  public void testDataNodeXFramewithInvalidOptions() throws Exception {
+    exception.expect(IllegalArgumentException.class);
+    createCluster(false, "Hadoop");
+  }
+
+  private MiniDFSCluster createCluster(boolean enabled, String
+      value) throws IOException {
+    Configuration conf = new HdfsConfiguration();
+    conf.setBoolean(DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED, enabled);
+    if (value != null) {
+      conf.set(DFSConfigKeys.DFS_XFRAME_OPTION_VALUE, value);
+    }
+    MiniDFSCluster cluster =
+        new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
+    cluster.waitActive();
+    return cluster;
+  }
+
+  private HttpURLConnection getConn(MiniDFSCluster cluster)
+      throws IOException {
+    DataNode datanode = cluster.getDataNodes().get(0);
+    URL newURL = new URL("http://localhost:" + datanode.getInfoPort());
+    HttpURLConnection conn = (HttpURLConnection) newURL.openConnection();
+    conn.connect();
+    return conn;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c447efeb/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServerXFrame.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServerXFrame.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServerXFrame.java
new file mode 100644
index 0000000..947e951
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServerXFrame.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdfs.server.namenode;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.net.NetUtils;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.InetSocketAddress;
+import java.net.MalformedURLException;
+import java.net.URL;
+
+/**
+ * A class to test the X-Frame-Options handling of the NameNode HTTP server.
+ * We do not reuse TestNameNodeHttpServer since it is a parameterized class
+ * and the following tests would run multiple times doing the same thing if
+ * the code lived in that class.
+ */
+public class TestNameNodeHttpServerXFrame {
+
+  @Rule
+  public ExpectedException exception = ExpectedException.none();
+
+  public static URL getServerURL(HttpServer2 server)
+      throws MalformedURLException {
+    Assert.assertNotNull("No server", server);
+    return new URL("http://"
+        + NetUtils.getHostPortString(server.getConnectorAddress(0)));
+  }
+
+  @Test
+  public void testNameNodeXFrameOptionsEnabled() throws Exception {
+    HttpURLConnection conn = createServerwithXFrame(true, null);
+    String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
+    Assert.assertTrue("X-FRAME-OPTIONS is absent in the header",
+        xfoHeader != null);
+    Assert.assertTrue(xfoHeader.endsWith(HttpServer2.XFrameOption
+        .SAMEORIGIN.toString()));
+  }
+
+  @Test
+  public void testNameNodeXFrameOptionsDisabled() throws Exception {
+    HttpURLConnection conn = createServerwithXFrame(false, null);
+    String xfoHeader = conn.getHeaderField("X-FRAME-OPTIONS");
+    Assert.assertTrue("unexpected X-FRAME-OPTION in header", xfoHeader == null);
+  }
+
+  @Test
+  public void testNameNodeXFrameOptionsIllegalOption() throws Exception {
+    exception.expect(IllegalArgumentException.class);
+    createServerwithXFrame(true, "hadoop");
+  }
+
+  private HttpURLConnection createServerwithXFrame(boolean enabled, String
+      value) throws IOException {
+    Configuration conf = new HdfsConfiguration();
+    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
+    conf.setBoolean(DFSConfigKeys.DFS_XFRAME_OPTION_ENABLED, enabled);
+    if (value != null) {
+      conf.set(DFSConfigKeys.DFS_XFRAME_OPTION_VALUE, value);
+
+    }
+    InetSocketAddress addr = InetSocketAddress.createUnresolved("localhost", 0);
+    NameNodeHttpServer server = null;
+
+    server = new NameNodeHttpServer(conf, null, addr);
+    server.start();
+
+    URL url = getServerURL(server.getHttpServer());
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    conn.connect();
+    return conn;
+  }
+}

