Repository: ambari
Updated Branches:
  refs/heads/trunk 1ef3c74b7 -> 57645a814


AMBARI-9750. Ambari Views: Kerberos support in Files View (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3716c553
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3716c553
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3716c553

Branch: refs/heads/trunk
Commit: 3716c553a9234304d343fab8a1d4250ef9f59786
Parents: 1ef3c74
Author: Alex Antonenko <hiv...@gmail.com>
Authored: Mon Feb 23 18:52:36 2015 +0200
Committer: Alex Antonenko <hiv...@gmail.com>
Committed: Mon Feb 23 19:40:48 2015 +0200

----------------------------------------------------------------------
 .../apache/ambari/view/filebrowser/HdfsApi.java | 40 ++++++++++++++---
 .../ambari/view/filebrowser/HdfsService.java    | 46 ++++++++++++++++++--
 contrib/views/files/src/main/resources/view.xml | 13 +++---
 .../view/filebrowser/FilebrowserTest.java       |  9 ++--
 4 files changed, 89 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3716c553/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
index 1c21e58..ef87001 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsApi.java
@@ -20,7 +20,6 @@ package org.apache.ambari.view.filebrowser;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
-import org.apache.hadoop.fs.permission.AccessControlException;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 
@@ -42,6 +41,7 @@ import java.util.LinkedHashMap;
  */
 public class HdfsApi {
   private final Configuration conf = new Configuration();
+  private final Map<String, String> params;
 
   private FileSystem fs;
   private UserGroupInformation ugi;
@@ -49,18 +49,46 @@ public class HdfsApi {
   /**
    * Constructor
    * @param defaultFs hdfs uri
-   * @param username user.name
+   * @param params map of parameters
    * @throws IOException
    * @throws InterruptedException
    */
-  public HdfsApi(String defaultFs, String username) throws IOException,
+  public HdfsApi(final String defaultFs, String username, Map<String, String> params) throws IOException,
       InterruptedException {
+    this.params = params;
     conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
     conf.set("fs.webhdfs.impl", 
"org.apache.hadoop.hdfs.web.WebHdfsFileSystem");
     conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
-    fs = FileSystem.get(URI.create(defaultFs), conf, username);
-    ugi = UserGroupInformation.createProxyUser(username,
-        UserGroupInformation.getLoginUser());
+
+    ugi = UserGroupInformation.createProxyUser(username, getProxyUser());
+
+    fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+      public FileSystem run() throws IOException {
+        return FileSystem.get(URI.create(defaultFs), conf);
+      }
+    });
+  }
+
+  private UserGroupInformation getProxyUser() throws IOException {
+    UserGroupInformation proxyuser;
+    if (params.containsKey("proxyuser")) {
+      proxyuser = UserGroupInformation.createRemoteUser(params.get("proxyuser"));
+    } else {
+      proxyuser = UserGroupInformation.getCurrentUser();
+    }
+
+    proxyuser.setAuthenticationMethod(getAuthenticationMethod());
+    return proxyuser;
+  }
+
+  private UserGroupInformation.AuthenticationMethod getAuthenticationMethod() {
+    UserGroupInformation.AuthenticationMethod authMethod;
+    if (params.containsKey("auth")) {
+      authMethod = UserGroupInformation.AuthenticationMethod.valueOf(params.get("auth"));
+    } else {
+      authMethod = UserGroupInformation.AuthenticationMethod.SIMPLE;
+    }
+    return authMethod;
   }
 
   /**
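
For reference, a minimal sketch (not part of the patch) of how the new constructor might be driven; the user and host names below are illustrative assumptions, not defaults shipped with the view:

    // Hypothetical caller; "ambari-server", "admin" and the URI are example values only.
    Map<String, String> params = new HashMap<String, String>();
    params.put("auth", "KERBEROS");            // consumed by getAuthenticationMethod()
    params.put("proxyuser", "ambari-server");  // consumed by getProxyUser()
    HdfsApi api = new HdfsApi("webhdfs://namenode:50070", "admin", params);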

http://git-wip-us.apache.org/repos/asf/ambari/blob/3716c553/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
index 1fd5719..765cb79 100644
--- a/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
+++ b/contrib/views/files/src/main/java/org/apache/ambari/view/filebrowser/HdfsService.java
@@ -23,9 +23,14 @@ import javax.xml.bind.annotation.XmlRootElement;
 import org.apache.ambari.view.ViewContext;
 import org.apache.ambari.view.filebrowser.utils.MisconfigurationFormattedException;
 import org.apache.ambari.view.filebrowser.utils.ServiceFormattedException;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * Base Hdfs service
  */
@@ -63,12 +68,12 @@ public abstract class HdfsService {
    */
   public HdfsApi getApi(ViewContext context) {
     if (_api == null) {
-      Thread.currentThread().setContextClassLoader(null);
+//      Thread.currentThread().setContextClassLoader(null);
       String defaultFs = context.getProperties().get("webhdfs.url");
       if (defaultFs == null)
         throw new MisconfigurationFormattedException("webhdfs.url");
       try {
-        _api = new HdfsApi(defaultFs, getUsername(context));
+        _api = new HdfsApi(defaultFs, getDoAsUsername(context), getHdfsAuthParams(context));
       } catch (Exception ex) {
         throw new ServiceFormattedException("HdfsApi connection failed. Check \"webhdfs.url\" property", ex);
       }
@@ -76,15 +81,48 @@ public abstract class HdfsService {
     return _api;
   }
 
+  private static Map<String, String> getHdfsAuthParams(ViewContext context) {
+    String auth = context.getProperties().get("webhdfs.auth");
+    Map<String, String> params = new HashMap<String, String>();
+    if (auth == null || auth.isEmpty()) {
+      auth = "auth=SIMPLE";
+    }
+    for(String param : auth.split(";")) {
+      String[] keyvalue = param.split("=");
+      if (keyvalue.length != 2) {
+        logger.error("Can not parse authentication param " + param + " in " + 
auth);
+        continue;
+      }
+      params.put(keyvalue[0], keyvalue[1]);
+    }
+    return params;
+  }
+
   /**
-   * Get username to use in HDFS
+   * Get doAs username to use in HDFS
    * @param context View Context instance
    * @return user name
    */
-  public String getUsername(ViewContext context) {
+  public String getDoAsUsername(ViewContext context) {
     String username = context.getProperties().get("webhdfs.username");
     if (username == null || username.isEmpty())
       username = context.getUsername();
     return username;
   }
+
+  /**
+   * Get proxyuser username to use in HDFS
+   * @param context View Context instance
+   * @return user name
+   */
+  public String getRealUsername(ViewContext context) {
+    String username = context.getProperties().get("webhdfs.proxyuser");
+    if (username == null || username.isEmpty())
+      try {
+        username = UserGroupInformation.getCurrentUser().getShortUserName();
+      } catch (IOException e) {
+        throw new ServiceFormattedException("HdfsApi connection failed. Can't get current user", e);
+      }
+    return username;
+  }
 }
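
A standalone sketch (again, not in the patch) of the parsing rule getHdfsAuthParams implements, using an assumed value for the "webhdfs.auth" property:

    // Assumed example value; key=value pairs are semicolon-separated.
    String auth = "auth=KERBEROS;proxyuser=ambari-server";
    Map<String, String> params = new HashMap<String, String>();
    for (String param : auth.split(";")) {
      String[] keyvalue = param.split("=");
      if (keyvalue.length == 2) {
        params.put(keyvalue[0], keyvalue[1]);
      }
    }
    // params now holds {auth=KERBEROS, proxyuser=ambari-server};
    // an unset or empty "webhdfs.auth" property falls back to {auth=SIMPLE}.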

http://git-wip-us.apache.org/repos/asf/ambari/blob/3716c553/contrib/views/files/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/main/resources/view.xml b/contrib/views/files/src/main/resources/view.xml
index 7dbdfa9..206128d 100644
--- a/contrib/views/files/src/main/resources/view.xml
+++ b/contrib/views/files/src/main/resources/view.xml
@@ -18,19 +18,20 @@
     <name>FILES</name>
     <label>Files</label>
     <version>0.1.0</version>
-    <min-ambari-version>1.7.*</min-ambari-version>
 
     <parameter>
         <name>webhdfs.url</name>
-        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address property in the hdfs-site.xml configuration. URL must be accessible from Ambari Server.</description>
-        <label>WebHDFS FileSystem URI</label>
-        <placeholder>webhdfs://namenode:50070</placeholder>
+        <description>WebHDFS FileSystem URI (example: webhdfs://namenode:50070)</description>
         <required>true</required>
     </parameter>
     <parameter>
         <name>webhdfs.username</name>
-        <description>User and doAs for proxy user for HDFS. By default, uses the currently logged-in Ambari user.</description>
-        <label>WebHDFS Username</label>
+        <description>doAs for proxy user for HDFS</description>
+        <required>false</required>
+    </parameter>
+    <parameter>
+        <name>webhdfs.auth</name>
+        <description>Semicolon-separated authentication configs. Default: auth=SIMPLE</description>
         <required>false</required>
     </parameter>
 
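For illustration (values assumed, not defaults): on a kerberized cluster an administrator might set webhdfs.auth to "auth=KERBEROS;proxyuser=ambari-server", while leaving it empty behaves as "auth=SIMPLE"; webhdfs.username still overrides the doAs user and defaults to the logged-in Ambari user.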

http://git-wip-us.apache.org/repos/asf/ambari/blob/3716c553/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java b/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
index b8dcb92..9fa3e9b 100644
--- a/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
+++ b/contrib/views/files/src/test/java/org/apache/ambari/view/filebrowser/FilebrowserTest.java
@@ -45,7 +45,6 @@ import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
-import org.junit.Ignore;
 
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
@@ -81,6 +80,9 @@ public class FilebrowserTest{
     FileUtil.fullyDelete(baseDir);
     Configuration conf = new Configuration();
     conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
+    conf.set("hadoop.proxyuser." + System.getProperty("user.name") + 
".groups", "*");
+    conf.set("hadoop.proxyuser." + System.getProperty("user.name") + ".hosts", 
"*");
+
     MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
     hdfsCluster = builder.build();
     String hdfsURI = hdfsCluster.getURI() + "/";
@@ -89,6 +91,7 @@ public class FilebrowserTest{
     expect(context.getUsername()).andReturn(System.getProperty("user.name")).anyTimes();
     replay(handler, context, httpHeaders, uriInfo);
     fileBrowserService = getService(FileBrowserService.class, handler, context);
+
     FileOperationService.MkdirRequest request = new FileOperationService.MkdirRequest();
     request.path = "/tmp";
     fileBrowserService.fileOps().mkdir(request);
@@ -157,9 +160,9 @@ public class FilebrowserTest{
 
   @Test
   public void testUsername() throws Exception {
-    Assert.assertEquals(System.getProperty("user.name"), fileBrowserService.upload().getUsername(context));
+    Assert.assertEquals(System.getProperty("user.name"), fileBrowserService.upload().getDoAsUsername(context));
     properties.put("webhdfs.username", "test-user");
-    Assert.assertEquals("test-user", 
fileBrowserService.upload().getUsername(context));
+    Assert.assertEquals("test-user", 
fileBrowserService.upload().getDoAsUsername(context));
   }
 
   private static <T> T getService(Class<T> clazz,
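
A note on the test change above: the two hadoop.proxyuser.* settings grant the JVM user impersonation rights in the MiniDFSCluster, mirroring the hadoop.proxyuser.<user>.groups / hadoop.proxyuser.<user>.hosts entries a real cluster's core-site.xml needs before HdfsApi's createProxyUser/doAs calls can succeed.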
