Repository: ambari
Updated Branches:
  refs/heads/branch-1.7.0 f60fc61cf -> 306903330


AMBARI-7706. Slider View: view should check HDFS access and stack-version 
compatibility along with using YARN user - classloader issues in Kerberized 
cluster (srimanth)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/30690333
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/30690333
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/30690333

Branch: refs/heads/branch-1.7.0
Commit: 30690333094227578e4b9d80dd8fee4455d16508
Parents: f60fc61
Author: Srimanth Gunturi <sgunt...@hortonworks.com>
Authored: Thu Oct 9 16:09:34 2014 -0700
Committer: Srimanth Gunturi <sgunt...@hortonworks.com>
Committed: Thu Oct 9 16:09:40 2014 -0700

----------------------------------------------------------------------
 .../slider/SliderAppsViewControllerImpl.java    | 53 +++++++++++++-------
 1 file changed, 36 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/30690333/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
 
b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
index 069a188..f6449e2 100644
--- 
a/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
+++ 
b/contrib/views/slider/src/main/java/org/apache/ambari/view/slider/SliderAppsViewControllerImpl.java
@@ -321,19 +321,12 @@ public class SliderAppsViewControllerImpl implements 
SliderAppsViewController {
   private Validation validateHDFSAccess(final Map<String, String> 
hadoopConfigs, AmbariServiceInfo hdfsServiceInfo) {
     if (hdfsServiceInfo != null && hdfsServiceInfo.isStarted()) {
       if (hadoopConfigs.containsKey("fs.defaultFS")) {
-        final String fsPath = hadoopConfigs.get("fs.defaultFS");
         try {
           invokeHDFSClientRunnable(new HDFSClientRunnable<Boolean>() {
             @Override
-            public Boolean run() throws IOException, InterruptedException {
-              HdfsConfiguration hdfsConfiguration = new HdfsConfiguration();
-              for (Entry<String, String> entry : hadoopConfigs.entrySet()) {
-                hdfsConfiguration.set(entry.getKey(), entry.getValue());
-              }
-              FileSystem fs = FileSystem.get(URI.create(fsPath), 
hdfsConfiguration, getUserToRunAs(hadoopConfigs));
+            public Boolean run(FileSystem fs) throws IOException, 
InterruptedException {
               Path homePath = fs.getHomeDirectory();
               fs.listFiles(homePath, false);
-              fs.close();
               return Boolean.TRUE;
             }
           }, hadoopConfigs);
@@ -376,7 +369,7 @@ public class SliderAppsViewControllerImpl implements 
SliderAppsViewController {
   }
 
   private static interface HDFSClientRunnable<T> {
-    public T run() throws IOException, InterruptedException;
+    public T run(FileSystem fs) throws IOException, InterruptedException;
   }
 
   private String getUserToRunAs() {
@@ -397,26 +390,44 @@ public class SliderAppsViewControllerImpl implements 
SliderAppsViewController {
     }
   }
 
-  private <T> T invokeHDFSClientRunnable(final HDFSClientRunnable<T> runnable, 
Map<String, String> hadoopConfigs)
-      throws IOException, InterruptedException {
+  private <T> T invokeHDFSClientRunnable(final HDFSClientRunnable<T> runnable, 
final Map<String, String> hadoopConfigs) throws IOException,
+      InterruptedException {
     ClassLoader currentClassLoader = 
Thread.currentThread().getContextClassLoader();
     Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
     try {
       boolean securityEnabled = 
Boolean.valueOf(hadoopConfigs.get("security_enabled"));
+      final HdfsConfiguration hdfsConfiguration = new HdfsConfiguration();
+      for (Entry<String, String> entry : hadoopConfigs.entrySet()) {
+        hdfsConfiguration.set(entry.getKey(), entry.getValue());
+      }
+      UserGroupInformation.setConfiguration(hdfsConfiguration);
       UserGroupInformation sliderUser;
+      String loggedInUser = getUserToRunAs(hadoopConfigs);
       if (securityEnabled) {
         String viewPrincipal = getViewParameterValue(PARAM_VIEW_PRINCIPAL);
         String viewPrincipalKeytab = 
getViewParameterValue(PARAM_VIEW_PRINCIPAL_KEYTAB);
         UserGroupInformation ambariUser = 
UserGroupInformation.loginUserFromKeytabAndReturnUGI(viewPrincipal, 
viewPrincipalKeytab);
-        sliderUser = UserGroupInformation.createProxyUser(getUserToRunAs(), 
ambariUser);
+        if (loggedInUser.equals(ambariUser.getShortUserName())) {
+          // HDFS throws an exception when the caller tries to impersonate themselves.
+          // User: ad...@example.com is not allowed to impersonate admin
+          sliderUser = ambariUser;
+        } else {
+          sliderUser = UserGroupInformation.createProxyUser(loggedInUser, 
ambariUser);
+        }
       } else {
-        sliderUser = UserGroupInformation.getBestUGI(null, getUserToRunAs());
+        sliderUser = UserGroupInformation.getBestUGI(null, loggedInUser);
       }
       try {
         T value = sliderUser.doAs(new PrivilegedExceptionAction<T>() {
           @Override
           public T run() throws Exception {
-            return runnable.run();
+            String fsPath = hadoopConfigs.get("fs.defaultFS");
+            FileSystem fs = FileSystem.get(URI.create(fsPath), 
hdfsConfiguration);
+            try {
+              return runnable.run(fs);
+            } finally {
+              fs.close();
+            }
           }
         });
         return value;
@@ -432,15 +443,23 @@ public class SliderAppsViewControllerImpl implements 
SliderAppsViewController {
     ClassLoader currentClassLoader = 
Thread.currentThread().getContextClassLoader();
     Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
     try {
-      boolean securityEnabled = 
Boolean.valueOf(getHadoopConfigs().get(PROPERTY_SLIDER_SECURITY_ENABLED));
+      boolean securityEnabled = 
Boolean.valueOf(getHadoopConfigs().get("security_enabled"));
+      UserGroupInformation.setConfiguration(getSliderClientConfiguration());
       UserGroupInformation sliderUser;
+      String loggedInUser = getUserToRunAs();
       if (securityEnabled) {
         String viewPrincipal = getViewParameterValue(PARAM_VIEW_PRINCIPAL);
         String viewPrincipalKeytab = 
getViewParameterValue(PARAM_VIEW_PRINCIPAL_KEYTAB);
         UserGroupInformation ambariUser = 
UserGroupInformation.loginUserFromKeytabAndReturnUGI(viewPrincipal, 
viewPrincipalKeytab);
-        sliderUser = UserGroupInformation.createProxyUser(getUserToRunAs(), 
ambariUser);
+        if (loggedInUser.equals(ambariUser.getShortUserName())) {
+          // HDFS throws an exception when the caller tries to impersonate themselves.
+          // User: ad...@example.com is not allowed to impersonate admin
+          sliderUser = ambariUser;
+        } else {
+          sliderUser = UserGroupInformation.createProxyUser(loggedInUser, 
ambariUser);
+        }
       } else {
-        sliderUser = UserGroupInformation.getBestUGI(null, getUserToRunAs());
+        sliderUser = UserGroupInformation.getBestUGI(null, loggedInUser);
       }
       try {
         T value = sliderUser.doAs(new PrivilegedExceptionAction<T>() {

Reply via email to