This is an automated email from the ASF dual-hosted git repository.

rameshkumar pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new f298ebc51d HIVE-26199 Reduce FileSystem init during user impersonation (#3264)
f298ebc51d is described below

commit f298ebc51d8073e2bcefae8bdf331fab2b91d4d0
Author: Ramesh Kumar <rameshkumarthangara...@gmail.com>
AuthorDate: Tue May 10 07:44:50 2022 -0700

    HIVE-26199 Reduce FileSystem init during user impersonation (#3264)
---
 .../org/apache/hadoop/hive/common/FileUtils.java   | 92 +++++++++++++++-------
 .../org/apache/hadoop/hive/ql/metadata/Hive.java   | 16 +++-
 .../plugin/sqlstd/SQLAuthorizationUtils.java       | 20 +++--
 3 files changed, 91 insertions(+), 37 deletions(-)

diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index e92b700195..f3290f69a1 100644
--- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -67,6 +67,8 @@ import org.apache.hive.common.util.SuppressFBWarnings;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.security.auth.login.LoginException;
+
 /**
  * Collection of file manipulation utilities common across Hive.
  */
@@ -408,10 +410,16 @@ public final class FileUtils {
     return getPathOrParentThatExists(fs, parentPath);
   }
 
-  public static void checkFileAccessWithImpersonation(final FileSystem fs, final FileStatus stat,
-      final FsAction action, final String user)
-      throws IOException, AccessControlException, InterruptedException, Exception {
-    checkFileAccessWithImpersonation(fs, stat, action, user, null);
+  public static void checkFileAccessWithImpersonation(final FileSystem fs, final FileStatus stat, final FsAction action,
+      final String user) throws Exception {
+    UserGroupInformation proxyUser = null;
+    try {
+      proxyUser = getProxyUser(user);
+      FileSystem fsAsUser = FileUtils.getFsAsUser(fs, proxyUser);
+      checkFileAccessWithImpersonation(fs, stat, action, user, null, fsAsUser);
+    } finally {
+      closeFs(proxyUser);
+    }
   }
 
   /**
@@ -435,9 +443,9 @@ public final class FileUtils {
    * @throws InterruptedException
    * @throws Exception
    */
-  public static void checkFileAccessWithImpersonation(final FileSystem fs,
-      final FileStatus stat, final FsAction action, final String user, final List<FileStatus> children)
-          throws IOException, AccessControlException, InterruptedException, Exception {
+  public static void checkFileAccessWithImpersonation(final FileSystem fs, final FileStatus stat, final FsAction action,
+      final String user, final List<FileStatus> children, final FileSystem fsAsUser)
+      throws IOException, AccessControlException, InterruptedException, Exception {
     UserGroupInformation ugi = Utils.getUGI();
     String currentUser = ugi.getShortUserName();
 
@@ -449,25 +457,17 @@ public final class FileUtils {
     }
 
     // Otherwise, try user impersonation. Current user must be configured to do user impersonation.
-    UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(
-        user, UserGroupInformation.getLoginUser());
-    try {
-      proxyUser.doAs(new PrivilegedExceptionAction<Object>() {
-        @Override
-        public Object run() throws Exception {
-          FileSystem fsAsUser = FileSystem.get(fs.getUri(), fs.getConf());
-          ShimLoader.getHadoopShims().checkFileAccess(fsAsUser, stat, action);
-          addChildren(fsAsUser, stat.getPath(), children);
-          return null;
-        }
-      });
-    } finally {
-      FileSystem.closeAllForUGI(proxyUser);
-    }
+    UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());
+    proxyUser.doAs(new PrivilegedExceptionAction<Object>() {
+      @Override public Object run() throws Exception {
+        ShimLoader.getHadoopShims().checkFileAccess(fsAsUser, stat, action);
+        addChildren(fsAsUser, stat.getPath(), children);
+        return null;
+      }
+    });
   }
 
-  private static void addChildren(FileSystem fsAsUser, Path path, List<FileStatus> children)
-      throws IOException {
+  private static void addChildren(FileSystem fsAsUser, Path path, List<FileStatus> children) throws IOException {
     if (children != null) {
       FileStatus[] listStatus;
       try {
@@ -480,6 +480,33 @@ public final class FileUtils {
     }
   }
 
+  public static UserGroupInformation getProxyUser(final String user) throws LoginException, IOException {
+    UserGroupInformation ugi = Utils.getUGI();
+    String currentUser = ugi.getShortUserName();
+    UserGroupInformation proxyUser = null;
+    if (user != null && !user.equals(currentUser)) {
+      proxyUser = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());
+    }
+    return proxyUser;
+  }
+
+  public static void closeFs(UserGroupInformation proxyUser) throws IOException {
+    if (proxyUser != null) {
+      FileSystem.closeAllForUGI(proxyUser);
+    }
+  }
+
+  public static FileSystem getFsAsUser(FileSystem fs, UserGroupInformation proxyUser) throws IOException, InterruptedException {
+    if (proxyUser == null) {
+      return null;
+    }
+    FileSystem fsAsUser = proxyUser.doAs(new PrivilegedExceptionAction<FileSystem>() {
+      @Override public FileSystem run() throws Exception {
+        return FileSystem.get(fs.getUri(), fs.getConf());
+      }
+    });
+    return fsAsUser;
+  }
   /**
   * Check if user userName has permissions to perform the given FsAction action
    * on all files under the file whose FileStatus fileStatus is provided
@@ -493,12 +520,21 @@ public final class FileUtils {
    */
   public static boolean isActionPermittedForFileHierarchy(FileSystem fs, FileStatus fileStatus,
                                                           String userName, FsAction action) throws Exception {
-    return isActionPermittedForFileHierarchy(fs,fileStatus,userName, action, true);
+    UserGroupInformation proxyUser = null;
+    boolean isPermitted = false;
+    try {
+      proxyUser = getProxyUser(userName);
+      FileSystem fsAsUser = getFsAsUser(fs, proxyUser);
+      isPermitted = isActionPermittedForFileHierarchy(fs, fileStatus, userName, action, true, fsAsUser);
+    } finally {
+      closeFs(proxyUser);
+    }
+    return isPermitted;
   }
 
   @SuppressFBWarnings(value = "DLS_DEAD_LOCAL_STORE", justification = "Intended, dir privilege all-around bug")
   public static boolean isActionPermittedForFileHierarchy(FileSystem fs, FileStatus fileStatus,
-      String userName, FsAction action, boolean recurse) throws Exception {
+      String userName, FsAction action, boolean recurse, FileSystem fsAsUser) throws Exception {
     boolean isDir = fileStatus.isDirectory();
 
     // for dirs user needs execute privileges as well
@@ -510,7 +546,7 @@ public final class FileUtils {
     }
 
     try {
-      checkFileAccessWithImpersonation(fs, fileStatus, action, userName, subDirsToCheck);
+      checkFileAccessWithImpersonation(fs, fileStatus, action, userName, subDirsToCheck, fsAsUser);
     } catch (AccessControlException err) {
       // Action not permitted for user
       LOG.warn("Action " + action + " denied on " + fileStatus.getPath() + " 
for user " + userName);
@@ -525,7 +561,7 @@ public final class FileUtils {
     // check all children
     for (FileStatus childStatus : subDirsToCheck) {
       // check children recursively - recurse is true if we're here.
-      if (!isActionPermittedForFileHierarchy(fs, childStatus, userName, action, true)) {
+      if (!isActionPermittedForFileHierarchy(fs, childStatus, userName, action, true, fsAsUser)) {
         return false;
       }
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 38c2dfadff..66b4cb2f1b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -223,6 +223,7 @@ import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
@@ -5008,10 +5009,17 @@ private void constructOneLBLocationMap(FileStatus fSta,
         boolean isOwned = FileUtils.isOwnerOfFileHierarchy(srcFs, srcs, configuredOwner, false);
         if (configuredOwner.equals(runningUser)) {
           // Check if owner has write permission, else it will have to copy
-          if (!(isOwned &&
-              FileUtils.isActionPermittedForFileHierarchy(
-                  srcFs, srcs, configuredOwner, FsAction.WRITE, false))) {
-            return true;
+          UserGroupInformation proxyUser = null;
+          try {
+            proxyUser = FileUtils.getProxyUser(configuredOwner);
+            FileSystem fsAsUser = FileUtils.getFsAsUser(srcFs, proxyUser);
+            if (!(isOwned && FileUtils.isActionPermittedForFileHierarchy(srcFs, srcs, configuredOwner, FsAction.WRITE,
+                false, fsAsUser))) {
+              return true;
+            }
+          }
+          finally {
+            FileUtils.closeFs(proxyUser);
           }
         } else {
           // If the configured owner does not own the file, throw
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
index e78753812b..d8031d037f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
@@ -30,6 +30,8 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.shims.Utils;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
@@ -454,12 +456,20 @@ public class SQLAuthorizationUtils {
     if (FileUtils.isOwnerOfFileHierarchy(fs, fileStatus, userName, recurse)) {
       privs.add(SQLPrivTypeGrant.OWNER_PRIV);
     }
-    if (FileUtils.isActionPermittedForFileHierarchy(fs, fileStatus, userName, FsAction.WRITE, recurse)) {
-      privs.add(SQLPrivTypeGrant.INSERT_NOGRANT);
-      privs.add(SQLPrivTypeGrant.DELETE_NOGRANT);
+    UserGroupInformation proxyUser = null;
+    try {
+      proxyUser = FileUtils.getProxyUser(userName);
+      FileSystem fsAsUser = FileUtils.getFsAsUser(fs, proxyUser);
+      if (FileUtils.isActionPermittedForFileHierarchy(fs, fileStatus, userName, FsAction.WRITE, recurse, fsAsUser)) {
+        privs.add(SQLPrivTypeGrant.INSERT_NOGRANT);
+        privs.add(SQLPrivTypeGrant.DELETE_NOGRANT);
+      }
+      if (FileUtils.isActionPermittedForFileHierarchy(fs, fileStatus, userName, FsAction.READ, recurse, fsAsUser)) {
+        privs.add(SQLPrivTypeGrant.SELECT_NOGRANT);
+      }
     }
-    if (FileUtils.isActionPermittedForFileHierarchy(fs, fileStatus, userName, FsAction.READ, recurse)) {
-      privs.add(SQLPrivTypeGrant.SELECT_NOGRANT);
+    finally {
+      FileUtils.closeFs(proxyUser);
     }
     LOG.debug("addPrivilegesFromFS:[{}] asked for privileges on [{}] with 
recurse={} and obtained:[{}]",
         userName, fileStatus, recurse, privs);
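
For reference, the calling pattern these changes introduce looks roughly like the sketch below: the proxy UGI and the per-user FileSystem are obtained once per request, reused across the recursive permission checks, and released in a finally block, instead of calling FileSystem.get() for every file visited. The wrapper class and method names in the sketch are illustrative only; the FileUtils helpers are the ones added by this patch.

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.hive.common.FileUtils;
    import org.apache.hadoop.security.UserGroupInformation;

    // Hypothetical caller, not part of the patch.
    public class ImpersonatedAccessCheckSketch {
      /** Checks WRITE access for 'user' over a whole hierarchy with a single FileSystem init. */
      public static boolean canWriteHierarchy(FileSystem fs, FileStatus root, String user) throws Exception {
        UserGroupInformation proxyUser = null;
        try {
          // Null when 'user' is the current user, i.e. no impersonation is needed.
          proxyUser = FileUtils.getProxyUser(user);
          // One FileSystem.get() under doAs for the whole request, reused for every child check.
          FileSystem fsAsUser = FileUtils.getFsAsUser(fs, proxyUser);
          return FileUtils.isActionPermittedForFileHierarchy(fs, root, user, FsAction.WRITE, true, fsAsUser);
        } finally {
          // Releases the FileSystem instances cached for the proxy user, if any.
          FileUtils.closeFs(proxyUser);
        }
      }
    }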
