This is an automated email from the ASF dual-hosted git repository.

ayushsaxena pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 6c6c3fd34bc HIVE-29209: Remove unnecessary usage of LoginException (#6083)
6c6c3fd34bc is described below

commit 6c6c3fd34bc52bd22f14cdd8c588baa1419ee977
Author: Wechar Yu <[email protected]>
AuthorDate: Sun Sep 28 00:55:37 2025 +0800

    HIVE-29209: Remove unnecessary usage of LoginException (#6083)
---
 .../org/apache/hadoop/hive/common/FileUtils.java   |  4 +--
 .../java/org/apache/hadoop/hive/conf/HiveConf.java | 10 ++------
 .../org/apache/hive/hcatalog/common/HCatUtil.java  |  8 +-----
 .../txn/compactor/TestCleanerWithReplication.java  |  3 +--
 .../hadoop/hive/metastore/HiveClientCache.java     | 14 +++-------
 .../hadoop/hive/metastore/HiveMetaStoreUtils.java  |  8 +-----
 .../hadoop/hive/metastore/TestHiveClientCache.java | 11 ++++----
 .../apache/hadoop/hive/ql/exec/tez/DagUtils.java   | 20 ++++-----------
 .../hive/ql/exec/tez/TezSessionPoolSession.java    |  4 +--
 .../hadoop/hive/ql/exec/tez/TezSessionState.java   | 30 ++++++----------------
 .../hadoop/hive/ql/parse/repl/CopyUtils.java       |  9 +++----
 .../repl/dump/events/AbstractEventHandler.java     |  3 +--
 .../parse/repl/dump/events/CommitTxnHandler.java   |  7 +++--
 .../repl/dump/events/CreateFunctionHandler.java    |  3 +--
 .../hive/ql/parse/repl/dump/io/FileOperations.java | 10 +++-----
 .../StorageBasedAuthorizationProvider.java         | 12 +--------
 .../hive/ql/udf/generic/GenericUDTFGetSplits.java  |  6 +----
 .../hadoop/hive/ql/exec/mr/TestMapRedTask.java     |  4 +--
 .../hive/ql/exec/tez/SampleTezSessionState.java    |  7 +++--
 .../org/apache/hive/service/cli/CLIService.java    |  6 +----
 .../hive/service/cli/thrift/ThriftCLIService.java  |  6 ++---
 .../org/apache/hadoop/fs/DefaultFileAccess.java    |  4 +--
 .../java/org/apache/hadoop/hive/shims/Utils.java   |  3 +--
 23 files changed, 53 insertions(+), 139 deletions(-)
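
The whole patch follows from its final hunk: org.apache.hadoop.hive.shims.Utils.getUGI() now declares only IOException, so the checked LoginException it used to advertise can no longer reach any caller, and every call site that merely propagated or re-wrapped it is simplified. In outline, the recurring before/after looks like this (a sketch condensed from the HiveConf.getUser hunk below, not a drop-in copy of the file):

    // Before: the catch block existed only to convert a LoginException
    // that the underlying login path no longer throws.
    public String getUser() throws IOException {
      try {
        UserGroupInformation ugi = Utils.getUGI();
        return ugi.getUserName();
      } catch (LoginException le) {
        throw new IOException(le);
      }
    }

    // After: with LoginException gone from the getUGI() signature,
    // the wrapper collapses to a direct call.
    public String getUser() throws IOException {
      UserGroupInformation ugi = Utils.getUGI();
      return ugi.getUserName();
    }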

diff --git a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
index 803da2e8b7a..375a94c6b13 100644
--- a/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
@@ -77,8 +77,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.security.auth.login.LoginException;
-
 /**
  * Collection of file manipulation utilities common across Hive.
  */
@@ -520,7 +518,7 @@ private static void addChildren(FileSystem fsAsUser, Path path, List<FileStatus>
     }
   }
 
-  public static UserGroupInformation getProxyUser(final String user) throws LoginException, IOException {
+  public static UserGroupInformation getProxyUser(final String user) throws IOException {
     UserGroupInformation ugi = Utils.getUGI();
     String currentUser = ugi.getShortUserName();
     UserGroupInformation proxyUser = null;
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 813ff00d8a5..fa89ee7be8e 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -49,8 +49,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.security.auth.login.LoginException;
-
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
@@ -7057,12 +7055,8 @@ public static URL getHiveServer2SiteLocation() {
    * @throws IOException
    */
   public String getUser() throws IOException {
-    try {
-      UserGroupInformation ugi = Utils.getUGI();
-      return ugi.getUserName();
-    } catch (LoginException le) {
-      throw new IOException(le);
-    }
+    UserGroupInformation ugi = Utils.getUGI();
+    return ugi.getUserName();
   }
 
   public static String getColumnInternalName(int pos) {
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
index af225d95386..8c110300614 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
@@ -33,8 +33,6 @@
 import java.util.Map;
 import java.util.Properties;
 
-import javax.security.auth.login.LoginException;
-
 import com.google.common.collect.Maps;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.lang3.StringUtils;
@@ -551,11 +549,7 @@ public static IMetaStoreClient getHiveMetastoreClient(HiveConf hiveConf)
         }
       }
     }
-    try {
-      return hiveClientCache.get(hiveConf);
-    } catch (LoginException e) {
-      throw new IOException("Couldn't create hiveMetaStoreClient, Error 
getting UGI for user", e);
-    }
+    return hiveClientCache.get(hiveConf);
   }
 
   /**
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithReplication.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithReplication.java
index 67af2443b25..47a52035f17 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithReplication.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCleanerWithReplication.java
@@ -38,7 +38,6 @@
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import javax.security.auth.login.LoginException;
 import java.io.IOException;
 
 import static org.junit.Assert.assertEquals;
@@ -65,7 +64,7 @@ public void setup() throws Exception {
   }
 
   @BeforeClass
-  public static void classLevelSetup() throws LoginException, IOException {
+  public static void classLevelSetup() throws IOException {
     Configuration hadoopConf = new Configuration();
     hadoopConf.set("dfs.client.use.datanode.hostname", "true");
     hadoopConf.set("hadoop.proxyuser." + Utils.getUGI().getShortUserName() + 
".hosts", "*");
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveClientCache.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveClientCache.java
index f798d2dde74..718e5d38a5b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveClientCache.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveClientCache.java
@@ -28,8 +28,6 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import javax.security.auth.login.LoginException;
-
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
@@ -250,9 +248,8 @@ public void cleanup() {
    * @return the hive client
    * @throws MetaException
    * @throws IOException
-   * @throws LoginException
    */
-  public IMetaStoreClient get(final HiveConf hiveConf) throws MetaException, IOException, LoginException {
+  public IMetaStoreClient get(final HiveConf hiveConf) throws MetaException, IOException {
     final HiveClientCacheKey cacheKey = HiveClientCacheKey.fromHiveConf(hiveConf, getThreadId());
     ICacheableMetaStoreClient cacheableHiveMetaStoreClient = null;
 
@@ -279,10 +276,9 @@ public IMetaStoreClient get(final HiveConf hiveConf) throws MetaException, IOExc
    * @return
    * @throws IOException
    * @throws MetaException
-   * @throws LoginException
    */
   private ICacheableMetaStoreClient getOrCreate(final HiveClientCacheKey cacheKey)
-      throws IOException, MetaException, LoginException {
+      throws IOException, MetaException {
     try {
       return hiveCache.get(cacheKey, new Callable<ICacheableMetaStoreClient>() {
         @Override
@@ -301,8 +297,6 @@ public ICacheableMetaStoreClient call() throws MetaException {
         throw (IOException) t;
       } else if (t instanceof MetaException) {
         throw (MetaException) t;
-      } else if (t instanceof LoginException) {
-        throw (LoginException) t;
       } else {
         throw new IOException("Error creating hiveMetaStoreClient", t);
       }
@@ -321,14 +315,14 @@ static class HiveClientCacheKey {
     final private HiveConf hiveConf;
     final private int threadId;
 
-    private HiveClientCacheKey(HiveConf hiveConf, final int threadId) throws IOException, LoginException {
+    private HiveClientCacheKey(HiveConf hiveConf, final int threadId) throws IOException {
       this.metaStoreURIs = hiveConf.getVar(HiveConf.ConfVars.METASTORE_URIS);
       ugi = Utils.getUGI();
       this.hiveConf = hiveConf;
       this.threadId = threadId;
     }
 
-    public static HiveClientCacheKey fromHiveConf(HiveConf hiveConf, final int threadId) throws IOException, LoginException {
+    public static HiveClientCacheKey fromHiveConf(HiveConf hiveConf, final int threadId) throws IOException {
       return new HiveClientCacheKey(hiveConf, threadId);
     }
 
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
index d1875cfed2f..5b816cea8e0 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreUtils.java
@@ -24,8 +24,6 @@
 import java.util.List;
 import java.util.Properties;
 
-import javax.security.auth.login.LoginException;
-
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
@@ -248,11 +246,7 @@ public static IMetaStoreClient getHiveMetastoreClient(HiveConf hiveConf)
         }
       }
     }
-    try {
-      return hiveClientCache.get(hiveConf);
-    } catch (LoginException e) {
-      throw new IOException("Couldn't create hiveMetaStoreClient, Error 
getting UGI for user", e);
-    }
+    return hiveClientCache.get(hiveConf);
   }
 
 }
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveClientCache.java b/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveClientCache.java
index 65dae97a4af..bc61ed8e3ca 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveClientCache.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveClientCache.java
@@ -30,7 +30,6 @@
 import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.junit.Test;
 
-import javax.security.auth.login.LoginException;
 import java.io.IOException;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
@@ -42,7 +41,7 @@ public class TestHiveClientCache {
   final HiveConf hiveConf = new HiveConf();
 
   @Test
-  public void testCacheHit() throws IOException, MetaException, LoginException {
+  public void testCacheHit() throws IOException, MetaException {
     HiveClientCache cache = new HiveClientCache(1000);
     HiveClientCache.ICacheableMetaStoreClient client =
         (HiveClientCache.ICacheableMetaStoreClient) cache.get(hiveConf);
@@ -60,7 +59,7 @@ public void testCacheHit() throws IOException, MetaException, LoginException {
   }
 
   @Test
-  public void testCacheMiss() throws IOException, MetaException, LoginException {
+  public void testCacheMiss() throws IOException, MetaException {
     HiveClientCache cache = new HiveClientCache(1000);
     IMetaStoreClient client = cache.get(hiveConf);
     assertNotNull(client);
@@ -77,7 +76,7 @@ public void testCacheMiss() throws IOException, MetaException, LoginException {
    * Also verify that the expiry time configuration is honoured
    */
   @Test
-  public void testCacheExpiry() throws IOException, MetaException, LoginException, InterruptedException {
+  public void testCacheExpiry() throws IOException, MetaException, InterruptedException {
     HiveClientCache cache = new HiveClientCache(1);
     HiveClientCache.ICacheableMetaStoreClient client =
         (HiveClientCache.ICacheableMetaStoreClient) cache.get(hiveConf);
@@ -105,7 +104,7 @@ public void testMultipleThreadAccess() throws ExecutionException, InterruptedExc
 
     class GetHiveClient implements Callable<IMetaStoreClient> {
       @Override
-      public IMetaStoreClient call() throws IOException, MetaException, LoginException {
+      public IMetaStoreClient call() throws IOException, MetaException {
         return cache.get(hiveConf);
       }
     }
@@ -126,7 +125,7 @@ public IMetaStoreClient call() throws IOException, MetaException, LoginException
   }
 
   @Test
-  public void testCloseAllClients() throws IOException, MetaException, LoginException {
+  public void testCloseAllClients() throws IOException, MetaException {
     final HiveClientCache cache = new HiveClientCache(1000);
     HiveClientCache.ICacheableMetaStoreClient client1 =
         (HiveClientCache.ICacheableMetaStoreClient) cache.get(hiveConf);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
index 22c7e777a30..ec3abfba0d8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
@@ -28,8 +28,6 @@
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
 
-import javax.security.auth.login.LoginException;
-
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
@@ -1059,11 +1057,10 @@ public PreWarmVertex createPreWarmVertex(TezConfiguration conf,
   /**
    * @param conf
    * @return path to destination directory on hdfs
-   * @throws LoginException if we are unable to figure user information
    * @throws IOException when any dfs operation fails.
    */
   @SuppressWarnings("deprecation")
-  public Path getDefaultDestDir(Configuration conf) throws LoginException, IOException {
+  public Path getDefaultDestDir(Configuration conf) throws IOException {
     UserGroupInformation ugi = Utils.getUGI();
     String userName = ugi.getShortUserName();
     String userPathStr = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_USER_INSTALL_DIR);
@@ -1097,10 +1094,9 @@ public Path getDefaultDestDir(Configuration conf) throws LoginException, IOExcep
    * @param conf
    * @return List&lt;LocalResource&gt; local resources to add to execution
    * @throws IOException when hdfs operation fails
-   * @throws LoginException when getDefaultDestDir fails with the same exception
    */
   public List<LocalResource> localizeTempFilesFromConf(
-      String hdfsDirPathStr, Configuration conf) throws IOException, LoginException {
+      String hdfsDirPathStr, Configuration conf) throws IOException {
     List<LocalResource> tmpResources = new ArrayList<LocalResource>();
 
     if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_ADD_FILES_USE_HDFS_LOCATION)) {
@@ -1233,7 +1229,7 @@ private Map<String, LocalResource> addTempResources(Configuration conf, String h
     return tmpResourcesMap;
   }
 
-  public FileStatus getHiveJarDirectory(Configuration conf) throws IOException, LoginException {
+  public FileStatus getHiveJarDirectory(Configuration conf) throws IOException {
     FileStatus fstatus = null;
     String hdfsDirPathStr = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_JAR_DIRECTORY, (String)null);
     if (hdfsDirPathStr != null) {
@@ -1612,14 +1608,8 @@ public Vertex createVertex(JobConf conf, BaseWork workUnit, Path scratchDir,
    */
   public Path createTezDir(Path scratchDir, Configuration conf)
       throws IOException {
-    UserGroupInformation ugi;
-    String userName = System.getProperty("user.name");
-    try {
-      ugi = Utils.getUGI();
-      userName = ugi.getShortUserName();
-    } catch (LoginException e) {
-      throw new IOException(e);
-    }
+    UserGroupInformation ugi = Utils.getUGI();
+    String userName = ugi.getShortUserName();
 
     scratchDir = new Path(scratchDir, userName);
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolSession.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolSession.java
index d3748edb867..49a3211e60f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolSession.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionPoolSession.java
@@ -25,8 +25,6 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import javax.security.auth.login.LoginException;
-
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.registry.impl.TezAmInstance;
@@ -120,7 +118,7 @@ void close(boolean keepTmpDir) throws Exception {
   @Override
   protected void openInternal(String[] additionalFiles,
       boolean isAsync, LogHelper console, HiveResources resources)
-          throws IOException, LoginException, URISyntaxException, TezException {
+          throws IOException, URISyntaxException, TezException {
     super.openInternal(additionalFiles, isAsync, console, resources);
     parent.registerOpenSession(this);
     if (expirationTracker != null) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
index 8b66454c51e..b89cac4e813 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezSessionState.java
@@ -39,7 +39,6 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicReference;
-import javax.security.auth.login.LoginException;
 
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.io.FilenameUtils;
@@ -236,7 +235,7 @@ public static String makeSessionId() {
     return UUID.randomUUID().toString();
   }
 
-  public void open() throws IOException, LoginException, URISyntaxException, TezException {
+  public void open() throws IOException, URISyntaxException, TezException {
     String[] noFiles = null;
     open(noFiles);
   }
@@ -246,24 +245,24 @@ public void open() throws IOException, LoginException, URISyntaxException, TezEx
   * submit multiple DAGs against a session (as long as they are executed serially).
    */
   public void open(String[] additionalFilesNotFromConf)
-      throws IOException, LoginException, URISyntaxException, TezException {
+      throws IOException, URISyntaxException, TezException {
     openInternal(additionalFilesNotFromConf, false, null, null);
   }
 
 
   public void open(HiveResources resources)
-      throws LoginException, IOException, URISyntaxException, TezException {
+      throws IOException, URISyntaxException, TezException {
     openInternal(null, false, null, resources);
   }
 
   public void beginOpen(String[] additionalFiles, LogHelper console)
-      throws IOException, LoginException, URISyntaxException, TezException {
+      throws IOException, URISyntaxException, TezException {
     openInternal(additionalFiles, true, console, null);
   }
 
   protected void openInternal(String[] additionalFilesNotFromConf,
       boolean isAsync, LogHelper console, HiveResources resources)
-          throws IOException, LoginException, URISyntaxException, TezException {
+          throws IOException, URISyntaxException, TezException {
     // TODO Why is the queue name set again. It has already been setup via setQueueName. Do only one of the two.
     String confQueueName = conf.get(TezConfiguration.TEZ_QUEUE_NAME);
     if (queueName != null && !queueName.equals(confQueueName)) {
@@ -634,7 +633,7 @@ private void setupSessionAcls(Configuration tezConf, HiveConf hiveConf) throws
 
   /** This is called in openInternal and in TezTask.updateSession to localize conf resources. */
   public void ensureLocalResources(Configuration conf, String[] newFilesNotFromConf)
-          throws IOException, LoginException, URISyntaxException, TezException {
+          throws IOException, URISyntaxException, TezException {
     if (resources == null) {
       throw new AssertionError("Ensure called on an unitialized (or closed) 
session " + sessionId);
     }
@@ -820,12 +819,11 @@ private Path createTezDir(String sessionId, String suffix) throws IOException {
    * @param localJarPath Local path to the jar to be localized.
    * @return LocalResource corresponding to the localized hive exec resource.
    * @throws IOException when any file system related call fails.
-   * @throws LoginException when we are unable to determine the user.
    * @throws URISyntaxException when current jar location cannot be determined.
    */
   @VisibleForTesting
   LocalResource createJarLocalResource(String localJarPath)
-      throws IOException, LoginException, IllegalArgumentException {
+      throws IOException, IllegalArgumentException {
     // TODO Reduce the number of lookups that happen here. This shouldn't go to HDFS for each call.
     // The hiveJarDir can be determined once per client.
     FileStatus destDirStatus = utils.getHiveJarDirectory(conf);
@@ -854,19 +852,7 @@ private String getKey(final FileStatus fileStatus) {
     return fileStatus.getPath() + ":" + fileStatus.getLen() + ":" + 
fileStatus.getModificationTime();
   }
 
-  private void addJarLRByClassName(String className, final Map<String, LocalResource> lrMap) throws
-      IOException, LoginException {
-    Class<?> clazz;
-    try {
-      clazz = Class.forName(className);
-    } catch (ClassNotFoundException e) {
-      throw new IOException("Cannot find " + className + " in classpath", e);
-    }
-    addJarLRByClass(clazz, lrMap);
-  }
-
-  private void addJarLRByClass(Class<?> clazz, final Map<String, LocalResource> lrMap) throws IOException,
-      LoginException {
+  private void addJarLRByClass(Class<?> clazz, final Map<String, LocalResource> lrMap) throws IOException {
     String jarPath = Utilities.jarFinderGetJar(clazz);
     if (jarPath == null) {
       throw new IOException("Can't find jar for: " + clazz);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/CopyUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/CopyUtils.java
index eb55b549fb8..90901d0dc40 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/CopyUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/CopyUtils.java
@@ -40,7 +40,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.security.auth.login.LoginException;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
@@ -164,7 +163,7 @@ ContentSummary getContentSummary(FileSystem fs, Path f) throws IOException {
   // if not match, copy again from cm
   public void copyAndVerify(Path destRoot, List<ReplChangeManager.FileInfo> srcFiles, Path origSrcPath,
                             boolean readSrcAsFilesList, boolean overwrite)
-          throws IOException, LoginException, HiveFatalException {
+          throws IOException, HiveFatalException {
     UserGroupInformation proxyUser = getProxyUser();
     if (CollectionUtils.isEmpty(srcFiles)) {
       throw new IOException(ErrorMsg.REPL_INVALID_ARGUMENTS.format("SrcFiles can not be empty during copy operation."));
@@ -230,7 +229,7 @@ ExecutorService getExecutorService() {
 
   @VisibleForTesting
   void doCopy(Map.Entry<Path, List<ReplChangeManager.FileInfo>> destMapEntry, UserGroupInformation proxyUser,
-                      boolean useRegularCopy, boolean overwrite, DataCopyStatistics copyStatistics) throws IOException, LoginException, HiveFatalException {
+                      boolean useRegularCopy, boolean overwrite, DataCopyStatistics copyStatistics) throws IOException, HiveFatalException {
     Path destination = destMapEntry.getKey();
     List<ReplChangeManager.FileInfo> fileInfoList = destMapEntry.getValue();
     // Get the file system again from cache. There is a chance that the file system stored in the map is closed.
@@ -246,7 +245,7 @@ void doCopy(Map.Entry<Path, List<ReplChangeManager.FileInfo>> destMapEntry, User
 
   private void doCopyRetry(FileSystem sourceFs, List<ReplChangeManager.FileInfo> srcFileList,
                            Path destination, UserGroupInformation proxyUser,
-                           boolean useRegularCopy, boolean overwrite, DataCopyStatistics copyStatistics) throws IOException, LoginException, HiveFatalException {
+                           boolean useRegularCopy, boolean overwrite, DataCopyStatistics copyStatistics) throws IOException, HiveFatalException {
     int repeat = 0;
     boolean isCopyError = false;
     List<Path> pathList = Lists.transform(srcFileList, ReplChangeManager.FileInfo::getEffectivePath);
@@ -537,7 +536,7 @@ private void deleteSubDirs(FileSystem fs, Path path) throws IOException {
     mkdirs(fs, path);
   }
 
-  public void doCopy(Path destination, List<Path> srcPaths) throws IOException, LoginException {
+  public void doCopy(Path destination, List<Path> srcPaths) throws IOException {
     Map<FileSystem, List<Path>> map = fsToPathMap(srcPaths);
 
     UserGroupInformation proxyUser = getProxyUser();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java
index 7f0830589d3..04e2fa82633 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/AbstractEventHandler.java
@@ -38,7 +38,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.security.auth.login.LoginException;
 import java.io.BufferedWriter;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
@@ -106,7 +105,7 @@ protected void writeEncodedDumpFiles(Context withinContext, Iterable<String> fil
   }
 
   protected void writeFileEntry(Table table, Partition ptn, String file, Context withinContext)
-          throws IOException, LoginException, HiveFatalException {
+          throws IOException, HiveFatalException {
     HiveConf hiveConf = withinContext.hiveConf;
     String distCpDoAsUser = hiveConf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER);
     if (!Utils.shouldDumpMetaDataOnly(withinContext.hiveConf)) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CommitTxnHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CommitTxnHandler.java
index 7ba61ff2ac2..7ec56e818de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CommitTxnHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CommitTxnHandler.java
@@ -38,7 +38,6 @@
 import org.apache.hadoop.hive.ql.parse.repl.DumpType;
 import org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData;
 
-import javax.security.auth.login.LoginException;
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -60,7 +59,7 @@ CommitTxnMessage eventMessage(String stringRepresentation) {
 
   private void writeDumpFiles(Table qlMdTable, Partition ptn, Iterable<String> files, Context withinContext,
                               Path dataPath)
-          throws IOException, LoginException, HiveFatalException, SemanticException {
+          throws IOException, HiveFatalException, SemanticException {
     boolean copyAtLoad = withinContext.hiveConf.getBoolVar(HiveConf.ConfVars.REPL_RUN_DATA_COPY_TASKS_ON_TARGET);
     if (copyAtLoad) {
       // encoded filename/checksum of files, write into _files
@@ -74,7 +73,7 @@ private void writeDumpFiles(Table qlMdTable, Partition ptn, Iterable<String> fil
 
   private void createDumpFile(Context withinContext, org.apache.hadoop.hive.ql.metadata.Table qlMdTable,
                   List<Partition> qlPtns, List<List<String>> fileListArray)
-          throws IOException, SemanticException, LoginException, HiveFatalException {
+          throws IOException, SemanticException, HiveFatalException {
     if (fileListArray == null || fileListArray.isEmpty()) {
       return;
     }
@@ -102,7 +101,7 @@ private void createDumpFile(Context withinContext, org.apache.hadoop.hive.ql.met
 
   private void createDumpFileForTable(Context withinContext, org.apache.hadoop.hive.ql.metadata.Table qlMdTable,
                     List<Partition> qlPtns, List<List<String>> fileListArray)
-          throws IOException, SemanticException, LoginException, HiveFatalException {
+          throws IOException, SemanticException, HiveFatalException {
     Path newPath = HiveUtils.getDumpPath(withinContext.eventRoot, qlMdTable.getDbName(), qlMdTable.getTableName());
     Context context = new Context(withinContext);
     context.setEventRoot(newPath);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java
index 638a22961dc..75bbe8311c8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/events/CreateFunctionHandler.java
@@ -35,7 +35,6 @@
 
 import org.apache.hadoop.hive.ql.parse.EximUtil.DataCopyPath;
 
-import javax.security.auth.login.LoginException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
@@ -75,7 +74,7 @@ public void handle(Context withinContext) throws Exception {
   }
 
   private void copyFunctionBinaries(List<DataCopyPath> functionBinaryCopyPaths, HiveConf hiveConf)
-          throws IOException, LoginException, HiveFatalException {
+          throws IOException, HiveFatalException {
     if (!functionBinaryCopyPaths.isEmpty()) {
       String distCpDoAsUser = hiveConf.getVar(HiveConf.ConfVars.HIVE_DISTCP_DOAS_USER);
       List<ReplChangeManager.FileInfo> filePaths = new ArrayList<>();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java
index 9fae68667b2..3dcc32be369 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/io/FileOperations.java
@@ -26,8 +26,6 @@
 import java.util.List;
 import java.util.concurrent.Callable;
 
-import javax.security.auth.login.LoginException;
-
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -90,7 +88,7 @@ public void export(boolean isExportTask, boolean dataCopyAtLoad) throws Exceptio
   /**
    * This writes the actual data in the exportRootDataDir from the source.
    */
-  private void copyFiles() throws IOException, LoginException {
+  private void copyFiles() throws IOException {
     if (mmCtx == null) {
       for (Path dataPath : dataPathList) {
         copyOneDataPath(dataPath, exportRootDataDir);
@@ -100,7 +98,7 @@ private void copyFiles() throws IOException, LoginException {
     }
   }
 
-  private void copyOneDataPath(Path fromPath, Path toPath) throws IOException, LoginException {
+  private void copyOneDataPath(Path fromPath, Path toPath) throws IOException {
     FileStatus[] fileStatuses = LoadSemanticAnalyzer.matchFilesOrDir(dataFileSystem, fromPath);
     List<Path> srcPaths = new ArrayList<>();
     for (FileStatus fileStatus : fileStatuses) {
@@ -110,7 +108,7 @@ private void copyOneDataPath(Path fromPath, Path toPath) throws IOException, Log
     new CopyUtils(distCpDoAsUser, hiveConf, toPath.getFileSystem(hiveConf)).doCopy(toPath, srcPaths);
   }
 
-  private void copyMmPath() throws LoginException, IOException {
+  private void copyMmPath() throws IOException {
     ValidWriteIdList ids = AcidUtils.getTableValidWriteIdList(hiveConf, mmCtx.getFqTableName());
     for (Path fromPath : dataPathList) {
       fromPath = dataFileSystem.makeQualified(fromPath);
@@ -163,7 +161,7 @@ private FileStatus[] listFilesInDir(Path path) throws IOException {
   /**
   * Since the bootstrap will do table directory level copy, need to check for existence of src path.
    */
-  private void validateSrcPathListExists() throws IOException, LoginException {
+  private void validateSrcPathListExists() throws IOException {
     if (dataPathList.isEmpty()) {
       return;
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
index 1058ce3b6d1..254d60fb884 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
@@ -25,8 +25,6 @@
 import java.util.EnumSet;
 import java.util.List;
 
-import javax.security.auth.login.LoginException;
-
 import org.apache.hadoop.hive.metastore.HMSHandler;
 import org.apache.hadoop.hive.metastore.IHMSHandler;
 import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
@@ -380,10 +378,6 @@ public void authorize(Path path, Privilege[] readRequiredPriv, Privilege[] write
 
       checkPermissions(getConf(), path, actions);
 
-    } catch (AccessControlException ex) {
-      throw authorizationException(ex);
-    } catch (LoginException ex) {
-      throw authorizationException(ex);
     } catch (IOException ex) {
       throw hiveException(ex);
     }
@@ -395,7 +389,7 @@ public void authorize(Path path, Privilege[] readRequiredPriv, Privilege[] write
    * If the given path does not exists, it checks for its parent folder.
    */
   protected void checkPermissions(final Configuration conf, final Path path,
-      final EnumSet<FsAction> actions) throws IOException, LoginException, HiveException {
+      final EnumSet<FsAction> actions) throws IOException, HiveException {
 
     if (path == null) {
       throw new IllegalArgumentException("path is null");
@@ -474,10 +468,6 @@ private HiveException hiveException(Exception e) {
     return new HiveException(e);
   }
 
-  private AuthorizationException authorizationException(Exception e) {
-    return new AuthorizationException(e);
-  }
-
   private static AccessControlException accessControlException(Exception e) {
     AccessControlException ace = new AccessControlException(e.getMessage());
     ace.initCause(e);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
index b24b3602e09..9443c3c959e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFGetSplits.java
@@ -33,8 +33,6 @@
 import java.util.Set;
 import java.util.UUID;
 
-import javax.security.auth.login.LoginException;
-
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.io.FilenameUtils;
@@ -739,13 +737,11 @@ private static byte[] serializeVertexSpec(SignableVertexSpec.Builder svsb) throw
    * @return LocalResource corresponding to the localized hive exec resource.
    * @throws IOException
    *           when any file system related call fails.
-   * @throws LoginException
-   *           when we are unable to determine the user.
    * @throws URISyntaxException
    *           when current jar location cannot be determined.
    */
   private LocalResource createJarLocalResource(String localJarPath,
-      DagUtils utils, Configuration conf) throws IOException, LoginException,
+      DagUtils utils, Configuration conf) throws IOException,
       IllegalArgumentException, FileNotFoundException {
     FileStatus destDirStatus = utils.getHiveJarDirectory(conf);
     assert destDirStatus != null;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/mr/TestMapRedTask.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/mr/TestMapRedTask.java
index d4f983c62c0..120b4cca8ef 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/mr/TestMapRedTask.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/mr/TestMapRedTask.java
@@ -26,8 +26,6 @@
 import java.io.IOException;
 import java.util.Arrays;
 
-import javax.security.auth.login.LoginException;
-
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.metrics.common.Metrics;
 import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
@@ -59,7 +57,7 @@ public void mrTask_updates_Metrics() throws IOException {
   }
 
   @Test
-  public void mrTaskSumbitViaChildWithImpersonation() throws IOException, LoginException {
+  public void mrTaskSumbitViaChildWithImpersonation() throws IOException {
     Utils.getUGI().setAuthenticationMethod(PROXY);
 
     Context ctx = Mockito.mock(Context.class);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java
index d4296b9b434..f108160a80f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/SampleTezSessionState.java
@@ -26,7 +26,6 @@
 import com.google.common.util.concurrent.SettableFuture;
 import java.io.IOException;
 import java.util.concurrent.ScheduledExecutorService;
-import javax.security.auth.login.LoginException;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -72,7 +71,7 @@ public void setOpen(boolean open) {
   }
 
   @Override
-  public void open() throws LoginException, IOException {
+  public void open() throws IOException {
     UserGroupInformation ugi = Utils.getUGI();
     user = ugi.getShortUserName();
     this.doAsEnabled = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
@@ -80,12 +79,12 @@ public void open() throws LoginException, IOException {
   }
 
   @Override
-  public void open(HiveResources resources) throws LoginException, IOException {
+  public void open(HiveResources resources) throws IOException {
     open();
   }
 
   @Override
-  public void open(String[] additionalFiles) throws IOException, LoginException {
+  public void open(String[] additionalFiles) throws IOException {
     open();
   }
 
diff --git a/service/src/java/org/apache/hive/service/cli/CLIService.java b/service/src/java/org/apache/hive/service/cli/CLIService.java
index eb53607771b..af22741d1a1 100644
--- a/service/src/java/org/apache/hive/service/cli/CLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/CLIService.java
@@ -27,8 +27,6 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
-import javax.security.auth.login.LoginException;
-
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -96,8 +94,6 @@ public synchronized void init(HiveConf hiveConf) {
         this.serviceUGI = Utils.getUGI();
       } catch (IOException e) {
         throw new ServiceException("Unable to login to kerberos with given 
principal/keytab", e);
-      } catch (LoginException e) {
-        throw new ServiceException("Unable to login to kerberos with given 
principal/keytab", e);
       }
 
       // Also try creating a UGI object for the SPNego principal
@@ -602,7 +598,7 @@ public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientatio
 
   // obtain delegation token for the give user from metastore
   public String getDelegationTokenFromMetaStore(String owner)
-      throws HiveSQLException, UnsupportedOperationException, LoginException, IOException {
+      throws HiveSQLException, UnsupportedOperationException, IOException {
     HiveConf hiveConf = getHiveConf();
     if (!hiveConf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL) ||
         !hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index 7d71881dc84..ccf576fe50d 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -33,7 +33,6 @@
 import java.util.Optional;
 import java.util.concurrent.TimeUnit;
 
-import javax.security.auth.login.LoginException;
 import org.apache.hadoop.hive.common.ServerUtils;
 import org.apache.hadoop.hive.common.log.ProgressMonitor;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -544,11 +543,10 @@ private String getShortName(String userName) throws IOException {
    * @param res
    * @return
    * @throws HiveSQLException
-   * @throws LoginException
    * @throws IOException
    */
   private SessionHandle getSessionHandle(TOpenSessionReq req, TOpenSessionResp res, String userName)
-      throws HiveSQLException, LoginException, IOException {
+      throws HiveSQLException, IOException {
     final String ipAddress = getIpAddress();
 
     LOG.info("Creating Hive session handle for user [{}] from IP {}", 
req.getUsername(), ipAddress);
@@ -579,7 +577,7 @@ private double getProgressedPercentage(OperationHandle opHandle) throws HiveSQLE
   }
 
   private String getDelegationToken(String userName)
-      throws HiveSQLException, LoginException, IOException {
+      throws HiveSQLException, IOException {
     try {
       return cliService.getDelegationTokenFromMetaStore(userName);
     } catch (UnsupportedOperationException e) {
diff --git a/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java b/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java
index a9446128a20..332f8281c3a 100644
--- a/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java
+++ b/shims/common/src/main/java/org/apache/hadoop/fs/DefaultFileAccess.java
@@ -26,8 +26,6 @@
 import java.util.EnumSet;
 import java.util.List;
 
-import javax.security.auth.login.LoginException;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -50,7 +48,7 @@ public class DefaultFileAccess {
   private static List<String> emptyGroups = new ArrayList<String>(0);
 
   public static void checkFileAccess(FileSystem fs, FileStatus stat, FsAction action)
-      throws IOException, AccessControlException, LoginException {
+      throws IOException, AccessControlException {
     // Get the user/groups for checking permissions based on the current UGI.
     UserGroupInformation currentUgi = Utils.getUGI();
     DefaultFileAccess.checkFileAccess(fs, stat, action,
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
index 515b10b35f0..b28b4669322 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
@@ -23,7 +23,6 @@
 import java.util.HashSet;
 import java.util.Set;
 
-import javax.security.auth.login.LoginException;
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
@@ -46,7 +45,7 @@ public class Utils {
 
   public static final String DISTCP_OPTIONS_PREFIX = "distcp.options.";
 
-  public static UserGroupInformation getUGI() throws LoginException, IOException {
+  public static UserGroupInformation getUGI() throws IOException {
     if (UserGroupInformation.isSecurityEnabled()) {
       return UserGroupInformation.getCurrentUser();
     }
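
One detail worth noting from the HiveClientCache hunks above: clients are built through a Guava cache, so checked exceptions thrown by the loader surface wrapped in an ExecutionException and are re-thrown by type; removing LoginException from the call chain also removes its unwrap branch in getOrCreate. A minimal, self-contained sketch of that pattern (generic types and names here are illustrative stand-ins, not the actual Hive code):

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import java.io.IOException;
    import java.util.concurrent.ExecutionException;

    public class CacheUnwrapSketch {
      private final Cache<String, String> cache =
          CacheBuilder.newBuilder().maximumSize(16).build();

      // Loader exceptions come back wrapped in ExecutionException; unwrap
      // the declared checked types and fold everything else into an
      // IOException, as HiveClientCache.getOrCreate does after this change.
      String getOrCreate(final String key) throws IOException {
        try {
          return cache.get(key, () -> "client-for-" + key); // stand-in loader
        } catch (ExecutionException e) {
          Throwable t = e.getCause();
          if (t instanceof IOException) {
            throw (IOException) t;
          }
          throw new IOException("Error creating client", t);
        }
      }
    }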

