Repository: hive
Updated Branches:
  refs/heads/master edc5974a1 -> 7a4fd3377


HIVE-13391 : add an option to LLAP to use keytab to authenticate to read data (Sergey Shelukhin, reviewed by Siddharth Seth)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7a4fd337
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7a4fd337
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7a4fd337

Branch: refs/heads/master
Commit: 7a4fd3377e8a34de724ec45ec61793ac00ae92bc
Parents: edc5974
Author: Sergey Shelukhin <ser...@apache.org>
Authored: Wed Jun 8 14:49:44 2016 -0700
Committer: Sergey Shelukhin <ser...@apache.org>
Committed: Wed Jun 8 14:49:44 2016 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/common/UgiFactory.java   | 22 +++++++
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  6 ++
 .../impl/LlapZookeeperRegistryImpl.java         |  1 +
 .../llap/daemon/impl/ContainerRunnerImpl.java   | 10 ++-
 .../hive/llap/daemon/impl/LlapDaemon.java       | 10 ++-
 .../llap/daemon/impl/TaskRunnerCallable.java    |  8 ++-
 .../hive/llap/io/api/impl/LlapIoImpl.java       |  6 +-
 .../llap/io/decode/ColumnVectorProducer.java    |  2 +-
 .../llap/security/LlapUgiFactoryFactory.java    | 67 ++++++++++++++++++++
 .../daemon/impl/TaskExecutorTestHelpers.java    |  2 +-
 .../apache/hadoop/hive/shims/Hadoop23Shims.java | 44 +++++++++++++
 .../apache/hadoop/hive/shims/HadoopShims.java   |  2 +
 12 files changed, 169 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/common/src/java/org/apache/hadoop/hive/common/UgiFactory.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/UgiFactory.java b/common/src/java/org/apache/hadoop/hive/common/UgiFactory.java
new file mode 100644
index 0000000..5b1ce60
--- /dev/null
+++ b/common/src/java/org/apache/hadoop/hive/common/UgiFactory.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.common;
+
+import java.io.IOException;
+import org.apache.hadoop.security.UserGroupInformation;
+
+public interface UgiFactory {
+  UserGroupInformation createUgi() throws IOException;
+}
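
The new interface is a single-method seam between LLAP and whatever identity scheme is configured. As a minimal sketch (not part of this patch; UgiRunner and runAs are hypothetical names), a caller would request a UGI per unit of work and fall back to the current user when the factory yields nothing:

    // Sketch only; not in this commit.
    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;

    import org.apache.hadoop.hive.common.UgiFactory;
    import org.apache.hadoop.security.UserGroupInformation;

    public class UgiRunner {
      public static void runAs(UgiFactory factory, final Runnable work)
          throws IOException, InterruptedException {
        UserGroupInformation ugi = (factory == null) ? null : factory.createUgi();
        if (ugi == null) {
          work.run(); // no factory (or a no-op one): run as the current user
          return;
        }
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() {
            work.run(); // FS access inside sees the factory-provided identity
            return null;
          }
        });
      }
    }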

http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 2d7489b..bb0ca3a 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2702,6 +2702,12 @@ public class HiveConf extends Configuration {
     LLAP_ZKSM_KERBEROS_KEYTAB_FILE("hive.llap.zk.sm.keytab.file", "",
         "The path to the Kerberos Keytab file containing the principal to use to talk to\n" +
         "ZooKeeper for ZooKeeper SecretManager."),
+    LLAP_FS_KERBEROS_PRINCIPAL("hive.llap.task.principal", "",
+        "The name of the principal to use to run tasks. By default, the clients are required\n" +
+        "to provide tokens to access HDFS/etc."),
+    LLAP_FS_KERBEROS_KEYTAB_FILE("hive.llap.task.keytab.file", "",
+        "The path to the Kerberos Keytab file containing the principal to use to run tasks.\n" +
+        "By default, the clients are required to provide tokens to access HDFS/etc."),
     LLAP_ZKSM_ZK_CONNECTION_STRING("hive.llap.zk.sm.connectionString", "",
         "ZooKeeper connection string for ZooKeeper SecretManager."),
     LLAP_ZK_REGISTRY_USER("hive.llap.zk.registry.user", "",
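
The two new settings are a pair: createFsUgiFactory (further down in this patch) throws if a keytab is set without a principal or vice versa. A hedged example of setting them programmatically; the principal and keytab path are made-up placeholder values:

    // Illustration only; values are hypothetical.
    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.LLAP_FS_KERBEROS_PRINCIPAL, "llap/_HOST@EXAMPLE.COM");
    conf.setVar(HiveConf.ConfVars.LLAP_FS_KERBEROS_KEYTAB_FILE, "/etc/security/keytabs/llap.keytab");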

http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapZookeeperRegistryImpl.java
----------------------------------------------------------------------
diff --git a/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapZookeeperRegistryImpl.java b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapZookeeperRegistryImpl.java
index 154081d..551fcc5 100644
--- a/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapZookeeperRegistryImpl.java
+++ b/llap-client/src/java/org/apache/hadoop/hive/llap/registry/impl/LlapZookeeperRegistryImpl.java
@@ -503,6 +503,7 @@ public class LlapZookeeperRegistryImpl implements ServiceRegistry {
     @Override
     public Map<String, ServiceInstance> getAll() {
       Map<String, ServiceInstance> instances = new LinkedHashMap<>();
+      // TODO: we could refresh instancesCache here on previous failure
       for (ChildData childData : instancesCache.getCurrentData()) {
         if (childData != null) {
           byte[] data = childData.getData();

http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/ContainerRunnerImpl.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/ContainerRunnerImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/ContainerRunnerImpl.java
index d439c07..6f21d3c 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/ContainerRunnerImpl.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/ContainerRunnerImpl.java
@@ -25,6 +25,7 @@ import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.UgiFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.llap.DaemonId;
@@ -97,12 +98,13 @@ public class ContainerRunnerImpl extends CompositeService implements ContainerRu
   private final HadoopShim tezHadoopShim;
   private final LlapSignerImpl signer;
   private final String clusterId;
+  private final UgiFactory fsUgiFactory;
 
   public ContainerRunnerImpl(Configuration conf, int numExecutors, int waitQueueSize,
       boolean enablePreemption, String[] localDirsBase, AtomicReference<Integer> localShufflePort,
       AtomicReference<InetSocketAddress> localAddress,
       long totalMemoryAvailableBytes, LlapDaemonExecutorMetrics metrics,
-      AMReporter amReporter, ClassLoader classLoader, DaemonId daemonId) {
+      AMReporter amReporter, ClassLoader classLoader, DaemonId daemonId, UgiFactory fsUgiFactory) {
     super("ContainerRunnerImpl");
     this.conf = conf;
     Preconditions.checkState(numExecutors > 0,
@@ -112,6 +114,7 @@ public class ContainerRunnerImpl extends CompositeService implements ContainerRu
     this.amReporter = amReporter;
     this.signer = UserGroupInformation.isSecurityEnabled()
         ? new LlapSignerImpl(conf, daemonId) : null;
+    this.fsUgiFactory = fsUgiFactory;
 
     this.clusterId = daemonId.getClusterString();
     this.queryTracker = new QueryTracker(conf, localDirsBase, clusterId);
@@ -230,10 +233,11 @@ public class ContainerRunnerImpl extends CompositeService implements ContainerRu
       // Used for re-localization, to add the user specified configuration (conf_pb_binary_stream)
 
       Configuration callableConf = new Configuration(getConfig());
+      UserGroupInformation taskUgi = fsUgiFactory == null ? null : fsUgiFactory.createUgi();
       TaskRunnerCallable callable = new TaskRunnerCallable(request, fragmentInfo, callableConf,
           new LlapExecutionContext(localAddress.get().getHostName(), queryTracker), env,
           credentials, memoryPerExecutor, amReporter, confParams, metrics, killedTaskHandler,
-          this, tezHadoopShim, attemptId, vertex);
+          this, tezHadoopShim, attemptId, vertex, taskUgi);
       submissionState = executorService.schedule(callable);
 
       if (LOG.isInfoEnabled()) {
@@ -289,7 +293,7 @@ public class ContainerRunnerImpl extends CompositeService implements ContainerRu
       queryTracker.registerDagQueryId(
           new QueryIdentifier(source.getContext().getApplicationId().toString(),
               source.getContext().getDagIdentifier()),
-          HiveConf.getVar(source.getConf(), HiveConf.ConfVars.HIVEQUERYID));
+          HiveConf.getVar(source.getConf(), HiveConf.ConfVars.HIVEQUERYID));;
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
index 2faedcd..c1ef0f4 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/LlapDaemon.java
@@ -33,6 +33,7 @@ import javax.management.ObjectName;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.LogUtils;
+import org.apache.hadoop.hive.common.UgiFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.llap.DaemonId;
@@ -54,6 +55,7 @@ import org.apache.hadoop.hive.llap.metrics.LlapDaemonExecutorMetrics;
 import org.apache.hadoop.hive.llap.metrics.LlapMetricsSystem;
 import org.apache.hadoop.hive.llap.metrics.MetricsUtils;
 import org.apache.hadoop.hive.llap.registry.impl.LlapRegistryService;
+import org.apache.hadoop.hive.llap.security.LlapUgiFactoryFactory;
 import org.apache.hadoop.hive.llap.shufflehandler.ShuffleHandler;
 import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.UDF;
@@ -246,9 +248,15 @@ public class LlapDaemon extends CompositeService implements ContainerRunner, Lla
     this.server = new LlapProtocolServerImpl(
         numHandlers, this, srvAddress, mngAddress, srvPort, mngPort, daemonId);
 
+    UgiFactory fsUgiFactory = null;
+    try {
+      fsUgiFactory = LlapUgiFactoryFactory.createFsUgiFactory(daemonConf);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
     this.containerRunner = new ContainerRunnerImpl(daemonConf, numExecutors, waitQueueSize,
         enablePreemption, localDirs, this.shufflePort, srvAddress, executorMemoryBytes, metrics,
-        amReporter, executorClassLoader, daemonId);
+        amReporter, executorClassLoader, daemonId, fsUgiFactory);
     addIfService(containerRunner);
 
     // Not adding the registry as a service, since we need to control when it is initialized - conf used to pickup properties.

http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
index 0d9882b..f97585d 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/TaskRunnerCallable.java
@@ -114,6 +114,7 @@ public class TaskRunnerCallable extends CallableWithNdc<TaskRunner2Result> {
   private final AtomicBoolean isCompleted = new AtomicBoolean(false);
   private final AtomicBoolean killInvoked = new AtomicBoolean(false);
   private final SignableVertexSpec vertex;
+  private UserGroupInformation taskUgi;
 
   @VisibleForTesting
   public TaskRunnerCallable(SubmitWorkRequestProto request, QueryFragmentInfo fragmentInfo,
@@ -125,7 +126,7 @@ public class TaskRunnerCallable extends CallableWithNdc<TaskRunner2Result> {
                      KilledTaskHandler killedTaskHandler,
                      FragmentCompletionHandler fragmentCompleteHandler,
                      HadoopShim tezHadoopShim, TezTaskAttemptID attemptId,
-                     SignableVertexSpec vertex) {
+                     SignableVertexSpec vertex, UserGroupInformation taskUgi) {
     this.request = request;
     this.fragmentInfo = fragmentInfo;
     this.conf = conf;
@@ -152,6 +153,7 @@ public class TaskRunnerCallable extends CallableWithNdc<TaskRunner2Result> {
     this.killedTaskHandler = killedTaskHandler;
     this.fragmentCompletionHanler = fragmentCompleteHandler;
     this.tezHadoopShim = tezHadoopShim;
+    this.taskUgi = taskUgi;
   }
 
   public long getStartTime() {
@@ -188,7 +190,9 @@ public class TaskRunnerCallable extends CallableWithNdc<TaskRunner2Result> {
 
     // TODO Consolidate this code with TezChild.
     runtimeWatch.start();
-    UserGroupInformation taskUgi = UserGroupInformation.createRemoteUser(vertex.getUser());
+    if (taskUgi == null) {
+      taskUgi = UserGroupInformation.createRemoteUser(vertex.getUser());
+    }
     taskUgi.addCredentials(credentials);
 
     Map<String, ByteBuffer> serviceConsumerMetadata = new HashMap<>();
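
With this change the UGI is injected by the daemon rather than always being a remote-user UGI derived from the vertex; the remote-user path remains the fallback. A sketch of the intended effect (assuming, as in TezChild-style code, that the fragment body ultimately runs under taskUgi via doAs; taskRunner below is a hypothetical stand-in, not a name from this patch):

    // Sketch, not the actual call site in this commit.
    taskUgi.addCredentials(credentials);
    TaskRunner2Result result = taskUgi.doAs(new PrivilegedExceptionAction<TaskRunner2Result>() {
      @Override
      public TaskRunner2Result run() throws Exception {
        return taskRunner.run(); // HDFS reads here use the keytab identity when one is configured
      }
    });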

http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java
index fea3dc7..9316dff 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java
@@ -70,10 +70,10 @@ public class LlapIoImpl implements LlapIo<VectorizedRowBatch> {
 
   private final ColumnVectorProducer cvp;
   private final ListeningExecutorService executor;
-  private LlapDaemonCacheMetrics cacheMetrics;
-  private LlapDaemonIOMetrics ioMetrics;
+  private final LlapDaemonCacheMetrics cacheMetrics;
+  private final LlapDaemonIOMetrics ioMetrics;
   private ObjectName buddyAllocatorMXBean;
-  private Allocator allocator;
+  private final Allocator allocator;
 
   private LlapIoImpl(Configuration conf) throws IOException {
     String ioMode = HiveConf.getVar(conf, HiveConf.ConfVars.LLAP_IO_MEMORY_MODE);

http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ColumnVectorProducer.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ColumnVectorProducer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ColumnVectorProducer.java
index b3b571d..abd4533 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ColumnVectorProducer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ColumnVectorProducer.java
@@ -33,4 +33,4 @@ public interface ColumnVectorProducer {
   ReadPipeline createReadPipeline(Consumer<ColumnVectorBatch> consumer, FileSplit split,
       List<Integer> columnIds, SearchArgument sarg, String[] columnNames,
       QueryFragmentCounters counters);
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/llap-server/src/java/org/apache/hadoop/hive/llap/security/LlapUgiFactoryFactory.java
----------------------------------------------------------------------
diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/security/LlapUgiFactoryFactory.java b/llap-server/src/java/org/apache/hadoop/hive/llap/security/LlapUgiFactoryFactory.java
new file mode 100644
index 0000000..f6c216f
--- /dev/null
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/security/LlapUgiFactoryFactory.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.llap.security;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.UgiFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.llap.LlapUtil;
+import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hadoop.security.UserGroupInformation;
+
+/** No Java application is complete until it has a FactoryFactory. */
+public class LlapUgiFactoryFactory {
+  private static final HadoopShims SHIMS = ShimLoader.getHadoopShims();
+
+  private static class KerberosUgiFactory implements UgiFactory {
+    private final UserGroupInformation baseUgi;
+
+    public KerberosUgiFactory(String keytab, String principal) throws IOException {
+      baseUgi = LlapUtil.loginWithKerberos(principal, keytab);
+    }
+
+    @Override
+    public UserGroupInformation createUgi() throws IOException {
+      // Make sure the UGI is current.
+      baseUgi.checkTGTAndReloginFromKeytab();
+      // TODO: the only reason this is done this way is because we want unique Subject-s so that
+      //       the FS.get gives different FS objects to different fragments.
+      // TODO: could we log in from ticket cache instead? no good method on UGI right now.
+      return SHIMS.cloneUgi(baseUgi);
+    }
+  }
+
+  private static class NoopUgiFactory implements UgiFactory {
+    @Override
+    public UserGroupInformation createUgi() throws IOException {
+      return null;
+    }
+  }
+
+  public static UgiFactory createFsUgiFactory(Configuration conf) throws IOException {
+    String fsKeytab = HiveConf.getVar(conf, ConfVars.LLAP_FS_KERBEROS_KEYTAB_FILE),
+        fsPrincipal = HiveConf.getVar(conf, ConfVars.LLAP_FS_KERBEROS_PRINCIPAL);
+    boolean hasFsKeytab = fsKeytab != null && !fsKeytab.isEmpty(),
+        hasFsPrincipal = fsPrincipal != null && !fsPrincipal.isEmpty();
+    if (hasFsKeytab != hasFsPrincipal) {
+      throw new IOException("Inconsistent FS keytab settings " + fsKeytab + "; " + fsPrincipal);
+    }
+    return hasFsKeytab ? new KerberosUgiFactory(fsKeytab, fsPrincipal) : new NoopUgiFactory();
+  }
+}
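
Factory selection is driven entirely by configuration: neither setting yields the no-op factory (createUgi() returns null, preserving the token-based default), both yield the Kerberos factory, and a mismatch fails fast. A small sketch of the three cases; the conf values are made up, and on a real cluster the last call performs an actual keytab login:

    // Illustration of createFsUgiFactory selection; values are hypothetical.
    HiveConf conf = new HiveConf();
    UgiFactory noop = LlapUgiFactoryFactory.createFsUgiFactory(conf);
    assert noop.createUgi() == null; // NoopUgiFactory: client tokens remain the default

    conf.setVar(HiveConf.ConfVars.LLAP_FS_KERBEROS_PRINCIPAL, "llap/_HOST@EXAMPLE.COM");
    // Principal without keytab (or vice versa): createFsUgiFactory now throws IOException.

    conf.setVar(HiveConf.ConfVars.LLAP_FS_KERBEROS_KEYTAB_FILE, "/etc/security/keytabs/llap.keytab");
    UgiFactory kerberos = LlapUgiFactoryFactory.createFsUgiFactory(conf); // logs in from the keytab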

http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java
----------------------------------------------------------------------
diff --git a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java
index 96d626a..1df5253 100644
--- a/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java
+++ b/llap-server/src/test/org/apache/hadoop/hive/llap/daemon/impl/TaskExecutorTestHelpers.java
@@ -151,7 +151,7 @@ public class TaskExecutorTestHelpers {
               LlapDaemonExecutorMetrics.class),
           mock(KilledTaskHandler.class), mock(
               FragmentCompletionHandler.class), new DefaultHadoopShim(), null,
-              requestProto.getWorkSpec().getVertex());
+              requestProto.getWorkSpec().getVertex(), null);
       this.workTime = workTime;
       this.canFinish = canFinish;
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
----------------------------------------------------------------------
diff --git a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
index 273099e..68fac17 100644
--- a/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
+++ b/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
@@ -21,6 +21,7 @@ import java.io.DataInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
@@ -36,6 +37,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
+import javax.security.auth.Subject;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
@@ -1252,4 +1255,45 @@ public class Hadoop23Shims extends HadoopShimsSecure {
   public long getFileId(FileSystem fs, String path) throws IOException {
     return ensureDfs(fs).getClient().getFileInfo(path).getFileId();
   }
+
+
+  private final static java.lang.reflect.Method getSubjectMethod;
+  private final static java.lang.reflect.Constructor<UserGroupInformation> ugiCtor;
+  private final static String ugiCloneError;
+  static {
+    Class<UserGroupInformation> clazz = UserGroupInformation.class;
+    java.lang.reflect.Method method = null;
+    java.lang.reflect.Constructor<UserGroupInformation> ctor;
+    String error = null;
+    try {
+      method = clazz.getMethod("getSubject");
+      method.setAccessible(true);
+      ctor = clazz.getConstructor(Subject.class);
+      ctor.setAccessible(true);
+    } catch (Throwable t) {
+      error = t.getMessage();
+      method = null;
+      ctor = null;
+      LOG.error("Cannot create UGI reflection methods", t);
+    }
+    getSubjectMethod = method;
+    ugiCtor = ctor;
+    ugiCloneError = error;
+  }
+
+  @Override
+  public UserGroupInformation cloneUgi(UserGroupInformation baseUgi) throws IOException {
+    // Based on UserGroupInformation::createProxyUser.
+    // TODO: use a proper method after we can depend on HADOOP-13081.
+    if (getSubjectMethod == null) {
+      throw new IOException("The UGI method was not found: " + ugiCloneError);
+    }
+    Subject subject = new Subject();
+    try {
+      subject.getPrincipals().addAll(((Subject)getSubjectMethod.invoke(baseUgi)).getPrincipals());
+      return ugiCtor.newInstance(subject);
+    } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
+      throw new IOException(e);
+    }
+  }
 }
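
The reflection dance exists solely to give each clone a fresh Subject: the FileSystem cache keys on the UGI, and UGI equality is Subject identity, so distinct Subjects yield distinct cached FileSystem instances per fragment (the TODO above points at HADOOP-13081 as the proper replacement). A sketch of that effect, assuming an existing Kerberos login and the usual Hadoop imports (Configuration, FileSystem, UserGroupInformation, PrivilegedExceptionAction):

    // Sketch; assumes UserGroupInformation.getLoginUser() is a keytab login.
    final Configuration conf = new Configuration();
    HadoopShims shims = ShimLoader.getHadoopShims();
    UserGroupInformation base = UserGroupInformation.getLoginUser();
    UserGroupInformation clone1 = shims.cloneUgi(base);
    UserGroupInformation clone2 = shims.cloneUgi(base);
    FileSystem fs1 = clone1.doAs(new PrivilegedExceptionAction<FileSystem>() {
      @Override
      public FileSystem run() throws IOException { return FileSystem.get(conf); }
    });
    FileSystem fs2 = clone2.doAs(new PrivilegedExceptionAction<FileSystem>() {
      @Override
      public FileSystem run() throws IOException { return FileSystem.get(conf); }
    });
    assert fs1 != fs2; // same principals, distinct Subjects, so distinct cached FS objects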

http://git-wip-us.apache.org/repos/asf/hive/blob/7a4fd337/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
----------------------------------------------------------------------
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
index 3e30758..9b0bc35 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
@@ -629,4 +629,6 @@ public interface HadoopShims {
    */
   long getFileId(FileSystem fs, String path) throws IOException;
 
+  /** Clones the UGI and the Subject. */
+  UserGroupInformation cloneUgi(UserGroupInformation baseUgi) throws IOException;
 }
