Repository: hive
Updated Branches:
  refs/heads/master a44d9f322 -> b34bfce15


HIVE-14015: SMB MapJoin failed for Hive on Spark when kerberized (Yongzhi Chen, reviewed by Chaoyu Tang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b34bfce1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b34bfce1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b34bfce1

Branch: refs/heads/master
Commit: b34bfce1576fc46ae55a5fb20d12fc766d029549
Parents: a44d9f3
Author: Yongzhi Chen <ych...@apache.org>
Authored: Wed Jun 15 10:07:48 2016 -0400
Committer: Yongzhi Chen <ych...@apache.org>
Committed: Sun Jun 19 21:53:38 2016 -0400

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java    | 7 +++++++
 1 file changed, 7 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/b34bfce1/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
index 27b1673..57b6c67 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SMBMapJoinOperator.java
@@ -52,6 +52,7 @@ import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.PriorityQueue;
 import org.apache.hive.common.util.ReflectionUtil;
 
@@ -196,6 +197,12 @@ public class SMBMapJoinOperator extends AbstractMapJoinOperator<SMBJoinDesc> imp
       FetchWork fetchWork = entry.getValue();
 
       JobConf jobClone = new JobConf(hconf);
+      if (UserGroupInformation.isSecurityEnabled()) {
+        String hadoopAuthToken = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
+        if(hadoopAuthToken != null){
+          jobClone.set("mapreduce.job.credentials.binary", hadoopAuthToken);
+        }
+      }
 
       TableScanOperator ts = (TableScanOperator)aliasToWork.get(alias);
       // push down projections
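
----------------------------------------------------------------------

For reference, the pattern the patch applies can be sketched in isolation as below. The surrounding class and method names are illustrative only and not part of this commit; the Hadoop calls (UserGroupInformation.isSecurityEnabled(), UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION) and the mapreduce.job.credentials.binary property are the same ones used in the diff above.

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.UserGroupInformation;

// Minimal sketch of the credential propagation done by the patch.
// Class and method names here are hypothetical, for illustration only.
public class CredentialPropagationSketch {

  // Clones a JobConf and, when Kerberos security is enabled, records the
  // location of the container's delegation-token file so that work driven
  // by the clone (e.g. a fetch of the small table) can authenticate to HDFS.
  public static JobConf cloneWithCredentials(JobConf hconf) {
    JobConf jobClone = new JobConf(hconf);
    if (UserGroupInformation.isSecurityEnabled()) {
      // HADOOP_TOKEN_FILE_LOCATION names the environment variable that points
      // at the token cache file handed to the running container.
      String hadoopAuthToken =
          System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
      if (hadoopAuthToken != null) {
        // MapReduce-style clients read binary delegation tokens from the file
        // named by this property.
        jobClone.set("mapreduce.job.credentials.binary", hadoopAuthToken);
      }
    }
    return jobClone;
  }
}

The intent of the fix is that the JobConf clone built for each FetchWork carries the task's delegation tokens, so the SMB MapJoin's small-table fetch no longer fails on a kerberized Hive on Spark cluster, as reported in HIVE-14015.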
