[
https://issues.apache.org/jira/browse/HIVE-6915?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14163767#comment-14163767
]
Naveen Gangam commented on HIVE-6915:
-------------------------------------
I am using the fix from the patch with Hive 0.13 and I continue to see the
issue with Tez, while the same query works fine with MR.
I am running a simple "SELECT count(*) from hbasetable;":
{code}
2014-10-08 08:12:39,910 FATAL [InputInitializer [Map 1] #0] org.apache.hadoop.ipc.RpcClient: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:212)
    at org.apache.hadoop.hbase.security.HBaseSaslRpcClient.saslConnect(HBaseSaslRpcClient.java:179)
    at org.apache.hadoop.hbase.ipc.RpcClient$Connection.setupSaslConnection(RpcClient.java:770)
    at org.apache.hadoop.hbase.ipc.RpcClient$Connection.access$600(RpcClient.java:357)
    at org.apache.hadoop.hbase.ipc.RpcClient$Connection$2.run(RpcClient.java:891)
    at org.apache.hadoop.hbase.ipc.RpcClient$Connection$2.run(RpcClient.java:888)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
    at org.apache.hadoop.hbase.ipc.RpcClient$Connection.setupIOstreams(RpcClient.java:888)
    at org.apache.hadoop.hbase.ipc.RpcClient.getConnection(RpcClient.java:1543)
    at org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1442)
    at org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1661)
    at org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1719)
    at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:29990)
    at org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:308)
    at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:164)
    at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:59)
    at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:114)
    at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:90)
    at org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:283)
    at org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:188)
    at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:183)
    at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:110)
    at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:738)
    at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:178)
    at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:82)
    at org.apache.hadoop.hbase.client.MetaScanner.allTableRegions(MetaScanner.java:282)
    at org.apache.hadoop.hbase.client.HTable.getRegionLocations(HTable.java:616)
    at org.apache.hadoop.hbase.util.RegionSizeCalculator.<init>(RegionSizeCalculator.java:79)
    at org.apache.hadoop.hbase.util.RegionSizeCalculator.<init>(RegionSizeCalculator.java:64)
    at org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.getSplits(TableInputFormatBase.java:160)
    at org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat.getSplits(HiveHBaseTableInputFormat.java:482)
    at org.apache.hadoop.hive.ql.io.HiveInputFormat.addSplitsForGroup(HiveInputFormat.java:291)
    at org.apache.hadoop.hive.ql.io.HiveInputFormat.getSplits(HiveInputFormat.java:372)
    at org.apache.hadoop.mapred.split.TezGroupedSplitsInputFormat.getSplits(TezGroupedSplitsInputFormat.java:68)
    at org.apache.tez.mapreduce.hadoop.MRHelpers.generateOldSplits(MRHelpers.java:263)
    at org.apache.tez.mapreduce.common.MRInputAMSplitGenerator.initialize(MRInputAMSplitGenerator.java:139)
    at org.apache.tez.dag.app.dag.RootInputInitializerRunner$InputInitializerCallable$1.run(RootInputInitializerRunner.java:154)
    at org.apache.tez.dag.app.dag.RootInputInitializerRunner$InputInitializerCallable$1.run(RootInputInitializerRunner.java:146)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
    at org.apache.tez.dag.app.dag.RootInputInitializerRunner$InputInitializerCallable.call(RootInputInitializerRunner.java:146)
    at org.apache.tez.dag.app.dag.RootInputInitializerRunner$InputInitializerCallable.call(RootInputInitializerRunner.java:114)
    at java.util.concurrent.FutureTask.run(FutureTask.java:262)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)
Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
    at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
    at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:121)
    at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
    at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:223)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:193)
{code}
Notice
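Comparing the two traces: both fail inside the Tez AM's InputInitializer during split generation, but the original report (quoted below) dies while obtaining the HBase delegation token (TableMapReduceUtil.initCredentials -> TokenUtil.obtainToken), whereas with the patch applied the trace above gets past that point and dies in RegionSizeCalculator's scan of the meta table instead. For reference, here is a minimal sketch of the client-side step the working MR path relies on: caching an HBase delegation token in the job credentials so the AM never needs its own Kerberos TGT. This is not the HIVE-6915 patch; it only uses the HBase 0.98-era API that appears in the traces, the class name is made up for illustration, and the Tez-specific part (getting those credentials into the DAG/AM) is exactly the gap here, so it is only noted in a comment.
{code}
// Hedged sketch, not the HIVE-6915 patch: obtain an HBase delegation token on
// the kinit'd client and cache it in the job credentials, the same path the
// original stack trace goes through from HiveHBaseTableInputFormat.getSplits().
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapred.TableMapReduceUtil;
import org.apache.hadoop.mapred.JobConf;

public class ObtainHBaseToken {  // class name is illustrative only
  public static void main(String[] args) throws Exception {
    // JobConf seeded from hbase-site.xml so the token request can reach the
    // AuthenticationService coprocessor shown in the quoted trace below.
    JobConf jobConf = new JobConf(HBaseConfiguration.create());

    // Uses the submitting user's Kerberos login to fetch an HBase delegation
    // token and store it in the JobConf's Credentials.
    TableMapReduceUtil.initCredentials(jobConf);

    // With MR these credentials ship with the submitted job automatically; with
    // Tez they would additionally have to reach the DAG/AM credentials, which
    // is the piece that appears to be missing.
  }
}
{code}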
> Hive Hbase queries fail on secure Tez cluster
> ---------------------------------------------
>
> Key: HIVE-6915
> URL: https://issues.apache.org/jira/browse/HIVE-6915
> Project: Hive
> Issue Type: Bug
> Components: Tez
> Affects Versions: 0.13.0
> Environment: Kerberos secure Tez cluster
> Reporter: Deepesh Khandelwal
> Assignee: Siddharth Seth
> Attachments: HIVE-6915.1.patch, HIVE-6915.2.patch
>
>
> Hive queries reading and writing to HBase are currently failing with the
> following exception in a secure Tez cluster:
> {noformat}
> 2014-04-14 13:47:05,644 FATAL [InputInitializer [Map 1] #0] org.apache.hadoop.ipc.RpcClient: SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'.
> javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
>     at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:212)
>     at org.apache.hadoop.hbase.security.HBaseSaslRpcClient.saslConnect(HBaseSaslRpcClient.java:152)
>     at org.apache.hadoop.hbase.ipc.RpcClient$Connection.setupSaslConnection(RpcClient.java:792)
>     at org.apache.hadoop.hbase.ipc.RpcClient$Connection.access$800(RpcClient.java:349)
>     at org.apache.hadoop.hbase.ipc.RpcClient$Connection$2.run(RpcClient.java:918)
>     at org.apache.hadoop.hbase.ipc.RpcClient$Connection$2.run(RpcClient.java:915)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:415)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1557)
>     at org.apache.hadoop.hbase.ipc.RpcClient$Connection.setupIOstreams(RpcClient.java:915)
>     at org.apache.hadoop.hbase.ipc.RpcClient$Connection.writeRequest(RpcClient.java:1065)
>     at org.apache.hadoop.hbase.ipc.RpcClient$Connection.tracedWriteRequest(RpcClient.java:1032)
>     at org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1474)
>     at org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1684)
>     at org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1737)
>     at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:29288)
>     at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1562)
>     at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:87)
>     at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:84)
>     at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:121)
>     at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:97)
>     at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:90)
>     at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callBlockingMethod(CoprocessorRpcChannel.java:67)
>     at org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos$AuthenticationService$BlockingStub.getAuthenticationToken(AuthenticationProtos.java:4512)
>     at org.apache.hadoop.hbase.security.token.TokenUtil.obtainToken(TokenUtil.java:60)
>     at org.apache.hadoop.hbase.security.token.TokenUtil$3.run(TokenUtil.java:174)
>     at org.apache.hadoop.hbase.security.token.TokenUtil$3.run(TokenUtil.java:172)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:415)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1557)
>     at org.apache.hadoop.hbase.security.token.TokenUtil.obtainTokenForJob(TokenUtil.java:171)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:601)
>     at org.apache.hadoop.hbase.util.Methods.call(Methods.java:39)
>     at org.apache.hadoop.hbase.security.User$SecureHadoopUser.obtainAuthTokenForJob(User.java:334)
>     at org.apache.hadoop.hbase.mapred.TableMapReduceUtil.initCredentials(TableMapReduceUtil.java:201)
>     at org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat.getSplits(HiveHBaseTableInputFormat.java:415)
>     at org.apache.hadoop.hive.ql.io.HiveInputFormat.addSplitsForGroup(HiveInputFormat.java:291)
>     at org.apache.hadoop.hive.ql.io.HiveInputFormat.getSplits(HiveInputFormat.java:372)
>     at org.apache.hadoop.mapred.split.TezGroupedSplitsInputFormat.getSplits(TezGroupedSplitsInputFormat.java:68)
>     at org.apache.tez.mapreduce.hadoop.MRHelpers.generateOldSplits(MRHelpers.java:263)
>     at org.apache.tez.mapreduce.common.MRInputAMSplitGenerator.initialize(MRInputAMSplitGenerator.java:139)
>     at org.apache.tez.dag.app.dag.RootInputInitializerRunner$InputInitializerCallable$1.run(RootInputInitializerRunner.java:154)
>     at org.apache.tez.dag.app.dag.RootInputInitializerRunner$InputInitializerCallable$1.run(RootInputInitializerRunner.java:146)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:415)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1557)
>     at org.apache.tez.dag.app.dag.RootInputInitializerRunner$InputInitializerCallable.call(RootInputInitializerRunner.java:146)
>     at org.apache.tez.dag.app.dag.RootInputInitializerRunner$InputInitializerCallable.call(RootInputInitializerRunner.java:114)
>     at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:166)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>     at java.lang.Thread.run(Thread.java:722)
> Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
>     at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
>     at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:121)
>     at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
>     at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:223)
>     at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
>     at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
>     at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:193)
> ... 55 more
> {noformat}
> kinit was performed by the client. The error appears in the Tez application logs.
> The queries work fine when I change hive.execution.engine to MR.
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)