[ 
https://issues.apache.org/jira/browse/SPARK-14694?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15255036#comment-15255036
 ] 

zhangguancheng commented on SPARK-14694:
----------------------------------------

Content of hive-site.xml:
{quote}
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>hive.server2.thrift.port</name>
<value>10000</value> 
</property>

<property>
<name>hive.metastore.sasl.enabled</name>
<value>true</value> 
</property>

<property>
<name>hive.metastore.kerberos.keytab.file</name>
<value>/Users/zhangguancheng/Documents/github/bigdata/hive/apache-hive-1.1.1-bin/conf/hive.keytab</value>
 
</property>

<property>
<name>hive.metastore.kerberos.principal</name>
<value>hive/c1@C1</value> 
</property>

<property>
<name>hive.server2.authentication</name>
<value>KERBEROS</value> 
</property>

<property>
<name>hive.server2.authentication.kerberos.principal</name>
<value>hive/c1@C1</value> 
</property>

<property>
<name>hive.server2.authentication.kerberos.keytab</name>
<value>/Users/zhangguancheng/Documents/github/bigdata/hive/apache-hive-1.1.1-bin/conf/hive.keytab</value>
 
</property>

<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://localhost/test</value>
  <description>the URL of the MySQL database</description>
</property>

<property>
  <name>javax.jdo.option.ConnectionDriverName</name>
  <value>com.mysql.jdbc.Driver</value>
</property>

<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>test</value>
</property>

<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>test123</value>
</property>

<property>
  <name>datanucleus.autoCreateSchema</name>
  <value>false</value>
</property>

<property>
  <name>datanucleus.fixedDatastore</name>
  <value>true</value>
</property>

<property>
  <name>hive.metastore.uris</name>
  <value>thrift://localhost:9083</value>
  <description>IP address (or fully-qualified domain name) and port of the 
metastore host</description>
</property>

</configuration>
{quote}


> Thrift Server + Hive Metastore + Kerberos doesn't work
> ------------------------------------------------------
>
>                 Key: SPARK-14694
>                 URL: https://issues.apache.org/jira/browse/SPARK-14694
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 1.6.0, 1.6.1
>         Environment: Spark 1.6.1. compiled with hadoop 2.6.0, yarn, hive
> Hadoop 2.6.4 
> Hive 1.1.1 
> Kerberos
>            Reporter: zhangguancheng
>              Labels: security
>
> My Hive Metastore is MySQL-based. I started a spark thrift server on the same 
> node as the Hive Metastore. I can open beeline and run select statements but 
> for some commands like "show databases", I get an error:
> {quote}
> ERROR pool-24-thread-1 org.apache.thrift.transport.TSaslTransport:315 SASL 
> negotiation failure
> javax.security.sasl.SaslException: GSS initiate failed [Caused by 
> GSSException: No valid credentials provided (Mechanism level: Failed to find 
> any Kerberos tgt)]
>     at 
> com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
>     at 
> org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
>     at 
> org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
>     at 
> org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
>     at 
> org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
>     at 
> org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
>     at 
> org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
>     at 
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:420)
>     at 
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:236)
>     at 
> org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>     at 
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>     at 
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
>     at 
> org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
>     at 
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
>     at 
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
>     at 
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
>     at 
> org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
>     at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
>     at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
>     at org.apache.hadoop.hive.ql.exec.DDLTask.showDatabases(DDLTask.java:2223)
>     at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:385)
>     at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
>     at 
> org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:88)
>     at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1653)
>     at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1412)
>     at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1195)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
>     at 
> org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:495)
>     at 
> org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:484)
>     at 
> org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:290)
>     at 
> org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:237)
>     at 
> org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:236)
>     at 
> org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:279)
>     at 
> org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:484)
>     at 
> org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:474)
>     at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:605)
>     at 
> org.apache.spark.sql.hive.execution.HiveNativeCommand.run(HiveNativeCommand.scala:33)
>     at 
> org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:58)
>     at 
> org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:56)
>     at 
> org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:70)
>     at 
> org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
>     at 
> org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
>     at 
> org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
>     at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
>     at 
> org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55)
>     at 
> org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55)
>     at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145)
>     at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130)
>     at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52)
>     at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:817)
>     at 
> org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:211)
>     at 
> org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1$$anon$2.run(SparkExecuteStatementOperation.scala:154)
>     at 
> org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1$$anon$2.run(SparkExecuteStatementOperation.scala:151)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
>     at 
> org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1.run(SparkExecuteStatementOperation.scala:164)
>     at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>     at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>     at java.lang.Thread.run(Thread.java:745)
> Caused by: GSSException: No valid credentials provided (Mechanism level: 
> Failed to find any Kerberos tgt)
>     at 
> sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
>     at 
> sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
>     at 
> sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
>     at 
> sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
>     at 
> sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
>     at 
> sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
>     at 
> com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
>     ... 67 more
> {quote}



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org
For additional commands, e-mail: issues-h...@spark.apache.org

Reply via email to