[ https://issues.apache.org/jira/browse/HIVE-10857?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14579849#comment-14579849 ]

Sushanth Sowmyan commented on HIVE-10857:
-----------------------------------------

(Also committed to branch-1, forgot earlier)

> Accumulo storage handler fails throwing java.lang.IllegalArgumentException: Cannot determine SASL mechanism for token class: class org.apache.accumulo.core.client.security.tokens.PasswordToken
> -----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>
>                 Key: HIVE-10857
>                 URL: https://issues.apache.org/jira/browse/HIVE-10857
>             Project: Hive
>          Issue Type: Bug
>    Affects Versions: 1.2.1
>            Reporter: Takahiko Saito
>            Assignee: Josh Elser
>             Fix For: 1.2.1
>
>         Attachments: HIVE-10857.2.patch, HIVE-10857.patch
>
>
> Creating a table with the Accumulo storage handler fails due to ACCUMULO-2815 (see the sketch after the stack trace below).
> {noformat}
> create table accumulo_1(key string, age int)
> stored by 'org.apache.hadoop.hive.accumulo.AccumuloStorageHandler'
> with serdeproperties ("accumulo.columns.mapping" = ":rowid,info:age");
> {noformat}
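> (For context: the storage handler takes its connection information from the session configuration rather than from the DDL itself. A typical invocation would look like the following; the property names are the ones read by AccumuloConnectionParameters, while the instance, ZooKeeper, and credential values here are placeholders only.)
> {noformat}
> hive -hiveconf accumulo.instance.name=accumulo \
>      -hiveconf accumulo.zookeepers=zk1:2181 \
>      -hiveconf accumulo.user.name=hive \
>      -hiveconf accumulo.user.pass=secret
> {noformat}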
> The error shows:
> {noformat}
> FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message:org.apache.accumulo.core.client.AccumuloException: java.lang.IllegalArgumentException: Cannot determine SASL mechanism for token class: class org.apache.accumulo.core.client.security.tokens.PasswordToken
>       at org.apache.accumulo.core.client.impl.ServerClient.execute(ServerClient.java:67)
>       at org.apache.accumulo.core.client.impl.ConnectorImpl.<init>(ConnectorImpl.java:67)
>       at org.apache.accumulo.core.client.ZooKeeperInstance.getConnector(ZooKeeperInstance.java:248)
>       at org.apache.hadoop.hive.accumulo.AccumuloConnectionParameters.getConnector(AccumuloConnectionParameters.java:125)
>       at org.apache.hadoop.hive.accumulo.AccumuloConnectionParameters.getConnector(AccumuloConnectionParameters.java:111)
>       at org.apache.hadoop.hive.accumulo.AccumuloStorageHandler.preCreateTable(AccumuloStorageHandler.java:245)
>       at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:664)
>       at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:657)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:606)
>       at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:156)
>       at com.sun.proxy.$Proxy5.createTable(Unknown Source)
>       at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:714)
>       at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4135)
>       at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:306)
>       at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
>       at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:88)
>       at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1650)
>       at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1409)
>       at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1192)
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
>       at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:213)
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:165)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:376)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:311)
>       at org.apache.hadoop.hive.cli.CliDriver.processReader(CliDriver.java:409)
>       at org.apache.hadoop.hive.cli.CliDriver.processFile(CliDriver.java:425)
>       at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:714)
>       at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
>       at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:606)
>       at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
>       at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
> Caused by: java.lang.IllegalArgumentException: Cannot determine SASL mechanism for token class: class org.apache.accumulo.core.client.security.tokens.PasswordToken
>       at org.apache.accumulo.core.rpc.SaslConnectionParams.updateFromToken(SaslConnectionParams.java:155)
>       at org.apache.accumulo.core.rpc.SaslConnectionParams.<init>(SaslConnectionParams.java:143)
>       at org.apache.accumulo.core.client.impl.ClientContext.getSaslParams(ClientContext.java:134)
>       at org.apache.accumulo.core.client.impl.ThriftTransportKey.<init>(ThriftTransportKey.java:42)
>       at org.apache.accumulo.core.client.impl.ServerClient.getConnection(ServerClient.java:135)
>       at org.apache.accumulo.core.client.impl.ServerClient.getConnection(ServerClient.java:117)
>       at org.apache.accumulo.core.client.impl.ServerClient.getConnection(ServerClient.java:113)
>       at org.apache.accumulo.core.client.impl.ServerClient.executeRaw(ServerClient.java:95)
>       at org.apache.accumulo.core.client.impl.ServerClient.execute(ServerClient.java:61)
>       ... 38 more
> {noformat}
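
For readers hitting this against a Kerberos-enabled (SASL) Accumulo instance: the root cause in the trace above is that SaslConnectionParams cannot map a PasswordToken onto a SASL mechanism. The sketch below is a minimal illustration against the Accumulo 1.7 client API of the failing token path and of the direction a fix takes (authenticating with a KerberosToken instead). It is not the committed patch, and the instance, ZooKeeper, and principal values are placeholders.

{noformat}
import org.apache.accumulo.core.client.ClientConfiguration;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.client.security.tokens.KerberosToken;

public class SaslTokenSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder instance/ZooKeeper values; withSasl(true) mirrors what a
    // Kerberos-enabled Accumulo (instance.rpc.sasl.enabled=true) requires of clients.
    ClientConfiguration conf = ClientConfiguration.loadDefault()
        .withInstance("accumulo")
        .withZkHosts("zk1:2181")
        .withSasl(true);
    ZooKeeperInstance instance = new ZooKeeperInstance(conf);

    // The storage handler currently builds a PasswordToken; with SASL enabled
    // this is the call that fails with "Cannot determine SASL mechanism ...":
    // instance.getConnector("hive", new PasswordToken("secret"));

    // With SASL/Kerberos, the client has to authenticate with a token backed by
    // the current Kerberos login instead (placeholder principal shown here).
    Connector conn = instance.getConnector("hive@EXAMPLE.COM", new KerberosToken());
    System.out.println("connected as " + conn.whoami());
  }
}
{noformat}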



