I was able to fix that issue, but now I get another error:

[127.0.0.1:10000] hive> CREATE TABLE IF NOT EXISTS pagecounts_hbase (rowkey STRING, pageviews STRING, bytes STRING)
    STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
    WITH SERDEPROPERTIES ('hbase.columns.mapping' = ':key,f:c1,f:c2')
    TBLPROPERTIES ('hbase.table.name' = 'pagecounts');
[Hive Error]: Query returned non-zero code: 1, cause: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask.
MetaException(message:java.lang.IllegalArgumentException: Not a host:port pair: PBUF
"
ibrar-virtual-machine���߯��)��
    at org.apache.hadoop.hbase.util.Addressing.parseHostname(Addressing.java:60)
    at org.apache.hadoop.hbase.ServerName.<init>(ServerName.java:96)
    at org.apache.hadoop.hbase.ServerName.parseVersionedServerName(ServerName.java:278)
    at org.apache.hadoop.hbase.MasterAddressTracker.bytesToServerName(MasterAddressTracker.java:77)
    at org.apache.hadoop.hbase.MasterAddressTracker.getMasterAddress(MasterAddressTracker.java:61)
    at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.getMaster(HConnectionManager.java:631)
    at org.apache.hadoop.hbase.client.HBaseAdmin.<init>(HBaseAdmin.java:106)
    at org.apache.hadoop.hive.hbase.HBaseStorageHandler.getHBaseAdmin(HBaseStorageHandler.java:84)
    at org.apache.hadoop.hive.hbase.HBaseStorageHandler.preCreateTable(HBaseStorageHandler.java:162)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:554)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:547)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
    at com.sun.proxy.$Proxy7.createTable(Unknown Source)
    at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:613)
    at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4194)
    at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:281)
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:153)
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:85)
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1472)
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1239)
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1057)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:880)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:870)
    at org.apache.hadoop.hive.service.HiveServer$HiveServerHandler.execute(HiveServer.java:198)
    at org.apache.hadoop.hive.service.ThriftHive$Processor$execute.getResult(ThriftHive.java:644)
    at org.apache.hadoop.hive.service.ThriftHive$Processor$execute.getResult(ThriftHive.java:628)
    at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
    at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
    at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:206)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)
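
The "Not a host:port pair: PBUF ..." message is what a pre-0.96 HBase client typically prints when it tries to parse the protobuf-encoded master address that HBase 0.96+ writes to ZooKeeper, so it is worth confirming that the HBase jars on Hive's classpath match the HBase server that is actually running. Below is a minimal session sketch, assuming a single-node setup with ZooKeeper on localhost:2181 (both of those values are assumptions, not taken from this thread):

    -- Assumed single-node values; adjust to the real ZooKeeper ensemble.
    SET hbase.zookeeper.quorum=localhost;
    SET hbase.zookeeper.property.clientPort=2181;

    -- Retry the DDL; if the PBUF parse error persists, the HBase client jars
    -- loaded by Hive are most likely older than the running HBase server.
    CREATE TABLE IF NOT EXISTS pagecounts_hbase (rowkey STRING, pageviews STRING, bytes STRING)
    STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
    WITH SERDEPROPERTIES ('hbase.columns.mapping' = ':key,f:c1,f:c2')
    TBLPROPERTIES ('hbase.table.name' = 'pagecounts');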


On Wed, Jun 17, 2015 at 3:51 PM, Ibrar Ahmed <ibrar.ah...@gmail.com> wrote:

> Hi,
>
> Whats wrong with my settings?
>
> [127.0.0.1:10000] hive> CREATE TABLE IF NOT EXISTS pagecounts_hbase (rowkey STRING, pageviews STRING, bytes STRING)
>     STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
>     WITH SERDEPROPERTIES ('hbase.columns.mapping' = ':key,f:c1,f:c2')
>     TBLPROPERTIES ('hbase.table.name' = 'pagecounts');
>
> [Hive Error]: Query returned non-zero code: 1, cause: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask.
> MetaException(message:MetaException(message:org.apache.hadoop.hbase.MasterNotRunningException: Retried 10 times
>     at org.apache.hadoop.hbase.client.HBaseAdmin.<init>(HBaseAdmin.java:127)
>     at org.apache.hadoop.hive.hbase.HBaseStorageHandler.getHBaseAdmin(HBaseStorageHandler.java:84)
>     at org.apache.hadoop.hive.hbase.HBaseStorageHandler.preCreateTable(HBaseStorageHandler.java:162)
>     at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:554)
>     at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:547)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:606)
>     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
>     at com.sun.proxy.$Proxy7.createTable(Unknown Source)
>     at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:613)
>     at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4194)
>     at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:281)
>     at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:153)
>     at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:85)
>     at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1472)
>     at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1239)
>     at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1057)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:880)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:870)
>     at org.apache.hadoop.hive.service.HiveServer$HiveServerHandler.execute(HiveServer.java:198)
>     at org.apache.hadoop.hive.service.ThriftHive$Processor$execute.getResult(ThriftHive.java:644)
>     at org.apache.hadoop.hive.service.ThriftHive$Processor$execute.getResult(ThriftHive.java:628)
>     at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
>     at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
>     at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:206)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>     at java.lang.Thread.run(Thread.java:745)
> )
>     at org.apache.hadoop.hive.hbase.HBaseStorageHandler.getHBaseAdmin(HBaseStorageHandler.java:88)
>     at org.apache.hadoop.hive.hbase.HBaseStorageHandler.preCreateTable(HBaseStorageHandler.java:162)
>     at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:554)
>     at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:547)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:606)
>     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:89)
>     at com.sun.proxy.$Proxy7.createTable(Unknown Source)
>     at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:613)
>     at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4194)
>     at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:281)
>     at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:153)
>     at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:85)
>     at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1472)
>     at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1239)
>     at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1057)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:880)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:870)
>     at org.apache.hadoop.hive.service.HiveServer$HiveServerHandler.execute(HiveServer.java:198)
>     at org.apache.hadoop.hive.service.ThriftHive$Processor$execute.getResult(ThriftHive.java:644)
>     at org.apache.hadoop.hive.service.ThriftHive$Processor$execute.getResult(ThriftHive.java:628)
>     at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
>     at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
>     at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:206)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>     at java.lang.Thread.run(Thread.java:745)
>
>
> --ibrar
>
