[
https://issues.apache.org/jira/browse/DRILL-1160?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14154783#comment-14154783
]
Miguel Ping commented on DRILL-1160:
------------------------------------
I put the jars in the jars/ext folder, but I am hitting a protobuf error.
This is a major dealbreaker for me:
{code}
java.lang.UnsupportedOperationException: This is supposed to be overridden by
subclasses.
at
com.google.protobuf.GeneratedMessage.getUnknownFields(GeneratedMessage.java:180)
~[protobuf-java-2.5.0.jar:na]
at
org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$GetFileInfoRequestProto.getSerializedSize(ClientNamenodeProtocolProtos.java:30108)
~[hadoop-hdfs-2.0.0-cdh4.7.0.jar:na]
at
com.google.protobuf.AbstractMessageLite.toByteString(AbstractMessageLite.java:49)
~[protobuf-java-2.5.0.jar:na]
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.constructRpcRequest(ProtobufRpcEngine.java:149)
~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:193)
~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at com.sun.proxy.$Proxy31.getFileInfo(Unknown Source) ~[na:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
~[na:1.8.0_05]
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
~[na:1.8.0_05]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[na:1.8.0_05]
at java.lang.reflect.Method.invoke(Method.java:483) ~[na:1.8.0_05]
at
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:164)
~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
[60/1575]
at
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:83)
~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at com.sun.proxy.$Proxy31.getFileInfo(Unknown Source) ~[na:na]
at
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:629)
~[hadoop-hdfs-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1545)
~[hadoop-hdfs-2.0.0-cdh4.7.0.jar:na]
at
org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:820)
~[hadoop-hdfs-2.0.0-cdh4.7.0.jar:na]
at
org.apache.hadoop.fs.FileSystem.globStatusInternal(FileSystem.java:1683)
~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem.globStatus(FileSystem.java:1629)
~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem.globStatus(FileSystem.java:1604)
~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at
org.apache.drill.exec.dotdrill.DotDrillUtil.getDotDrills(DotDrillUtil.java:57)
~[drill-java-exec-0.5.0-incubating-rebuffed.jar:0.5.0-incubating]
at
org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory$WorkspaceSchema.getTable(WorkspaceSchemaFactory.java:248)
~[drill-java-exec-0.5.0-incubating-rebuffed.jar:0.5.0-incubating]
at
net.hydromatic.optiq.jdbc.SimpleOptiqSchema.getTable(SimpleOptiqSchema.java:75)
[optiq-core-0.9-drill-r2.jar:na]
at
net.hydromatic.optiq.prepare.OptiqCatalogReader.getTableFrom(OptiqCatalogReader.java:87)
[optiq-core-0.9-drill-r2.jar:na]
at
net.hydromatic.optiq.prepare.OptiqCatalogReader.getTable(OptiqCatalogReader.java:70)
[optiq-core-0.9-drill-r2.jar:na]
at
net.hydromatic.optiq.prepare.OptiqCatalogReader.getTable(OptiqCatalogReader.java:42)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.EmptyScope.getTableNamespace(EmptyScope.java:67)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.IdentifierNamespace.validateImpl(IdentifierNamespace.java:75)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.AbstractNamespace.validate(AbstractNamespace.java:85)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.SqlValidatorImpl.validateNamespace(SqlValidatorImpl.java:779)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.SqlValidatorImpl.validateQuery(SqlValidatorImpl.java:768)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.SqlValidatorImpl.validateFrom(SqlValidatorImpl.java:2599)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.SqlValidatorImpl.validateSelect(SqlValidatorImpl.java:2807)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.SelectNamespace.validateImpl(SelectNamespace.java:60)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.AbstractNamespace.validate(AbstractNamespace.java:85)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.SqlValidatorImpl.validateNamespace(SqlValidatorImpl.java:779)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.SqlValidatorImpl.validateQuery(SqlValidatorImpl.java:768)
[optiq-core-0.9-drill-r2.jar:na]
at org.eigenbase.sql.SqlSelect.validate(SqlSelect.java:208)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.SqlValidatorImpl.validateScopedExpression(SqlValidatorImpl.java:742)
[optiq-core-0.9-drill-r2.jar:na]
at
org.eigenbase.sql.validate.SqlValidatorImpl.validate(SqlValidatorImpl.java:458)
[optiq-core-0.9-drill-r2.jar:na]
at
net.hydromatic.optiq.prepare.PlannerImpl.validate(PlannerImpl.java:173)
[optiq-core-0.9-drill-r2.jar:na]
at
org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.validateNode(DefaultSqlHandler.java:138)
[drill-java-exec-0.5.0-incubating-rebuffed.jar:0.5.0-incubating]
at
org.apache.drill.exec.planner.sql.handlers.DefaultSqlHandler.getPlan(DefaultSqlHandler.java:118)
[drill-java-exec-0.5.0-incubating-rebuffed.jar:0.5.0-incubating]
at
org.apache.drill.exec.planner.sql.DrillSqlWorker.getPlan(DrillSqlWorker.java:121)
[drill-java-exec-0.5.0-incubating-rebuffed.jar:0.5.0-incubating]
at org.apache.drill.exec.work.foreman.Foreman.runSQL(Foreman.java:422)
[drill-java-exec-0.5.0-incubating-rebuffed.jar:0.5.0-incubating]
[27/1575]
at org.apache.drill.exec.work.foreman.Foreman.run(Foreman.java:220)
[drill-java-exec-0.5.0-incubating-rebuffed.jar:0.5.0-incubating]
at
org.apache.drill.exec.work.WorkManager$RunnableWrapper.run(WorkManager.java:250)
[drill-java-exec-0.5.0-incubating-rebuffed.jar:0.5.0-incubating]
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
[na:1.8.0_05]
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
[na:1.8.0_05]
at java.lang.Thread.run(Thread.java:745) [na:1.8.0_05]
{code}
> Need changes to Drill classpath to run Drill on CDH
> ---------------------------------------------------
>
> Key: DRILL-1160
> URL: https://issues.apache.org/jira/browse/DRILL-1160
> Project: Apache Drill
> Issue Type: Bug
> Components: Tools, Build & Test
> Environment: CDH 4 and CDH 5
> Reporter: Amit Katti
> Assignee: Patrick Wong
> Fix For: 0.7.0
>
>
> For Drill to work successfully on CDH, we need to make changes to the
> classpath. Otherwise, when editing the storage plugin in the web UI to
> point to HDFS, the UI simply hangs.
> The required jars to be present on the Drill classpath are:
> /opt/cloudera/parcels/CDH-4.7.0-1.cdh4.7.0.p0.40/lib/hadoop/hadoop-annotations-2.0.0-cdh4.7.0.jar
> /opt/cloudera/parcels/CDH-4.7.0-1.cdh4.7.0.p0.40/lib/hadoop/hadoop-auth-2.0.0-cdh4.7.0.jar
> /opt/cloudera/parcels/CDH-4.7.0-1.cdh4.7.0.p0.40/lib/hadoop/hadoop-common-2.0.0-cdh4.7.0.jar
> /opt/cloudera/parcels/CDH-4.7.0-1.cdh4.7.0.p0.40/lib/hadoop-hdfs/hadoop-hdfs-2.0.0-cdh4.7.0.jar
> However, inside these 2 folders (hadoop & hadoop-hdfs) there are multiple
> duplicate soft links to these jars, which I believe is causing some kind of
> classpath ordering issue.
> These 2 commands should help us get the required jars from the folders:
> ls -all /opt/cloudera/parcels/CDH-4.7.0-1.cdh4.7.0.p0.40/lib/hadoop/*.jar |
> grep -v '^l' | grep -v 'test' | grep -Eo '([^ ]|\\ )*$'
> ls -all
> /opt/cloudera/parcels/CDH-4.7.0-1.cdh4.7.0.p0.40/lib/hadoop-hdfs/*.jar | grep
> -v '^l' | grep -v 'test' | grep -Eo '([^ ]|\\ )*$'
> Also, note that the location of these jars can change depending on how and
> where CDH is installed.
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)