[
https://issues.apache.org/jira/browse/OOZIE-3626?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
ighack updated OOZIE-3626:
--------------------------
Description:
I use CDH 6.3.2.
Hadoop is configured for HA.
I created a workflow with a Spark action in Hue.
When I run the workflow I get an error:
{{}}
{code:java}
Failing Oozie Launcher, java.net.UnknownHostException: nameservice1
java.lang.IllegalArgumentException: java.net.UnknownHostException: nameservice1
at
org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:445)
at
org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithClientProtocol(NameNodeProxiesClient.java:132)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:351) at
org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:285) at
org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:168)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3237) at
org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:123) at
org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3286) at
org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3254) at
org.apache.hadoop.fs.FileSystem.get(FileSystem.java:478) at
org.apache.spark.deploy.DependencyUtils$.org$apache$spark$deploy$DependencyUtils$$resolveGlobPath(DependencyUtils.scala:190)
at
org.apache.spark.deploy.DependencyUtils$$anonfun$resolveGlobPaths$2.apply(DependencyUtils.scala:146)
at
org.apache.spark.deploy.DependencyUtils$$anonfun$resolveGlobPaths$2.apply(DependencyUtils.scala:144)
at
scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
at
scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
at
scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35) at
scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241) at
scala.collection.AbstractTraversable.flatMap(Traversable.scala:104) at
org.apache.spark.deploy.DependencyUtils$.resolveGlobPaths(DependencyUtils.scala:144)
at
org.apache.spark.deploy.SparkSubmit$$anonfun$prepareSubmitEnvironment$3.apply(SparkSubmit.scala:355)
at
org.apache.spark.deploy.SparkSubmit$$anonfun$prepareSubmitEnvironment$3.apply(SparkSubmit.scala:355)
at scala.Option.map(Option.scala:146) at
org.apache.spark.deploy.SparkSubmit.prepareSubmitEnvironment(SparkSubmit.scala:355)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:143) at
org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86) at
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:926) at
org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:935) at
org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) at
org.apache.oozie.action.hadoop.SparkMain.runSpark(SparkMain.java:186) at
org.apache.oozie.action.hadoop.SparkMain.run(SparkMain.java:93) at
org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:104) at
org.apache.oozie.action.hadoop.SparkMain.main(SparkMain.java:60) at
sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498) at
org.apache.oozie.action.hadoop.LauncherAM.runActionMain(LauncherAM.java:410) at
org.apache.oozie.action.hadoop.LauncherAM.access$300(LauncherAM.java:55) at
org.apache.oozie.action.hadoop.LauncherAM$2.run(LauncherAM.java:223) at
java.security.AccessController.doPrivileged(Native Method) at
javax.security.auth.Subject.doAs(Subject.java:422) at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
at org.apache.oozie.action.hadoop.LauncherAM.run(LauncherAM.java:217) at
org.apache.oozie.action.hadoop.LauncherAM$1.run(LauncherAM.java:153) at
java.security.AccessController.doPrivileged(Native Method) at
javax.security.auth.Subject.doAs(Subject.java:422) at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
at org.apache.oozie.action.hadoop.LauncherAM.main(LauncherAM.java:141) Caused
by: java.net.UnknownHostException: nameservice1{code}
{{}}
in my hdfs-site.xml
{{}}
{code:java}
<property> <name>dfs.nameservices</name> <value>nameservice1</value>
</property> <property>
<name>dfs.client.failover.proxy.provider.nameservice1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property> <property>
<name>dfs.ha.automatic-failover.enabled.nameservice1</name> <value>true</value>
</property> <property> <name>ha.zookeeper.quorum</name>
<value>bigdser2:2181,bigdser3:2181,bigdser5:2181</value> </property> <property>
<name>dfs.ha.namenodes.nameservice1</name>
<value>namenode337,namenode369</value> </property>{code}
{{}}
I can run a workflow with Hive in Hue,
and I can run spark-shell --jars hdfs://nameservice1/sparklib/*.jar
"hadoop fs -ls /user" works.
It just doesn't work with Oozie,
so how can I fix it? Can someone help me?
was:
I use CDH 6.3.2.
I created a workflow with a Spark action in Hue.
When I run the workflow I get an error:
{{}}
{code:java}
Failing Oozie Launcher, java.net.UnknownHostException: nameservice1
java.lang.IllegalArgumentException: java.net.UnknownHostException: nameservice1
at
org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:445)
at
org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithClientProtocol(NameNodeProxiesClient.java:132)
at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:351) at
org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:285) at
org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:168)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3237) at
org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:123) at
org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3286) at
org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3254) at
org.apache.hadoop.fs.FileSystem.get(FileSystem.java:478) at
org.apache.spark.deploy.DependencyUtils$.org$apache$spark$deploy$DependencyUtils$$resolveGlobPath(DependencyUtils.scala:190)
at
org.apache.spark.deploy.DependencyUtils$$anonfun$resolveGlobPaths$2.apply(DependencyUtils.scala:146)
at
org.apache.spark.deploy.DependencyUtils$$anonfun$resolveGlobPaths$2.apply(DependencyUtils.scala:144)
at
scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
at
scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
at
scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35) at
scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241) at
scala.collection.AbstractTraversable.flatMap(Traversable.scala:104) at
org.apache.spark.deploy.DependencyUtils$.resolveGlobPaths(DependencyUtils.scala:144)
at
org.apache.spark.deploy.SparkSubmit$$anonfun$prepareSubmitEnvironment$3.apply(SparkSubmit.scala:355)
at
org.apache.spark.deploy.SparkSubmit$$anonfun$prepareSubmitEnvironment$3.apply(SparkSubmit.scala:355)
at scala.Option.map(Option.scala:146) at
org.apache.spark.deploy.SparkSubmit.prepareSubmitEnvironment(SparkSubmit.scala:355)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:143) at
org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86) at
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:926) at
org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:935) at
org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) at
org.apache.oozie.action.hadoop.SparkMain.runSpark(SparkMain.java:186) at
org.apache.oozie.action.hadoop.SparkMain.run(SparkMain.java:93) at
org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:104) at
org.apache.oozie.action.hadoop.SparkMain.main(SparkMain.java:60) at
sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498) at
org.apache.oozie.action.hadoop.LauncherAM.runActionMain(LauncherAM.java:410) at
org.apache.oozie.action.hadoop.LauncherAM.access$300(LauncherAM.java:55) at
org.apache.oozie.action.hadoop.LauncherAM$2.run(LauncherAM.java:223) at
java.security.AccessController.doPrivileged(Native Method) at
javax.security.auth.Subject.doAs(Subject.java:422) at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
at org.apache.oozie.action.hadoop.LauncherAM.run(LauncherAM.java:217) at
org.apache.oozie.action.hadoop.LauncherAM$1.run(LauncherAM.java:153) at
java.security.AccessController.doPrivileged(Native Method) at
javax.security.auth.Subject.doAs(Subject.java:422) at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
at org.apache.oozie.action.hadoop.LauncherAM.main(LauncherAM.java:141) Caused
by: java.net.UnknownHostException: nameservice1{code}
{{}}
in my hdfs-site.xml
{{}}
{code:java}
<property> <name>dfs.nameservices</name> <value>nameservice1</value>
</property> <property>
<name>dfs.client.failover.proxy.provider.nameservice1</name>
<value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
</property> <property>
<name>dfs.ha.automatic-failover.enabled.nameservice1</name> <value>true</value>
</property> <property> <name>ha.zookeeper.quorum</name>
<value>bigdser2:2181,bigdser3:2181,bigdser5:2181</value> </property> <property>
<name>dfs.ha.namenodes.nameservice1</name>
<value>namenode337,namenode369</value> </property>{code}
{{}}
I can run a workflow with Hive in Hue,
and I can run spark-shell --jars hdfs://nameservice1/sparklib/*.jar
"hadoop fs -ls /user" works.
It just doesn't work with Oozie,
so how can I fix it? Can someone help me?
> oozie with spark java.lang.IllegalArgumentException:
> java.net.UnknownHostException: nameservice1
> ------------------------------------------------------------------------------------------------
>
> Key: OOZIE-3626
> URL: https://issues.apache.org/jira/browse/OOZIE-3626
> Project: Oozie
> Issue Type: Task
> Components: workflow
> Reporter: ighack
> Priority: Major
>
> I use CDH 6.3.2.
>
> Hadoop is configured for HA.
>
> I created a workflow with a Spark action in Hue.
> When I run the workflow I get an error:
>
> {{}}
> {code:java}
> Failing Oozie Launcher, java.net.UnknownHostException: nameservice1
> java.lang.IllegalArgumentException: java.net.UnknownHostException:
> nameservice1 at
> org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:445)
> at
> org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithClientProtocol(NameNodeProxiesClient.java:132)
> at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:351) at
> org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:285) at
> org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:168)
> at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3237) at
> org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:123) at
> org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3286) at
> org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3254) at
> org.apache.hadoop.fs.FileSystem.get(FileSystem.java:478) at
> org.apache.spark.deploy.DependencyUtils$.org$apache$spark$deploy$DependencyUtils$$resolveGlobPath(DependencyUtils.scala:190)
> at
> org.apache.spark.deploy.DependencyUtils$$anonfun$resolveGlobPaths$2.apply(DependencyUtils.scala:146)
> at
> org.apache.spark.deploy.DependencyUtils$$anonfun$resolveGlobPaths$2.apply(DependencyUtils.scala:144)
> at
> scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
> at
> scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
> at
> scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
> at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35) at
> scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241) at
> scala.collection.AbstractTraversable.flatMap(Traversable.scala:104) at
> org.apache.spark.deploy.DependencyUtils$.resolveGlobPaths(DependencyUtils.scala:144)
> at
> org.apache.spark.deploy.SparkSubmit$$anonfun$prepareSubmitEnvironment$3.apply(SparkSubmit.scala:355)
> at
> org.apache.spark.deploy.SparkSubmit$$anonfun$prepareSubmitEnvironment$3.apply(SparkSubmit.scala:355)
> at scala.Option.map(Option.scala:146) at
> org.apache.spark.deploy.SparkSubmit.prepareSubmitEnvironment(SparkSubmit.scala:355)
> at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:143) at
> org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86) at
> org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:926)
> at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:935) at
> org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) at
> org.apache.oozie.action.hadoop.SparkMain.runSpark(SparkMain.java:186) at
> org.apache.oozie.action.hadoop.SparkMain.run(SparkMain.java:93) at
> org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:104) at
> org.apache.oozie.action.hadoop.SparkMain.main(SparkMain.java:60) at
> sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.lang.reflect.Method.invoke(Method.java:498) at
> org.apache.oozie.action.hadoop.LauncherAM.runActionMain(LauncherAM.java:410)
> at org.apache.oozie.action.hadoop.LauncherAM.access$300(LauncherAM.java:55)
> at org.apache.oozie.action.hadoop.LauncherAM$2.run(LauncherAM.java:223) at
> java.security.AccessController.doPrivileged(Native Method) at
> javax.security.auth.Subject.doAs(Subject.java:422) at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
> at org.apache.oozie.action.hadoop.LauncherAM.run(LauncherAM.java:217) at
> org.apache.oozie.action.hadoop.LauncherAM$1.run(LauncherAM.java:153) at
> java.security.AccessController.doPrivileged(Native Method) at
> javax.security.auth.Subject.doAs(Subject.java:422) at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
> at org.apache.oozie.action.hadoop.LauncherAM.main(LauncherAM.java:141)
> Caused by: java.net.UnknownHostException: nameservice1{code}
> {{}}
> in my hdfs-site.xml
>
> {{}}
> {code:java}
> <property> <name>dfs.nameservices</name> <value>nameservice1</value>
> </property> <property>
> <name>dfs.client.failover.proxy.provider.nameservice1</name>
> <value>org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider</value>
> </property> <property>
> <name>dfs.ha.automatic-failover.enabled.nameservice1</name>
> <value>true</value> </property> <property> <name>ha.zookeeper.quorum</name>
> <value>bigdser2:2181,bigdser3:2181,bigdser5:2181</value> </property>
> <property> <name>dfs.ha.namenodes.nameservice1</name>
> <value>namenode337,namenode369</value> </property>{code}
> {{}}
> I can run a workflow with Hive in Hue,
> and I can run spark-shell --jars hdfs://nameservice1/sparklib/*.jar
> "hadoop fs -ls /user" works.
> It just doesn't work with Oozie,
> so how can I fix it? Can someone help me?
--
This message was sent by Atlassian Jira
(v8.3.4#803005)