[ https://issues.apache.org/jira/browse/AMBARI-8644?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14241548#comment-14241548 ]
Hudson commented on AMBARI-8644:
--------------------------------
SUCCESS: Integrated in Ambari-trunk-Commit #1163 (See [https://builds.apache.org/job/Ambari-trunk-Commit/1163/])
AMBARI-8644. MR2 Service Check failed (aonishuk) (aonishuk: http://git-wip-us.apache.org/repos/asf?p=ambari.git&a=commit&h=a5c9225c2445884149c4c27aec041ebcc9fef4b4)
* ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
> MR2 Service Check failed
> ------------------------
>
> Key: AMBARI-8644
> URL: https://issues.apache.org/jira/browse/AMBARI-8644
> Project: Ambari
> Issue Type: Bug
> Reporter: Andrew Onischuk
> Assignee: Andrew Onischuk
> Fix For: 2.0.0
>
>
> Installed a single-node cluster with ZK, HDFS, YARN, and TEZ.
>
>
> stderr:
> 2014-12-05 20:42:58,117 - Error while executing command 'service_check':
> Traceback (most recent call last):
>   File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 152, in execute
>     method(env)
>   File "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/YARN/package/scripts/mapred_service_check.py", line 67, in service_check
>     logoutput=True
>   File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 148, in __init__
>     self.env.run()
>   File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 149, in run
>     self.run_action(resource, action)
>   File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 115, in run_action
>     provider_action()
>   File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/execute_hadoop.py", line 50, in action_run
>     path = self.resource.bin_dir
>   File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 148, in __init__
>     self.env.run()
>   File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 149, in run
>     self.run_action(resource, action)
>   File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 115, in run_action
>     provider_action()
>   File "/usr/lib/python2.6/site-packages/resource_management/core/providers/system.py", line 245, in action_run
>     raise ex
> Fail: Execution of '/usr/bin/sudo su ambari-qa -l -s /bin/bash -c 'export PATH='"'"'/usr/lib/ambari-server/*:/usr/lib/ambari-server/*:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/usr/lib/ambari-server/*:/usr/lib/ambari-server/*:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/current/hadoop-yarn-client/bin'"'"' > /dev/null ; hadoop --config /etc/hadoop/conf jar /usr/hdp/current/hadoop-mapreduce-client/hadoop-mapreduce-examples-2.*.jar wordcount /user/ambari-qa/mapredsmokeinput /user/ambari-qa/mapredsmokeoutput'' returned 255.
> 14/12/05 20:42:56 INFO impl.TimelineClientImpl: Timeline service address: http://ys1-1.c.pramod-thangali.internal:8188/ws/v1/timeline/
> 14/12/05 20:42:57 INFO client.RMProxy: Connecting to ResourceManager at ys1-1.c.pramod-thangali.internal/10.240.26.65:8050
> java.io.FileNotFoundException: File does not exist: hdfs://ys1-1.c.pramod-thangali.internal:8020/hdp/apps/2.2.1.0-2048/mapreduce/mapreduce.tar.gz
>   at org.apache.hadoop.fs.Hdfs.getFileStatus(Hdfs.java:137)
>   at org.apache.hadoop.fs.AbstractFileSystem.resolvePath(AbstractFileSystem.java:460)
>   at org.apache.hadoop.fs.FileContext$24.next(FileContext.java:2137)
>   at org.apache.hadoop.fs.FileContext$24.next(FileContext.java:2133)
>   at org.apache.hadoop.fs.FSLinkResolver.resolve(FSLinkResolver.java:90)
>   at org.apache.hadoop.fs.FileContext.resolve(FileContext.java:2133)
>   at org.apache.hadoop.fs.FileContext.resolvePath(FileContext.java:595)
>   at org.apache.hadoop.mapreduce.JobSubmitter.addMRFrameworkToDistributedCache(JobSubmitter.java:753)
>   at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:435)
>   at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
>   at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
>   at java.security.AccessController.doPrivileged(Native Method)
>   at javax.security.auth.Subject.doAs(Subject.java:415)
>   at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
>   at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
>   at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1314)
>   at org.apache.hadoop.examples.WordCount.main(WordCount.java:87)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:71)
>   at org.apache.hadoop.util.ProgramDriver.run(ProgramDriver.java:144)
>   at org.apache.hadoop.examples.ExampleDriver.main(ExampleDriver.java:74)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
>   at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
> stdout:
> 2014-12-05 20:42:42,098 - ExecuteHadoop['fs -rm -r -f /user/ambari-qa/mapredsmokeoutput /user/ambari-qa/mapredsmokeinput'] {'try_sleep': 5, 'tries': 1, 'bin_dir': '/usr/lib/ambari-server/*:/usr/lib/ambari-server/*:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/current/hadoop-yarn-client/bin', 'user': 'ambari-qa', 'conf_dir': '/etc/hadoop/conf'}
> 2014-12-05 20:42:42,099 - Execute['hadoop --config /etc/hadoop/conf fs -rm -r -f /user/ambari-qa/mapredsmokeoutput /user/ambari-qa/mapredsmokeinput'] {'logoutput': False, 'path': ['/usr/lib/ambari-server/*:/usr/lib/ambari-server/*:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/current/hadoop-yarn-client/bin'], 'tries': 1, 'user': 'ambari-qa', 'try_sleep': 5}
> 2014-12-05 20:42:46,727 - ExecuteHadoop['fs -put /etc/passwd /user/ambari-qa/mapredsmokeinput'] {'try_sleep': 5, 'tries': 1, 'bin_dir': '/usr/lib/ambari-server/*:/usr/lib/ambari-server/*:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/current/hadoop-yarn-client/bin', 'user': 'ambari-qa', 'conf_dir': '/etc/hadoop/conf'}
> 2014-12-05 20:42:46,729 - Execute['hadoop --config /etc/hadoop/conf fs -put /etc/passwd /user/ambari-qa/mapredsmokeinput'] {'logoutput': False, 'path': ['/usr/lib/ambari-server/*:/usr/lib/ambari-server/*:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/current/hadoop-yarn-client/bin'], 'tries': 1, 'user': 'ambari-qa', 'try_sleep': 5}
> 2014-12-05 20:42:51,738 - ExecuteHadoop['jar /usr/hdp/current/hadoop-mapreduce-client/hadoop-mapreduce-examples-2.*.jar wordcount /user/ambari-qa/mapredsmokeinput /user/ambari-qa/mapredsmokeoutput'] {'bin_dir': '/usr/lib/ambari-server/*:/usr/lib/ambari-server/*:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/current/hadoop-yarn-client/bin', 'conf_dir': '/etc/hadoop/conf', 'logoutput': True, 'try_sleep': 5, 'tries': 1, 'user': 'ambari-qa'}
> 2014-12-05 20:42:51,739 - Execute['hadoop --config /etc/hadoop/conf jar /usr/hdp/current/hadoop-mapreduce-client/hadoop-mapreduce-examples-2.*.jar wordcount /user/ambari-qa/mapredsmokeinput /user/ambari-qa/mapredsmokeoutput'] {'logoutput': True, 'path': ['/usr/lib/ambari-server/*:/usr/lib/ambari-server/*:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/current/hadoop-yarn-client/bin'], 'tries': 1, 'user': 'ambari-qa', 'try_sleep': 5}
> 2014-12-05 20:42:58,116 - 14/12/05 20:42:56 INFO impl.TimelineClientImpl: Timeline service address: http://ys1-1.c.pramod-thangali.internal:8188/ws/v1/timeline/
> 14/12/05 20:42:57 INFO client.RMProxy: Connecting to ResourceManager at ys1-1.c.pramod-thangali.internal/10.240.26.65:8050
> java.io.FileNotFoundException: File does not exist: hdfs://ys1-1.c.pramod-thangali.internal:8020/hdp/apps/2.2.1.0-2048/mapreduce/mapreduce.tar.gz
>   at org.apache.hadoop.fs.Hdfs.getFileStatus(Hdfs.java:137)
>   at org.apache.hadoop.fs.AbstractFileSystem.resolvePath(AbstractFileSystem.java:460)
>   at org.apache.hadoop.fs.FileContext$24.next(FileContext.java:2137)
>   at org.apache.hadoop.fs.FileContext$24.next(FileContext.java:2133)
>   at org.apache.hadoop.fs.FSLinkResolver.resolve(FSLinkResolver.java:90)
>   at org.apache.hadoop.fs.FileContext.resolve(FileContext.java:2133)
>   at org.apache.hadoop.fs.FileContext.resolvePath(FileContext.java:595)
>   at org.apache.hadoop.mapreduce.JobSubmitter.addMRFrameworkToDistributedCache(JobSubmitter.java:753)
>   at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:435)
>   at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
>   at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
>   at java.security.AccessController.doPrivileged(Native Method)
>   at javax.security.auth.Subject.doAs(Subject.java:415)
>   at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
>   at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
>   at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1314)
>   at org.apache.hadoop.examples.WordCount.main(WordCount.java:87)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:71)
>   at org.apache.hadoop.util.ProgramDriver.run(ProgramDriver.java:144)
>   at org.apache.hadoop.examples.ExampleDriver.main(ExampleDriver.java:74)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
>   at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
> 2014-12-05 20:42:58,117 - Error while executing command 'service_check':
> Traceback (most recent call last):
>   File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 152, in execute
>     method(env)
>   File "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/YARN/package/scripts/mapred_service_check.py", line 67, in service_check
>     logoutput=True
>   File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 148, in __init__
>     self.env.run()
>   File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 149, in run
>     self.run_action(resource, action)
>   File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 115, in run_action
>     provider_action()
>   File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/execute_hadoop.py", line 50, in action_run
>     path = self.resource.bin_dir
>   File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 148, in __init__
>     self.env.run()
>   File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 149, in run
>     self.run_action(resource, action)
>   File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 115, in run_action
>     provider_action()
>   File "/usr/lib/python2.6/site-packages/resource_management/core/providers/system.py", line 245, in action_run
>     raise ex
> Fail: Execution of '/usr/bin/sudo su ambari-qa -l -s /bin/bash -c 'export PATH='"'"'/usr/lib/ambari-server/*:/usr/lib/ambari-server/*:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/usr/lib/ambari-server/*:/usr/lib/ambari-server/*:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/current/hadoop-yarn-client/bin'"'"' > /dev/null ; hadoop --config /etc/hadoop/conf jar /usr/hdp/current/hadoop-mapreduce-client/hadoop-mapreduce-examples-2.*.jar wordcount /user/ambari-qa/mapredsmokeinput /user/ambari-qa/mapredsmokeoutput'' returned 255.
> 14/12/05 20:42:56 INFO impl.TimelineClientImpl: Timeline service address: http://ys1-1.c.pramod-thangali.internal:8188/ws/v1/timeline/
> 14/12/05 20:42:57 INFO client.RMProxy: Connecting to ResourceManager at ys1-1.c.pramod-thangali.internal/10.240.26.65:8050
> java.io.FileNotFoundException: File does not exist: hdfs://ys1-1.c.pramod-thangali.internal:8020/hdp/apps/2.2.1.0-2048/mapreduce/mapreduce.tar.gz
>   at org.apache.hadoop.fs.Hdfs.getFileStatus(Hdfs.java:137)
>   at org.apache.hadoop.fs.AbstractFileSystem.resolvePath(AbstractFileSystem.java:460)
>   at org.apache.hadoop.fs.FileContext$24.next(FileContext.java:2137)
>   at org.apache.hadoop.fs.FileContext$24.next(FileContext.java:2133)
>   at org.apache.hadoop.fs.FSLinkResolver.resolve(FSLinkResolver.java:90)
>   at org.apache.hadoop.fs.FileContext.resolve(FileContext.java:2133)
>   at org.apache.hadoop.fs.FileContext.resolvePath(FileContext.java:595)
>   at org.apache.hadoop.mapreduce.JobSubmitter.addMRFrameworkToDistributedCache(JobSubmitter.java:753)
>   at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:435)
>   at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
>   at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
>   at java.security.AccessController.doPrivileged(Native Method)
>   at javax.security.auth.Subject.doAs(Subject.java:415)
>   at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
>   at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
>   at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:1314)
>   at org.apache.hadoop.examples.WordCount.main(WordCount.java:87)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:71)
>   at org.apache.hadoop.util.ProgramDriver.run(ProgramDriver.java:144)
>   at org.apache.hadoop.examples.ExampleDriver.main(ExampleDriver.java:74)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
>   at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
>
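The failure quoted above comes down to the MapReduce framework archive not being present at hdfs:///hdp/apps/2.2.1.0-2048/mapreduce/mapreduce.tar.gz when the smoke test submits the wordcount job, so JobSubmitter.addMRFrameworkToDistributedCache aborts with a FileNotFoundException. The actual fix landed in dynamic_variable_interpretation.py (see the commit above); the snippet below is only a minimal standalone sketch of the workaround idea, checking for the tarball on HDFS and uploading it from the local HDP install if it is missing. The ensure_mapreduce_tarball helper, the /usr/hdp/<version>/hadoop/mapreduce.tar.gz local path, and the chmod values are illustrative assumptions, not taken from the patch.

#!/usr/bin/env python
# Illustrative sketch only -- not the AMBARI-8644 patch. Assumes the HDP client
# bits live under /usr/hdp/<version> and the `hdfs` CLI is on PATH.
import subprocess

def ensure_mapreduce_tarball(hdp_version, hdfs_user="hdfs"):
    """Upload mapreduce.tar.gz to HDFS if the service-check path is missing."""
    local_tarball = "/usr/hdp/%s/hadoop/mapreduce.tar.gz" % hdp_version
    hdfs_dir = "/hdp/apps/%s/mapreduce" % hdp_version
    hdfs_tarball = "%s/mapreduce.tar.gz" % hdfs_dir

    def hdfs(*args):
        # Run an `hdfs dfs` subcommand as the HDFS superuser and return its exit code.
        return subprocess.call(["sudo", "-u", hdfs_user, "hdfs", "dfs"] + list(args))

    # `hdfs dfs -test -e` exits 0 when the path already exists.
    if hdfs("-test", "-e", hdfs_tarball) == 0:
        return  # nothing to do; the job submitter will find the framework archive

    if hdfs("-mkdir", "-p", hdfs_dir) != 0:
        raise RuntimeError("could not create %s" % hdfs_dir)
    if hdfs("-put", local_tarball, hdfs_tarball) != 0:
        raise RuntimeError("could not upload %s" % local_tarball)
    # Make the apps tree world-readable so ambari-qa (and YARN containers) can read it.
    hdfs("-chmod", "-R", "555", "/hdp/apps/%s" % hdp_version)
    hdfs("-chmod", "444", hdfs_tarball)

if __name__ == "__main__":
    ensure_mapreduce_tarball("2.2.1.0-2048")

Running something like this once with HDFS superuser rights, before re-running the MR2 service check, should make the archive resolvable at the path the job submitter expects.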
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)