Hi,
I am getting the following error while starting the Zeppelin service from Ambari Server.

/var/lib/ambari-agent/data/errors-2408.txt

Traceback (most recent call last):
  File "/var/lib/ambari-agent/cache/stacks/HDP/2.3/services/ZEPPELIN/package/scripts/master.py", line 295, in <module>
    Master().execute()
  File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 216, in execute
    method(env)
  File "/var/lib/ambari-agent/cache/stacks/HDP/2.3/services/ZEPPELIN/package/scripts/master.py", line 230, in start
    Execute (params.zeppelin_dir+'/bin/zeppelin-daemon.sh start >> ' + params.zeppelin_log_file, user=params.zeppelin_user)
  File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 154, in __init__
    self.env.run()
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 152, in run
    self.run_action(resource, action)
  File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 118, in run_action
    provider_action()
  File "/usr/lib/python2.6/site-packages/resource_management/core/providers/system.py", line 260, in action_run
    tries=self.resource.tries, try_sleep=self.resource.try_sleep)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 70, in inner
    result = function(command, **kwargs)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 92, in checked_call
    tries=tries, try_sleep=try_sleep)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 140, in _call_wrapper
    result = _call(command, **kwargs_copy)
  File "/usr/lib/python2.6/site-packages/resource_management/core/shell.py", line 290, in _call
    err_msg = Logger.filter_text(("Execution of '%s' returned %d. %s") % (command_alias, code, all_output))
UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 31: ordinal not in range(128)
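
From the last frame it looks like the failure is not in Zeppelin itself but in how the Ambari agent formats the error message: the output of zeppelin-daemon.sh apparently contains a non-ASCII byte (0xe2 is the first byte of UTF-8 punctuation such as curly quotes), and Python 2 then attempts an implicit ascii decode while building err_msg. A minimal Python 2 sketch of what I think is happening (the strings below are made up, only the mechanism matters):

# Guess at the mechanism behind the UnicodeDecodeError above:
# mixing a unicode value with a byte string that contains UTF-8 bytes
# forces an implicit decode with the ascii codec in Python 2.
command_alias = u'/opt/incubator-zeppelin/bin/zeppelin-daemon.sh start'  # unicode
all_output = 'start failed \xe2\x80\x98details\xe2\x80\x99'              # raw bytes from the shell
err_msg = "Execution of '%s' returned %d. %s" % (command_alias, 1, all_output)
# raises: UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 ...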

stdout:   /var/lib/ambari-agent/data/output-2408.txt

2015-12-31 02:01:20,438 - Group['hadoop'] {}
2015-12-31 02:01:20,439 - Group['users'] {}
2015-12-31 02:01:20,439 - Group['zeppelin'] {}
2015-12-31 02:01:20,439 - Group['knox'] {}
2015-12-31 02:01:20,439 - Group['spark'] {}
2015-12-31 02:01:20,440 - User['hive'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,440 - User['oozie'] {'gid': 'hadoop', 'groups': [u'users']}
2015-12-31 02:01:20,441 - User['zeppelin'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,441 - User['ambari-qa'] {'gid': 'hadoop', 'groups': [u'users']}
2015-12-31 02:01:20,442 - User['flume'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,442 - User['hdfs'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,443 - User['knox'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,443 - User['spark'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,444 - User['mapred'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,444 - User['tez'] {'gid': 'hadoop', 'groups': [u'users']}
2015-12-31 02:01:20,445 - User['zookeeper'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,445 - User['sqoop'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,446 - User['yarn'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,446 - User['hcat'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,447 - User['ams'] {'gid': 'hadoop', 'groups': [u'hadoop']}
2015-12-31 02:01:20,447 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2015-12-31 02:01:20,448 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2015-12-31 02:01:20,452 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] due to not_if
2015-12-31 02:01:20,453 - Group['hdfs'] {'ignore_failures': False}
2015-12-31 02:01:20,453 - User['hdfs'] {'ignore_failures': False, 'groups': [u'hadoop', u'hdfs']}
2015-12-31 02:01:20,453 - Directory['/etc/hadoop'] {'mode': 0755}
2015-12-31 02:01:20,465 - File['/usr/hdp/current/hadoop-client/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2015-12-31 02:01:20,466 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 0777}
2015-12-31 02:01:20,474 - Execute[('setenforce', '0')] {'not_if': '(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)', 'sudo': True, 'only_if': 'test -f /selinux/enforce'}
2015-12-31 02:01:20,482 - Skipping Execute[('setenforce', '0')] due to only_if
2015-12-31 02:01:20,482 - Directory['/var/log/hadoop'] {'owner': 'root', 'mode': 0775, 'group': 'hadoop', 'recursive': True, 'cd_access': 'a'}
2015-12-31 02:01:20,484 - Directory['/var/run/hadoop'] {'owner': 'root', 'group': 'root', 'recursive': True, 'cd_access': 'a'}
2015-12-31 02:01:20,484 - Directory['/tmp/hadoop-hdfs'] {'owner': 'hdfs', 'recursive': True, 'cd_access': 'a'}
2015-12-31 02:01:20,488 - File['/usr/hdp/current/hadoop-client/conf/commons-logging.properties'] {'content': Template('commons-logging.properties.j2'), 'owner': 'hdfs'}
2015-12-31 02:01:20,489 - File['/usr/hdp/current/hadoop-client/conf/health_check'] {'content': Template('health_check.j2'), 'owner': 'hdfs'}
2015-12-31 02:01:20,490 - File['/usr/hdp/current/hadoop-client/conf/log4j.properties'] {'content': ..., 'owner': 'hdfs', 'group': 'hadoop', 'mode': 0644}
2015-12-31 02:01:20,498 - File['/usr/hdp/current/hadoop-client/conf/hadoop-metrics2.properties'] {'content': Template('hadoop-metrics2.properties.j2'), 'owner': 'hdfs'}
2015-12-31 02:01:20,499 - File['/usr/hdp/current/hadoop-client/conf/task-log4j.properties'] {'content': StaticFile('task-log4j.properties'), 'mode': 0755}
2015-12-31 02:01:20,499 - File['/usr/hdp/current/hadoop-client/conf/configuration.xsl'] {'owner': 'hdfs', 'group': 'hadoop'}
2015-12-31 02:01:20,503 - File['/etc/hadoop/conf/topology_mappings.data'] {'owner': 'hdfs', 'content': Template('topology_mappings.data.j2'), 'only_if': 'test -d /etc/hadoop/conf', 'group': 'hadoop'}
2015-12-31 02:01:20,506 - File['/etc/hadoop/conf/topology_script.py'] {'content': StaticFile('topology_script.py'), 'only_if': 'test -d /etc/hadoop/conf', 'mode': 0755}
2015-12-31 02:01:20,683 - XmlConfig['zeppelin-site.xml'] {'owner': 'zeppelin', 'group': 'zeppelin', 'conf_dir': '/opt/incubator-zeppelin/conf', 'configurations': ...}
2015-12-31 02:01:20,693 - Generating config: /opt/incubator-zeppelin/conf/zeppelin-site.xml
2015-12-31 02:01:20,693 - File['/opt/incubator-zeppelin/conf/zeppelin-site.xml'] {'owner': 'zeppelin', 'content': InlineTemplate(...), 'group': 'zeppelin', 'mode': None, 'encoding': 'UTF-8'}
2015-12-31 02:01:20,711 - File['/opt/incubator-zeppelin/conf/zeppelin-env.sh'] {'owner': 'zeppelin', 'content': InlineTemplate(...), 'group': 'zeppelin'}
2015-12-31 02:01:20,713 - Execute['/opt/incubator-zeppelin/bin/zeppelin-daemon.sh start >> /var/log/zeppelin/zeppelin-setup.log'] {'user': 'zeppelin'}
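
In case it helps narrow this down, here is a throwaway Python 2 helper I put together (my own, not Ambari code) to find any non-ASCII bytes in the log the command appends to; the path is taken from the Execute line above:

# Throwaway helper: print every line of the Zeppelin setup log that
# contains a byte outside the ASCII range, so the offending character
# (and whichever component wrote it) can be identified.
with open('/var/log/zeppelin/zeppelin-setup.log', 'rb') as f:
    for lineno, line in enumerate(f, 1):
        if any(ord(c) > 127 for c in line):
            print lineno, repr(line)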
