[04/24] ambari git commit: AMBARI-19032 HDFS Metric alerts turns to UNKNOWN state with error "'NoneType' object has no attribute 'split'" (dsen)

2016-12-02 Thread jonathanhurley
AMBARI-19032 HDFS Metric alerts turns to UNKNOWN state with error "'NoneType' 
object has no attribute 'split'" (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/406b245e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/406b245e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/406b245e

Branch: refs/heads/branch-feature-AMBARI-18456
Commit: 406b245ef9ed3a43fdf4d974dc1a7fc467c576a4
Parents: 63c0f2e
Author: Dmytro Sen 
Authored: Thu Dec 1 22:15:26 2016 +0200
Committer: Dmytro Sen 
Committed: Thu Dec 1 22:15:26 2016 +0200

--
 .../ambari_commons/ambari_metrics_helper.py | 45 +++-
 .../timeline/AbstractTimelineMetricsSink.java   |  6 +--
 .../AbstractTimelineMetricSinkTest.java | 10 ++---
 .../timeline/HadoopTimelineMetricsSink.java |  4 +-
 .../timeline/HadoopTimelineMetricsSinkTest.java |  6 +--
 .../src/main/python/core/config_reader.py   |  9 ++--
 .../src/test/python/core/TestEmitter.py |  2 +-
 .../1.6.1.2.2.0/package/scripts/params.py   |  2 +-
 .../0.1.0/package/scripts/params.py |  2 +-
 .../0.1.0/package/scripts/service_check.py  |  2 +-
 .../FLUME/1.4.0.2.0/package/scripts/params.py   |  2 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |  2 +-
 .../package/alerts/alert_metrics_deviation.py   |  2 +-
 .../KAFKA/0.8.1/package/scripts/params.py   |  2 +-
 .../STORM/0.9.1/package/scripts/params_linux.py |  2 +-
 .../2.0.6/hooks/before-START/scripts/params.py  |  2 +-
 .../2.1/hooks/before-START/scripts/params.py|  4 +-
 17 files changed, 51 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/406b245e/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
--
diff --git a/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py b/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
index 2eb0b6d..f6f4068 100644
--- a/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
+++ b/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
@@ -22,38 +22,41 @@ import os
 import random
 from resource_management.libraries.functions import conf_select
 
-DEFAULT_COLLECTOR_SUFFIX = '.sink.timeline.collector'
+DEFAULT_COLLECTOR_SUFFIX = '.sink.timeline.collector.hosts'
 DEFAULT_METRICS2_PROPERTIES_FILE_NAME = 'hadoop-metrics2.properties'
 
 def select_metric_collector_for_sink(sink_name):
-# TODO check '*' sink_name
+  # TODO check '*' sink_name
 
-all_collectors_string = get_metric_collectors_from_properties_file(sink_name)
+  all_collectors_string = get_metric_collectors_from_properties_file(sink_name)
+  if all_collectors_string:
 all_collectors_list = all_collectors_string.split(',')
 return select_metric_collector_hosts_from_hostnames(all_collectors_list)
+  else:
+return 'localhost'
 
 def select_metric_collector_hosts_from_hostnames(hosts):
-return get_random_host(hosts)
+  return get_random_host(hosts)
 
 def get_random_host(hosts):
-return random.choice(hosts)
+  return random.choice(hosts)
 
 def get_metric_collectors_from_properties_file(sink_name):
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-props = load_properties_from_file(os.path.join(hadoop_conf_dir, DEFAULT_METRICS2_PROPERTIES_FILE_NAME))
-return props.get(sink_name + DEFAULT_COLLECTOR_SUFFIX)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+  props = load_properties_from_file(os.path.join(hadoop_conf_dir, DEFAULT_METRICS2_PROPERTIES_FILE_NAME))
+  return props.get(sink_name + DEFAULT_COLLECTOR_SUFFIX)
 
 def load_properties_from_file(filepath, sep='=', comment_char='#'):
-"""
-Read the file passed as parameter as a properties file.
-"""
-props = {}
-with open(filepath, "rt") as f:
-for line in f:
-l = line.strip()
-if l and not l.startswith(comment_char):
-key_value = l.split(sep)
-key = key_value[0].strip()
-value = sep.join(key_value[1:]).strip('" \t')
-props[key] = value
-return props
+  """
+  Read the file passed as parameter as a properties file.
+  """
+  props = {}
+  with open(filepath, "rt") as f:
+for line in f:
+l = line.strip()
+if l and not l.startswith(comment_char):
+  key_value = l.split(sep)
+  key = key_value[0].strip()
+  value = sep.join(key_value[1:]).strip('" \t')
+  props[key] = value
+  return props

http://git-wip-us.apache.org/repos/asf/ambari/blob/406b245e/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java

[31/50] [abbrv] ambari git commit: AMBARI-19032 HDFS Metric alerts turns to UNKNOWN state with error "'NoneType' object has no attribute 'split'" (dsen)

2016-12-02 Thread ncole
AMBARI-19032 HDFS Metric alerts turns to UNKNOWN state with error "'NoneType' 
object has no attribute 'split'" (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/406b245e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/406b245e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/406b245e

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 406b245ef9ed3a43fdf4d974dc1a7fc467c576a4
Parents: 63c0f2e
Author: Dmytro Sen 
Authored: Thu Dec 1 22:15:26 2016 +0200
Committer: Dmytro Sen 
Committed: Thu Dec 1 22:15:26 2016 +0200

--
 .../ambari_commons/ambari_metrics_helper.py | 45 +++-
 .../timeline/AbstractTimelineMetricsSink.java   |  6 +--
 .../AbstractTimelineMetricSinkTest.java | 10 ++---
 .../timeline/HadoopTimelineMetricsSink.java |  4 +-
 .../timeline/HadoopTimelineMetricsSinkTest.java |  6 +--
 .../src/main/python/core/config_reader.py   |  9 ++--
 .../src/test/python/core/TestEmitter.py |  2 +-
 .../1.6.1.2.2.0/package/scripts/params.py   |  2 +-
 .../0.1.0/package/scripts/params.py |  2 +-
 .../0.1.0/package/scripts/service_check.py  |  2 +-
 .../FLUME/1.4.0.2.0/package/scripts/params.py   |  2 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |  2 +-
 .../package/alerts/alert_metrics_deviation.py   |  2 +-
 .../KAFKA/0.8.1/package/scripts/params.py   |  2 +-
 .../STORM/0.9.1/package/scripts/params_linux.py |  2 +-
 .../2.0.6/hooks/before-START/scripts/params.py  |  2 +-
 .../2.1/hooks/before-START/scripts/params.py|  4 +-
 17 files changed, 51 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/406b245e/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
--
diff --git a/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py b/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
index 2eb0b6d..f6f4068 100644
--- a/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
+++ b/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
@@ -22,38 +22,41 @@ import os
 import random
 from resource_management.libraries.functions import conf_select
 
-DEFAULT_COLLECTOR_SUFFIX = '.sink.timeline.collector'
+DEFAULT_COLLECTOR_SUFFIX = '.sink.timeline.collector.hosts'
 DEFAULT_METRICS2_PROPERTIES_FILE_NAME = 'hadoop-metrics2.properties'
 
 def select_metric_collector_for_sink(sink_name):
-# TODO check '*' sink_name
+  # TODO check '*' sink_name
 
-all_collectors_string = get_metric_collectors_from_properties_file(sink_name)
+  all_collectors_string = get_metric_collectors_from_properties_file(sink_name)
+  if all_collectors_string:
 all_collectors_list = all_collectors_string.split(',')
 return select_metric_collector_hosts_from_hostnames(all_collectors_list)
+  else:
+return 'localhost'
 
 def select_metric_collector_hosts_from_hostnames(hosts):
-return get_random_host(hosts)
+  return get_random_host(hosts)
 
 def get_random_host(hosts):
-return random.choice(hosts)
+  return random.choice(hosts)
 
 def get_metric_collectors_from_properties_file(sink_name):
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-props = load_properties_from_file(os.path.join(hadoop_conf_dir, DEFAULT_METRICS2_PROPERTIES_FILE_NAME))
-return props.get(sink_name + DEFAULT_COLLECTOR_SUFFIX)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+  props = load_properties_from_file(os.path.join(hadoop_conf_dir, DEFAULT_METRICS2_PROPERTIES_FILE_NAME))
+  return props.get(sink_name + DEFAULT_COLLECTOR_SUFFIX)
 
 def load_properties_from_file(filepath, sep='=', comment_char='#'):
-"""
-Read the file passed as parameter as a properties file.
-"""
-props = {}
-with open(filepath, "rt") as f:
-for line in f:
-l = line.strip()
-if l and not l.startswith(comment_char):
-key_value = l.split(sep)
-key = key_value[0].strip()
-value = sep.join(key_value[1:]).strip('" \t')
-props[key] = value
-return props
+  """
+  Read the file passed as parameter as a properties file.
+  """
+  props = {}
+  with open(filepath, "rt") as f:
+for line in f:
+l = line.strip()
+if l and not l.startswith(comment_char):
+  key_value = l.split(sep)
+  key = key_value[0].strip()
+  value = sep.join(key_value[1:]).strip('" \t')
+  props[key] = value
+  return props

http://git-wip-us.apache.org/repos/asf/ambari/blob/406b245e/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java

ambari git commit: AMBARI-19032 HDFS Metric alerts turns to UNKNOWN state with error "'NoneType' object has no attribute 'split'" (dsen)

2016-12-01 Thread dsen
Repository: ambari
Updated Branches:
  refs/heads/trunk 63c0f2e58 -> 406b245ef


AMBARI-19032 HDFS Metric alerts turns to UNKNOWN state with error "'NoneType' 
object has no attribute 'split'" (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/406b245e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/406b245e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/406b245e

Branch: refs/heads/trunk
Commit: 406b245ef9ed3a43fdf4d974dc1a7fc467c576a4
Parents: 63c0f2e
Author: Dmytro Sen 
Authored: Thu Dec 1 22:15:26 2016 +0200
Committer: Dmytro Sen 
Committed: Thu Dec 1 22:15:26 2016 +0200

--
 .../ambari_commons/ambari_metrics_helper.py | 45 +++-
 .../timeline/AbstractTimelineMetricsSink.java   |  6 +--
 .../AbstractTimelineMetricSinkTest.java | 10 ++---
 .../timeline/HadoopTimelineMetricsSink.java |  4 +-
 .../timeline/HadoopTimelineMetricsSinkTest.java |  6 +--
 .../src/main/python/core/config_reader.py   |  9 ++--
 .../src/test/python/core/TestEmitter.py |  2 +-
 .../1.6.1.2.2.0/package/scripts/params.py   |  2 +-
 .../0.1.0/package/scripts/params.py |  2 +-
 .../0.1.0/package/scripts/service_check.py  |  2 +-
 .../FLUME/1.4.0.2.0/package/scripts/params.py   |  2 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |  2 +-
 .../package/alerts/alert_metrics_deviation.py   |  2 +-
 .../KAFKA/0.8.1/package/scripts/params.py   |  2 +-
 .../STORM/0.9.1/package/scripts/params_linux.py |  2 +-
 .../2.0.6/hooks/before-START/scripts/params.py  |  2 +-
 .../2.1/hooks/before-START/scripts/params.py|  4 +-
 17 files changed, 51 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/406b245e/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
--
diff --git a/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py b/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
index 2eb0b6d..f6f4068 100644
--- a/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
+++ b/ambari-common/src/main/python/ambari_commons/ambari_metrics_helper.py
@@ -22,38 +22,41 @@ import os
 import random
 from resource_management.libraries.functions import conf_select
 
-DEFAULT_COLLECTOR_SUFFIX = '.sink.timeline.collector'
+DEFAULT_COLLECTOR_SUFFIX = '.sink.timeline.collector.hosts'
 DEFAULT_METRICS2_PROPERTIES_FILE_NAME = 'hadoop-metrics2.properties'
 
 def select_metric_collector_for_sink(sink_name):
-# TODO check '*' sink_name
+  # TODO check '*' sink_name
 
-all_collectors_string = get_metric_collectors_from_properties_file(sink_name)
+  all_collectors_string = get_metric_collectors_from_properties_file(sink_name)
+  if all_collectors_string:
 all_collectors_list = all_collectors_string.split(',')
 return select_metric_collector_hosts_from_hostnames(all_collectors_list)
+  else:
+return 'localhost'
 
 def select_metric_collector_hosts_from_hostnames(hosts):
-return get_random_host(hosts)
+  return get_random_host(hosts)
 
 def get_random_host(hosts):
-return random.choice(hosts)
+  return random.choice(hosts)
 
 def get_metric_collectors_from_properties_file(sink_name):
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-props = load_properties_from_file(os.path.join(hadoop_conf_dir, DEFAULT_METRICS2_PROPERTIES_FILE_NAME))
-return props.get(sink_name + DEFAULT_COLLECTOR_SUFFIX)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+  props = load_properties_from_file(os.path.join(hadoop_conf_dir, DEFAULT_METRICS2_PROPERTIES_FILE_NAME))
+  return props.get(sink_name + DEFAULT_COLLECTOR_SUFFIX)
 
 def load_properties_from_file(filepath, sep='=', comment_char='#'):
-"""
-Read the file passed as parameter as a properties file.
-"""
-props = {}
-with open(filepath, "rt") as f:
-for line in f:
-l = line.strip()
-if l and not l.startswith(comment_char):
-key_value = l.split(sep)
-key = key_value[0].strip()
-value = sep.join(key_value[1:]).strip('" \t')
-props[key] = value
-return props
+  """
+  Read the file passed as parameter as a properties file.
+  """
+  props = {}
+  with open(filepath, "rt") as f:
+for line in f:
+l = line.strip()
+if l and not l.startswith(comment_char):
+  key_value = l.split(sep)
+  key = key_value[0].strip()
+  value = sep.join(key_value[1:]).strip('" \t')
+  props[key] = value
+  return props