ambari git commit: AMBARI-9161. Disable per Region metrics for the Metrics System. Missing metric definition. (swagle)

2015-01-15 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk ad0d0b094 -> 43f227ac7


AMBARI-9161. Disable per Region metrics for the Metrics System. Missing metric 
definition. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/43f227ac
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/43f227ac
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/43f227ac

Branch: refs/heads/trunk
Commit: 43f227ac7e01f6aac5b3d4b93bf02fb25f6a2240
Parents: ad0d0b0
Author: Siddharth Wagle 
Authored: Thu Jan 15 16:46:16 2015 -0800
Committer: Siddharth Wagle 
Committed: Thu Jan 15 16:46:49 2015 -0800

--
 .../resources/common-services/HBASE/0.96.0.2.0/metrics.json | 5 +
 1 file changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/43f227ac/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/metrics.json
--
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/metrics.json
 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/metrics.json
index 50f9c42..870aa38 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/metrics.json
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/metrics.json
@@ -819,6 +819,11 @@
 "pointInTime": true,
 "temporal": true
   },
+  "metrics/hbase/regionserver/blockCacheHitPercent": {
+"metric": "regionserver.Server.blockCountHitPercent",
+"pointInTime": true,
+"temporal": true
+  },
   "metrics/rpc/createTable_avg_time": {
 "metric": "rpc.rpc.createTable_avg_time",
 "pointInTime": true,



ambari git commit: AMBARI-9161. Disable per Region metrics for the Metrics System. (swagle)

2015-01-15 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 265f13f46 -> a10d56f0b


AMBARI-9161. Disable per Region metrics for the Metrics System. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a10d56f0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a10d56f0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a10d56f0

Branch: refs/heads/trunk
Commit: a10d56f0b2a14b11d97e835ed81bedc4b361eb17
Parents: 265f13f
Author: Siddharth Wagle 
Authored: Thu Jan 15 14:14:21 2015 -0800
Committer: Siddharth Wagle 
Committed: Thu Jan 15 14:14:21 2015 -0800

--
 .../common-services/AMS/0.1.0/configuration/ams-hbase-env.xml| 4 
 .../0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2  | 4 
 2 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/a10d56f0/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-env.xml
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-env.xml
 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-env.xml
index f18a0df..d42596a 100644
--- 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-env.xml
+++ 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-env.xml
@@ -74,10 +74,6 @@ export HBASE_CONF_DIR=${HBASE_CONF_DIR:-{{hbase_conf_dir}}}
 # Extra Java CLASSPATH elements. Optional.
 export HBASE_CLASSPATH=${HBASE_CLASSPATH}
 
-if [ -f "/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar" 
]; then
-  export 
HBASE_CLASSPATH=${HBASE_CLASSPATH}:/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
-fi
-
 # The maximum amount of heap to use, in MB. Default is 1000.
 export HBASE_HEAPSIZE={{hbase_heapsize}}
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a10d56f0/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
index 13b0948..91fe843 100644
--- 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
+++ 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
@@ -55,3 +55,7 @@ 
hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelin
 hbase.sink.timeline.period=10
 
hbase.sink.timeline.collector={{ams_collector_host_single}}:{{metric_collector_port}}
 hbase.sink.timeline.serviceName-prefix=ams
+
+# Switch off metrics generation on a per region basis
+*.source.filter.class=org.apache.hadoop.metrics2.filter.GlobFilter
+hbase.*.source.filter.exclude=*Regions*
\ No newline at end of file
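
A quick note on the exclusion added above: the hadoop-metrics2 GlobFilter matches metrics source names against shell-style glob patterns, so excluding *Regions* drops every per-region metrics source while leaving the RegionServer-level sources in place. Below is a minimal, self-contained Java sketch of that matching semantics; it translates the glob by hand with plain regexes rather than using the real GlobFilter class, and the sample source names are hypothetical:

import java.util.Arrays;
import java.util.regex.Pattern;

public class GlobExcludeSketch {
  // Hand-rolled translation of a metrics2-style glob ('*' and '?') to a regex.
  static Pattern globToRegex(String glob) {
    StringBuilder regex = new StringBuilder();
    for (char c : glob.toCharArray()) {
      if (c == '*') {
        regex.append(".*");
      } else if (c == '?') {
        regex.append('.');
      } else {
        regex.append(Pattern.quote(String.valueOf(c)));
      }
    }
    return Pattern.compile(regex.toString());
  }

  public static void main(String[] args) {
    Pattern exclude = globToRegex("*Regions*");
    // Hypothetical metrics2 source names: only the per-region source matches.
    for (String source : Arrays.asList(
        "RegionServer,sub=Regions",
        "RegionServer,sub=Server",
        "Master,sub=AssignmentManager")) {
      System.out.println(source + " excluded=" + exclude.matcher(source).matches());
    }
  }
}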



[2/3] ambari git commit: AMBARI-9113. Metric Collector cannot start on a secure cluster.

2015-01-13 Thread swagle
AMBARI-9113. Metric Collector cannot start on a secure cluster.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a4af6ca4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a4af6ca4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a4af6ca4

Branch: refs/heads/trunk
Commit: a4af6ca4d943ee739b15196a502cd6e3bfdab43f
Parents: e904eb7
Author: Siddharth Wagle 
Authored: Tue Jan 13 16:22:51 2015 -0800
Committer: Siddharth Wagle 
Committed: Tue Jan 13 18:06:58 2015 -0800

--
 .../AMS/0.1.0/configuration/ams-hbase-env.xml   |  9 +
 .../common-services/AMS/0.1.0/package/scripts/ams.py| 12 
 2 files changed, 21 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/a4af6ca4/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-env.xml
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-env.xml
 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-env.xml
index bc3f516..f18a0df 100644
--- 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-env.xml
+++ 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-env.xml
@@ -131,6 +131,15 @@ export HBASE_OPTS="$HBASE_OPTS 
-Djava.security.auth.login.config={{client_jaas_c
 export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS 
-Djava.security.auth.login.config={{master_jaas_config_file}}"
 export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS 
-Djava.security.auth.login.config={{regionserver_jaas_config_file}}"
 {% endif %}
+
+#Specify native libraries of installed Hadoop paltform
+_HADOOP_IN_PATH=$(PATH="${HADOOP_HOME:-${HADOOP_PREFIX}}/bin:$PATH" which 
hadoop 2>/dev/null)
+_HADOOP_CLASSPATH="/usr/lib/ams-hbase/lib/*"
+_HADOOP_JAVA_LIBRARY_PATH=$(HADOOP_CLASSPATH="$_HADOOP_CLASSPATH" 
${_HADOOP_IN_PATH} org.apache.hadoop.hbase.util.GetJavaProperty 
java.library.path)
+export HBASE_OPTS="$HBASE_OPTS 
-Djava.library.path=${_HADOOP_JAVA_LIBRARY_PATH}"
+
+#"Unsetting" HADOOP_HOME to avoid importing HADOOP installed cluster related 
configs like: /usr/hdp/2.2.0.0-2041/hadoop/conf/ 
+export HADOOP_HOME=`pwd`
 
   
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/a4af6ca4/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams.py
 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams.py
index 9aa9484..81732b8 100644
--- 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams.py
+++ 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/scripts/ams.py
@@ -87,6 +87,18 @@ def ams(name=None):
   recursive=True
 )
 
+Directory(params.ams_monitor_log_dir,
+  owner=params.ams_user,
+  group=params.user_group,
+  recursive=True
+)
+
+Directory(params.ams_monitor_pid_dir,
+  owner=params.ams_user,
+  group=params.user_group,
+  recursive=True
+)
+
 TemplateConfig(
   format("{ams_monitor_conf_dir}/metric_monitor.ini"),
   owner=params.ams_user,



[1/3] ambari git commit: AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. Confusing property description. (swagle)

2015-01-13 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk f0f0c7b4b -> 572251803


AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. Confusing 
property description. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/57225180
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/57225180
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/57225180

Branch: refs/heads/trunk
Commit: 572251803f6fe25262234bf72a5dc5512eb3aa74
Parents: a4af6ca
Author: Siddharth Wagle 
Authored: Tue Jan 13 16:29:05 2015 -0800
Committer: Siddharth Wagle 
Committed: Tue Jan 13 18:06:58 2015 -0800

--
 .../common-services/AMS/0.1.0/configuration/ams-hbase-site.xml   | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/57225180/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-site.xml
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-site.xml
 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-site.xml
index a8e2cb0..8de4f30 100644
--- 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-site.xml
+++ 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-site.xml
@@ -27,9 +27,7 @@
 
   AMS service uses HBase as default storage backend. Set the rootdir for
   HBase to either local filesystem path if using AMS in embedded mode or
-  to a HDFS dir, example: hdfs://namenode.example.org:9000/hbase.  By
-  default HBase writes into /tmp. Change this configuration else all data
-  will be lost on machine restart.
+  to a HDFS dir, example: hdfs://namenode.example.org:9000/hbase.
 
   
   



[3/3] ambari git commit: AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. (swagle)

2015-01-13 Thread swagle
AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e904eb7f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e904eb7f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e904eb7f

Branch: refs/heads/trunk
Commit: e904eb7fa1bb8260bd89fed62cf9bd042f2c0aa3
Parents: f0f0c7b
Author: Siddharth Wagle 
Authored: Tue Jan 13 16:22:29 2015 -0800
Committer: Siddharth Wagle 
Committed: Tue Jan 13 18:06:58 2015 -0800

--
 .../conf/unix/ambari-metrics-monitor|  18 +++
 .../conf/unix/ambari-metrics-collector  |  16 ++-
 .../AMS/0.1.0/configuration/ams-env.xml |  32 -
 .../AMS/0.1.0/configuration/ams-hbase-env.xml   |   4 +-
 .../AMS/0.1.0/configuration/ams-log4j.xml   |   5 +-
 .../AMS/0.1.0/package/scripts/ams.py|  17 +++
 .../AMS/0.1.0/package/scripts/ams_service.py|   3 -
 .../AMS/0.1.0/package/scripts/params.py |  46 +++---
 .../AMS/0.1.0/package/scripts/status_params.py  |   4 +-
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |  19 +++
 .../stacks/HDP/2.2/services/stack_advisor.py|   7 +-
 ambari-web/app/data/HDP2/site_properties.js | 142 ++-
 ambari-web/app/models/stack_service.js  |   4 +-
 13 files changed, 240 insertions(+), 77 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/e904eb7f/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
 
b/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
index 5779b5b..aaa77c4 100644
--- 
a/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
@@ -102,6 +102,24 @@ while [[ -z "${_ams_configs_done}" ]]; do
   esac
 done
 
+#execute ams-env.sh
+if [[ -f "${MONITOR_CONF_DIR}/ams-env.sh" ]]; then
+  . "${MONITOR_CONF_DIR}/ams-env.sh"
+else
+  echo "ERROR: Cannot execute ${MONITOR_CONF_DIR}/ams-env.sh." 2>&1
+  exit 1
+fi
+
+# Set log directory path
+if [[ -n "${AMS_MONITOR_LOG_DIR}" ]]; then
+  OUTFILE=${AMS_MONITOR_LOG_DIR}/ambari-metrics-monitor.out
+fi
+
+# Set pid directory path
+if [[ -n "${AMS_MONITOR_PID_DIR}" ]]; then
+  PIDFILE=${AMS_MONITOR_PID_DIR}/ambari-metrics-monitor.pid
+fi
+
 case "$1" in
 
   start)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e904eb7f/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
 
b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
index 9aabbdc..c98fea2 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
@@ -31,6 +31,8 @@ HBASE_CONF_DIR=/etc/ams-hbase/conf
 
 METRIC_COLLECTOR=ambari-metrics-collector
 
+AMS_LOG_DIR=/var/log/ambari-metrics-collector
+
 STOP_TIMEOUT=5
 
 function hbase_daemon
@@ -160,7 +162,7 @@ while [[ -z "${_ams_configs_done}" ]]; do
   esac
 done
 
-#execute ams-env.sh
+# execute ams-env.sh
 if [[ -f "${COLLECTOR_CONF_DIR}/ams-env.sh" ]]; then
   . "${COLLECTOR_CONF_DIR}/ams-env.sh"
 else
@@ -168,6 +170,16 @@ else
   exit 1
 fi
 
+# set pid dir path
+if [[ -n "${AMS_PID_DIR}" ]]; then
+  PIDFILE=${AMS_PID_DIR}/ambari-metrics-collector.pid
+fi
+
+# set out file path
+if [[ -n "${AMS_COLLECTOR_LOG_DIR}" ]]; then
+  OUTFILE=${AMS_COLLECTOR_LOG_DIR}/ambari-metrics-collector.out
+fi
+
 #TODO manage 3 hbase daemons for start/stop/status
 case "$1" in
 
@@ -200,7 +212,7 @@ case "$1" in
 rm -f "${PIDFILE}" >/dev/null 2>&1
 fi
 
-nohup "${JAVA}" "-cp" 
"/usr/lib/ambari-metrics-collector/*:${COLLECTOR_CONF_DIR}" 
"-Djava.net.preferIPv4Stack=true" "-Dproc_${DAEMON_NAME}" "${CLASS}" "$@" > 
$OUTFILE 2>&1 &
+nohup "${JAVA}" "-cp" 
"/usr/lib/ambari-metrics-collector/*:${COLLECTOR_CONF_DIR}" 
"-Djava.net.preferIPv4Stack=true" "-Dams.log.dir=${AMS_COLLECTOR_LOG_DIR}" 
"-Dproc_${DAEMON_NAME}" "${CLASS}" "$@" > $OUTFILE 2>&1 &
 PID=$!
 write_pidfile "${PIDFILE}"
 sleep

[1/3] ambari git commit: Revert "AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. Log dir fix. (swagle)"

2015-01-07 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 78efd0483 -> 789b6544d


Revert "AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. 
Log dir fix. (swagle)"

This reverts commit 78efd0483f56026059f858b0eedaa8ba5db24e50.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fc7394bf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fc7394bf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fc7394bf

Branch: refs/heads/trunk
Commit: fc7394bf99fa963e5a1e3a205d4db1fe7e80374b
Parents: 78efd04
Author: Siddharth Wagle 
Authored: Wed Jan 7 18:42:26 2015 -0800
Committer: Siddharth Wagle 
Committed: Wed Jan 7 18:42:26 2015 -0800

--
 .../conf/unix/ambari-metrics-collector   | 2 +-
 .../common-services/AMS/0.1.0/configuration/ams-env.xml  | 8 
 2 files changed, 5 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/fc7394bf/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
 
b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
index 714fa94..aabbde8 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
@@ -31,7 +31,7 @@ HBASE_CONF_DIR=/etc/ams-hbase/conf
 
 METRIC_COLLECTOR=ambari-metrics-collector
 
-AMS_LOG_DIR=/var/log/ambari-metrics-collector
+AMS_LOG_DIR=/var/log/ambari-metric-collector
 
 STOP_TIMEOUT=5
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fc7394bf/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
index 18c4b61..c2f3d49 100644
--- 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
+++ 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
@@ -27,22 +27,22 @@
   
   
 ams_collector_log_dir
-/var/log/ambari-metrics-collector
+/var/log/ambari-metric-collector
 Collector log directory.
   
   
 ams_collector_pid_dir
-/var/run/ambari-metrics-collector
+/var/run/ambari-metric-collector
 Collector pid directory.
   
   
 ams_monitor_pid_dir
-/var/run/ambari-metrics-monitor
+/var/run/ambari-metric-monitor
 Monitor pid directory.
   
   
 ams_monitor_log_dir
-/var/log/ambari-metrics-monitor
+/var/log/ambari-metric-monitor
 Monitor log directory.
   
 



[3/3] ambari git commit: Revert "AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. Issues with monitor start. (swagle)"

2015-01-07 Thread swagle
Revert "AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. 
Issues with monitor start. (swagle)"

This reverts commit d111c8ee157e295cffcf63ba07736f827dfcb007.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/789b6544
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/789b6544
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/789b6544

Branch: refs/heads/trunk
Commit: 789b6544d91d1344142c88fa709a67b214aa3d3a
Parents: af38b56
Author: Siddharth Wagle 
Authored: Wed Jan 7 18:48:37 2015 -0800
Committer: Siddharth Wagle 
Committed: Wed Jan 7 18:48:37 2015 -0800

--
 .../conf/unix/ambari-metrics-monitor|  13 --
 .../conf/unix/ambari-metrics-collector  |   4 +-
 .../AMS/0.1.0/configuration/ams-env.xml |  26 +---
 .../AMS/0.1.0/configuration/ams-hbase-env.xml   |   4 +-
 .../AMS/0.1.0/configuration/ams-log4j.xml   |   5 +-
 .../AMS/0.1.0/package/scripts/ams.py|   5 -
 .../AMS/0.1.0/package/scripts/params.py |   2 -
 .../AMS/0.1.0/package/scripts/status_params.py  |   4 +-
 ambari-web/app/data/HDP2/site_properties.js | 140 +--
 ambari-web/app/models/stack_service.js  |   4 +-
 10 files changed, 46 insertions(+), 161 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/789b6544/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
 
b/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
index 319aa36..5779b5b 100644
--- 
a/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
@@ -102,19 +102,6 @@ while [[ -z "${_ams_configs_done}" ]]; do
   esac
 done
 
-#execute ams-env.sh
-if [[ -f "${MONITOR_CONF_DIR}/ams-env.sh" ]]; then
-  . "${MONITOR_CONF_DIR}/ams-env.sh"
-else
-  echo "ERROR: Cannot execute ${MONITOR_CONF_DIR}/ams-env.sh." 2>&1
-  exit 1
-fi
-
-# Set log directory path
-if [[ -n "${AMS_MONITOR_LOG_DIR}" ]]; then
-  OUTFILE=${AMS_MONITOR_LOG_DIR}/ambari-metrics-monitor.out
-fi
-
 case "$1" in
 
   start)

http://git-wip-us.apache.org/repos/asf/ambari/blob/789b6544/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
 
b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
index aabbde8..9aabbdc 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
@@ -31,8 +31,6 @@ HBASE_CONF_DIR=/etc/ams-hbase/conf
 
 METRIC_COLLECTOR=ambari-metrics-collector
 
-AMS_LOG_DIR=/var/log/ambari-metric-collector
-
 STOP_TIMEOUT=5
 
 function hbase_daemon
@@ -202,7 +200,7 @@ case "$1" in
 rm -f "${PIDFILE}" >/dev/null 2>&1
 fi
 
-nohup "${JAVA}" "-cp" 
"/usr/lib/ambari-metrics-collector/*:${COLLECTOR_CONF_DIR}" 
"-Djava.net.preferIPv4Stack=true" "-Dams.log.dir=${AMS_COLLECTOR_LOG_DIR}" 
"-Dproc_${DAEMON_NAME}" "${CLASS}" "$@" > $OUTFILE 2>&1 &
+nohup "${JAVA}" "-cp" 
"/usr/lib/ambari-metrics-collector/*:${COLLECTOR_CONF_DIR}" 
"-Djava.net.preferIPv4Stack=true" "-Dproc_${DAEMON_NAME}" "${CLASS}" "$@" > 
$OUTFILE 2>&1 &
 PID=$!
 write_pidfile "${PIDFILE}"
 sleep 2

http://git-wip-us.apache.org/repos/asf/ambari/blob/789b6544/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
index c2f3d49..fda1df0 100644
--- 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
+++ 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
@@ -25,26 +25,6 @@
 USER
 AMS User Name.
   
-  
-ams_collector_log_dir
-/var/log/ambari-metric-collector
-Collector log directory.
-  
-  
-ams_collector_pid_dir
-/var/run/ambari-metric-collector
-Collector pid d

[2/3] ambari git commit: Revert "AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. Edit descriptions. (swagle)"

2015-01-07 Thread swagle
Revert "AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. 
Edit descriptions. (swagle)"

This reverts commit 07a3078cc46fd31bdb58aee22a91f2c2ff3007d8.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/af38b560
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/af38b560
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/af38b560

Branch: refs/heads/trunk
Commit: af38b560f569d4260d13a4e5a89ac05c4dd12ca8
Parents: fc7394b
Author: Siddharth Wagle 
Authored: Wed Jan 7 18:42:51 2015 -0800
Committer: Siddharth Wagle 
Committed: Wed Jan 7 18:42:51 2015 -0800

--
 ambari-web/app/data/HDP2/site_properties.js | 12 ++--
 1 file changed, 6 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/af38b560/ambari-web/app/data/HDP2/site_properties.js
--
diff --git a/ambari-web/app/data/HDP2/site_properties.js 
b/ambari-web/app/data/HDP2/site_properties.js
index 99797cc..b6cb946 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -4410,7 +4410,7 @@ module.exports =
   "id": "site property",
   "name": "ams_collector_log_dir",
   "displayName": "Metrics Collector log dir",
-  "description":  "\n  Log location for collector logs\n",
+  "description":  "\n  Log location for collector logs.\n",
   "defaultValue": "embedded",
   "displayType": "string",
   "serviceName": "AMS",
@@ -4422,7 +4422,7 @@ module.exports =
   "id": "site property",
   "name": "ams_collector_pid_dir",
   "displayName": "Metrics Collector pid dir",
-  "description":  "\n  pid location for collector\n",
+  "description":  "\n  pid location for collector.\n",
   "defaultValue": "embedded",
   "displayType": "string",
   "serviceName": "AMS",
@@ -4434,7 +4434,7 @@ module.exports =
   "id": "site property",
   "name": "ams_monitor_log_dir",
   "displayName": "Metrics Monitor log dir",
-  "description":  "\n  Log location for monitor logs\n",
+  "description":  "\n  Log location for monitor logs.\n",
   "defaultValue": "embedded",
   "displayType": "string",
   "serviceName": "AMS",
@@ -4446,7 +4446,7 @@ module.exports =
   "id": "site property",
   "name": "ams_monitor_pid_dir",
   "displayName": "Metrics Monitor pid dir",
-  "description":  "\n  pid location for monitor\n",
+  "description":  "\n  pid location for monitor.\n",
   "defaultValue": "embedded",
   "displayType": "string",
   "serviceName": "AMS",
@@ -4458,7 +4458,7 @@ module.exports =
   "id": "site property",
   "name" : "timeline.metrics.aggregator.checkpoint.dir",
   "displayName": "Aggregator checkpoint directory",
-  "description":  "\n  Directory to store aggregator checkpoints\n
",
+  "description":  "\n  Directory to store aggregator checkpoints.\n
",
   "defaultValue": "/tmp",
   "displayType": "directory",
   "serviceName": "AMS",
@@ -4566,7 +4566,7 @@ module.exports =
   "id": "site property",
   "name": "timeline.metrics.host.aggregator.hourly.disabled",
   "displayName": "Disable Hourly host aggregator",
-  "description":  "\n  Disable host based hourly aggregations\n",
+  "description":  "\n  Disable host based hourly aggregations.\n",
   "defaultValue": "false",
   "displayType": "string",
   "serviceName": "AMS",



ambari git commit: AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. Log dir fix. (swagle)

2015-01-07 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 6b1eef55e -> 78efd0483


AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. Log dir 
fix. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/78efd048
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/78efd048
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/78efd048

Branch: refs/heads/trunk
Commit: 78efd0483f56026059f858b0eedaa8ba5db24e50
Parents: 6b1eef5
Author: Siddharth Wagle 
Authored: Wed Jan 7 18:33:42 2015 -0800
Committer: Siddharth Wagle 
Committed: Wed Jan 7 18:33:42 2015 -0800

--
 .../conf/unix/ambari-metrics-collector   | 2 +-
 .../common-services/AMS/0.1.0/configuration/ams-env.xml  | 8 
 2 files changed, 5 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/78efd048/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
 
b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
index aabbde8..714fa94 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
@@ -31,7 +31,7 @@ HBASE_CONF_DIR=/etc/ams-hbase/conf
 
 METRIC_COLLECTOR=ambari-metrics-collector
 
-AMS_LOG_DIR=/var/log/ambari-metric-collector
+AMS_LOG_DIR=/var/log/ambari-metrics-collector
 
 STOP_TIMEOUT=5
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/78efd048/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
index c2f3d49..18c4b61 100644
--- 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
+++ 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
@@ -27,22 +27,22 @@
   
   
 ams_collector_log_dir
-/var/log/ambari-metric-collector
+/var/log/ambari-metrics-collector
 Collector log directory.
   
   
 ams_collector_pid_dir
-/var/run/ambari-metric-collector
+/var/run/ambari-metrics-collector
 Collector pid directory.
   
   
 ams_monitor_pid_dir
-/var/run/ambari-metric-monitor
+/var/run/ambari-metrics-monitor
 Monitor pid directory.
   
   
 ams_monitor_log_dir
-/var/log/ambari-metric-monitor
+/var/log/ambari-metrics-monitor
 Monitor log directory.
   
 



[2/2] ambari git commit: AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. Edit descriptions. (swagle)

2015-01-07 Thread swagle
AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. Edit 
descriptions. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/07a3078c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/07a3078c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/07a3078c

Branch: refs/heads/trunk
Commit: 07a3078cc46fd31bdb58aee22a91f2c2ff3007d8
Parents: d111c8e
Author: Siddharth Wagle 
Authored: Wed Jan 7 15:32:38 2015 -0800
Committer: Siddharth Wagle 
Committed: Wed Jan 7 15:32:38 2015 -0800

--
 ambari-web/app/data/HDP2/site_properties.js | 12 ++--
 1 file changed, 6 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/07a3078c/ambari-web/app/data/HDP2/site_properties.js
--
diff --git a/ambari-web/app/data/HDP2/site_properties.js 
b/ambari-web/app/data/HDP2/site_properties.js
index b6cb946..99797cc 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -4410,7 +4410,7 @@ module.exports =
   "id": "site property",
   "name": "ams_collector_log_dir",
   "displayName": "Metrics Collector log dir",
-  "description":  "\n  Log location for collector logs.\n",
+  "description":  "\n  Log location for collector logs\n",
   "defaultValue": "embedded",
   "displayType": "string",
   "serviceName": "AMS",
@@ -4422,7 +4422,7 @@ module.exports =
   "id": "site property",
   "name": "ams_collector_pid_dir",
   "displayName": "Metrics Collector pid dir",
-  "description":  "\n  pid location for collector.\n",
+  "description":  "\n  pid location for collector\n",
   "defaultValue": "embedded",
   "displayType": "string",
   "serviceName": "AMS",
@@ -4434,7 +4434,7 @@ module.exports =
   "id": "site property",
   "name": "ams_monitor_log_dir",
   "displayName": "Metrics Monitor log dir",
-  "description":  "\n  Log location for monitor logs.\n",
+  "description":  "\n  Log location for monitor logs\n",
   "defaultValue": "embedded",
   "displayType": "string",
   "serviceName": "AMS",
@@ -4446,7 +4446,7 @@ module.exports =
   "id": "site property",
   "name": "ams_monitor_pid_dir",
   "displayName": "Metrics Monitor pid dir",
-  "description":  "\n  pid location for monitor.\n",
+  "description":  "\n  pid location for monitor\n",
   "defaultValue": "embedded",
   "displayType": "string",
   "serviceName": "AMS",
@@ -4458,7 +4458,7 @@ module.exports =
   "id": "site property",
   "name" : "timeline.metrics.aggregator.checkpoint.dir",
   "displayName": "Aggregator checkpoint directory",
-  "description":  "\n  Directory to store aggregator checkpoints.\n
",
+  "description":  "\n  Directory to store aggregator checkpoints\n
",
   "defaultValue": "/tmp",
   "displayType": "directory",
   "serviceName": "AMS",
@@ -4566,7 +4566,7 @@ module.exports =
   "id": "site property",
   "name": "timeline.metrics.host.aggregator.hourly.disabled",
   "displayName": "Disable Hourly host aggregator",
-  "description":  "\n  Disable host based hourly aggregations.\n",
+  "description":  "\n  Disable host based hourly aggregations\n",
   "defaultValue": "false",
   "displayType": "string",
   "serviceName": "AMS",



[1/2] ambari git commit: AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. (swagle)

2015-01-07 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk b6ecaa244 -> 07a3078cc


AMBARI-9034. Add ability to change log and pid dirs for AMS daemons. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d111c8ee
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d111c8ee
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d111c8ee

Branch: refs/heads/trunk
Commit: d111c8ee157e295cffcf63ba07736f827dfcb007
Parents: b6ecaa2
Author: Siddharth Wagle 
Authored: Wed Jan 7 15:19:54 2015 -0800
Committer: Siddharth Wagle 
Committed: Wed Jan 7 15:20:00 2015 -0800

--
 .../conf/unix/ambari-metrics-monitor|  13 ++
 .../conf/unix/ambari-metrics-collector  |   4 +-
 .../AMS/0.1.0/configuration/ams-env.xml |  26 +++-
 .../AMS/0.1.0/configuration/ams-hbase-env.xml   |   4 +-
 .../AMS/0.1.0/configuration/ams-log4j.xml   |   5 +-
 .../AMS/0.1.0/package/scripts/ams.py|   5 +
 .../AMS/0.1.0/package/scripts/params.py |   2 +
 .../AMS/0.1.0/package/scripts/status_params.py  |   4 +-
 ambari-web/app/data/HDP2/site_properties.js | 140 ++-
 ambari-web/app/models/stack_service.js  |   4 +-
 10 files changed, 161 insertions(+), 46 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/d111c8ee/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
 
b/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
index 5779b5b..319aa36 100644
--- 
a/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/conf/unix/ambari-metrics-monitor
@@ -102,6 +102,19 @@ while [[ -z "${_ams_configs_done}" ]]; do
   esac
 done
 
+#execute ams-env.sh
+if [[ -f "${MONITOR_CONF_DIR}/ams-env.sh" ]]; then
+  . "${MONITOR_CONF_DIR}/ams-env.sh"
+else
+  echo "ERROR: Cannot execute ${MONITOR_CONF_DIR}/ams-env.sh." 2>&1
+  exit 1
+fi
+
+# Set log directory path
+if [[ -n "${AMS_MONITOR_LOG_DIR}" ]]; then
+  OUTFILE=${AMS_MONITOR_LOG_DIR}/ambari-metrics-monitor.out
+fi
+
 case "$1" in
 
   start)

http://git-wip-us.apache.org/repos/asf/ambari/blob/d111c8ee/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
 
b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
index 9aabbdc..aabbde8 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/conf/unix/ambari-metrics-collector
@@ -31,6 +31,8 @@ HBASE_CONF_DIR=/etc/ams-hbase/conf
 
 METRIC_COLLECTOR=ambari-metrics-collector
 
+AMS_LOG_DIR=/var/log/ambari-metric-collector
+
 STOP_TIMEOUT=5
 
 function hbase_daemon
@@ -200,7 +202,7 @@ case "$1" in
 rm -f "${PIDFILE}" >/dev/null 2>&1
 fi
 
-nohup "${JAVA}" "-cp" 
"/usr/lib/ambari-metrics-collector/*:${COLLECTOR_CONF_DIR}" 
"-Djava.net.preferIPv4Stack=true" "-Dproc_${DAEMON_NAME}" "${CLASS}" "$@" > 
$OUTFILE 2>&1 &
+nohup "${JAVA}" "-cp" 
"/usr/lib/ambari-metrics-collector/*:${COLLECTOR_CONF_DIR}" 
"-Djava.net.preferIPv4Stack=true" "-Dams.log.dir=${AMS_COLLECTOR_LOG_DIR}" 
"-Dproc_${DAEMON_NAME}" "${CLASS}" "$@" > $OUTFILE 2>&1 &
 PID=$!
 write_pidfile "${PIDFILE}"
 sleep 2

http://git-wip-us.apache.org/repos/asf/ambari/blob/d111c8ee/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
index fda1df0..c2f3d49 100644
--- 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
+++ 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-env.xml
@@ -25,6 +25,26 @@
 USER
 AMS User Name.
   
+  
+ams_collector_log_dir
+/var/log/ambari-metric-collector
+Collector log directory.
+  
+  
+ams_collector_pid_dir
+/var/run/ambari-metric-collector
+Collector pid directory.
+  
+  
+ams_moni

ambari git commit: AMBARI-9015. Unsorted aggregate metrics returned by API. (swagle)

2015-01-06 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 09a01c2b9 -> 4000258cb


AMBARI-9015. Unsorted aggregate metrics returned by API. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4000258c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4000258c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4000258c

Branch: refs/heads/trunk
Commit: 4000258cb2846b0dca620be26ee0ee3be26cf326
Parents: 09a01c2
Author: Siddharth Wagle 
Authored: Tue Jan 6 13:02:22 2015 -0800
Committer: Siddharth Wagle 
Committed: Tue Jan 6 13:02:28 2015 -0800

--
 .../metrics/timeline/PhoenixHBaseAccessor.java   | 11 ---
 1 file changed, 4 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/4000258c/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index c3a418a..8be5112 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -221,8 +221,7 @@ public class PhoenixHBaseAccessor {
 return metricHostAggregate;
   }
 
-  static TimelineClusterMetric
-  getTimelineMetricClusterKeyFromResultSet(ResultSet rs)
+  static TimelineClusterMetric 
getTimelineMetricClusterKeyFromResultSet(ResultSet rs)
 throws SQLException, IOException {
 TimelineClusterMetric metric = new TimelineClusterMetric(
   rs.getString("METRIC_NAME"),
@@ -234,8 +233,7 @@ public class PhoenixHBaseAccessor {
 return metric;
   }
 
-  static MetricClusterAggregate
-  getMetricClusterAggregateFromResultSet(ResultSet rs)
+  static MetricClusterAggregate 
getMetricClusterAggregateFromResultSet(ResultSet rs)
 throws SQLException {
 MetricClusterAggregate agg = new MetricClusterAggregate();
 agg.setSum(rs.getDouble("METRIC_SUM"));
@@ -547,15 +545,14 @@ public class PhoenixHBaseAccessor {
 return stmt;
   }
 
-  private TimelineMetric getAggregateTimelineMetricFromResultSet(
-ResultSet rs) throws SQLException {
+  private TimelineMetric getAggregateTimelineMetricFromResultSet(ResultSet rs) 
throws SQLException {
 TimelineMetric metric = new TimelineMetric();
 metric.setMetricName(rs.getString("METRIC_NAME"));
 metric.setAppId(rs.getString("APP_ID"));
 metric.setInstanceId(rs.getString("INSTANCE_ID"));
 metric.setTimestamp(rs.getLong("SERVER_TIME"));
 metric.setStartTime(rs.getLong("SERVER_TIME"));
-Map<Long, Double> valueMap = new HashMap<Long, Double>();
+Map<Long, Double> valueMap = new TreeMap<Long, Double>();
 valueMap.put(rs.getLong("SERVER_TIME"),
   rs.getDouble("METRIC_SUM") / rs.getInt("HOSTS_COUNT"));
 metric.setMetricValues(valueMap);
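
The fix above swaps the HashMap holding metric values for a TreeMap keyed by timestamp, so the aggregate series iterates in ascending time order rather than hash order. A small stand-alone Java sketch of the difference (timestamps and values are made up):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class SortedMetricValuesSketch {
  public static void main(String[] args) {
    long base = 1420000000000L;  // made-up epoch-millis timestamp
    Map<Long, Double> hashOrdered = new HashMap<Long, Double>();  // iteration order unspecified
    Map<Long, Double> timeOrdered = new TreeMap<Long, Double>();  // iterates by ascending key
    for (long offset : new long[] {120000L, 0L, 60000L}) {
      hashOrdered.put(base + offset, 1.0);
      timeOrdered.put(base + offset, 1.0);
    }
    System.out.println("HashMap keys: " + hashOrdered.keySet());  // arbitrary order
    System.out.println("TreeMap keys: " + timeOrdered.keySet());  // ascending timestamps
  }
}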



ambari git commit: AMBARI-8594. Push AMS Hbase metrics with its own appid. HBase metrics fix. (swagle)

2015-01-05 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 2c9820f69 -> 08a13e49e


AMBARI-8594. Push AMS Hbase metrics with its own appid. HBase metrics fix. 
(swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/08a13e49
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/08a13e49
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/08a13e49

Branch: refs/heads/trunk
Commit: 08a13e49eab35ac02e017213204cf349da4b8184
Parents: 2c9820f
Author: Siddharth Wagle 
Authored: Mon Jan 5 17:20:39 2015 -0800
Committer: Siddharth Wagle 
Committed: Mon Jan 5 17:20:45 2015 -0800

--
 .../metrics/timeline/AMSComponentPropertyProvider.java   | 4 
 .../metrics/timeline/AMSHostComponentPropertyProvider.java   | 4 
 2 files changed, 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/08a13e49/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSComponentPropertyProvider.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSComponentPropertyProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSComponentPropertyProvider.java
index 815224f..1fb0869 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSComponentPropertyProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSComponentPropertyProvider.java
@@ -46,10 +46,6 @@ public class AMSComponentPropertyProvider extends 
AMSPropertyProvider {
   protected String getComponentName(Resource resource) {
 String componentName = (String) 
resource.getPropertyValue(componentNamePropertyId);
 
-if (TIMELINE_APPID_MAP.containsKey(componentName)) {
-  componentName = TIMELINE_APPID_MAP.get(componentName);
-}
-
 return componentName;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/08a13e49/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSHostComponentPropertyProvider.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSHostComponentPropertyProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSHostComponentPropertyProvider.java
index 523371a..8ee2acb 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSHostComponentPropertyProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSHostComponentPropertyProvider.java
@@ -47,10 +47,6 @@ public class AMSHostComponentPropertyProvider extends 
AMSPropertyProvider {
   protected String getComponentName(Resource resource) {
 String componentName = (String) 
resource.getPropertyValue(componentNamePropertyId);
 
-if (TIMELINE_APPID_MAP.containsKey(componentName)) {
-  componentName = TIMELINE_APPID_MAP.get(componentName);
-}
-
 return componentName;
   }
 }



ambari git commit: AMBARI-8994. AMS: Yarn service - RPC metrics return duplicate array elements. (swagle)

2015-01-05 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 83b8ab969 -> dec5f7d69


AMBARI-8994. AMS: Yarn service - RPC metrics return duplicate array elements. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/dec5f7d6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/dec5f7d6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/dec5f7d6

Branch: refs/heads/trunk
Commit: dec5f7d69c43db12a4080d6e65de8be102936fb1
Parents: 83b8ab9
Author: Siddharth Wagle 
Authored: Mon Jan 5 14:19:27 2015 -0800
Committer: Siddharth Wagle 
Committed: Mon Jan 5 14:19:34 2015 -0800

--
 .../timeline/AbstractTimelineMetricsSink.java   |  83 +++
 .../base/AbstractTimelineMetricsSink.java   |  79 ---
 .../timeline/cache/TimelineMetricsCache.java|  30 +++-
 .../sink/flume/FlumeTimelineMetricsSink.java|   2 +-
 .../timeline/HadoopTimelineMetricsSink.java |  11 +-
 .../timeline/HadoopTimelineMetricsSinkTest.java | 142 ++-
 .../sink/storm/StormTimelineMetricsSink.java|   2 +-
 7 files changed, 257 insertions(+), 92 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/dec5f7d6/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
--
diff --git 
a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
 
b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
new file mode 100644
index 000..a382ccb
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import org.apache.commons.httpclient.HttpClient;
+import org.apache.commons.httpclient.methods.PostMethod;
+import org.apache.commons.httpclient.methods.StringRequestEntity;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.codehaus.jackson.map.AnnotationIntrospector;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
+
+import java.io.IOException;
+import java.net.SocketAddress;
+
+public abstract class AbstractTimelineMetricsSink {
+  public static final String TAGS_FOR_PREFIX_PROPERTY_PREFIX = 
"tagsForPrefix.";
+  public static final String MAX_METRIC_ROW_CACHE_SIZE = "maxRowCacheSize";
+  public static final String METRICS_SEND_INTERVAL = "sendInterval";
+  public static final String COLLECTOR_HOST_PROPERTY = "collector";
+
+  protected final Log LOG;
+  private HttpClient httpClient = new HttpClient();
+
+  protected static ObjectMapper mapper;
+
+  static {
+mapper = new ObjectMapper();
+AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
+mapper.setAnnotationIntrospector(introspector);
+mapper.getSerializationConfig()
+.setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
+  }
+
+  public AbstractTimelineMetricsSink() {
+LOG = LogFactory.getLog(this.getClass());
+  }
+
+  protected void emitMetrics(TimelineMetrics metrics) throws IOException {
+String jsonData = mapper.writeValueAsString(metrics);
+
+SocketAddress socketAddress = getServerSocketAddress();
+
+if (socketAddress != null) {
+  StringRequestEntity requestEntity = new StringRequestEntity(jsonData, 
"application/json", "UTF-8");
+
+  PostMethod postMethod = new PostMethod(getCollectorUri());
+  postMethod.setRequestEntity(requestEntity);
+  int statusCode = httpClient.executeMethod(postMe
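
The emitMetrics helper above serializes the TimelineMetrics object to JSON and POSTs it to the collector with commons-httpclient 3.x. A self-contained sketch of the same call pattern, with a hypothetical collector URL and an empty payload:

import java.io.IOException;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.StringRequestEntity;

public class PostMetricsSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical collector endpoint and a trivially empty metrics payload.
    String collectorUri = "http://collector.example.com:6188/ws/v1/timeline/metrics";
    String jsonData = "{\"metrics\":[]}";

    HttpClient httpClient = new HttpClient();
    PostMethod post = new PostMethod(collectorUri);
    try {
      post.setRequestEntity(
          new StringRequestEntity(jsonData, "application/json", "UTF-8"));
      int statusCode = httpClient.executeMethod(post);
      System.out.println("Collector responded with HTTP " + statusCode);
    } finally {
      post.releaseConnection();
    }
  }
}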

ambari git commit: AMBARI-8977. Unit test failure on CentOS 5.

2015-01-02 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 1cc10ed3b -> 6e5df9188


AMBARI-8977. Unit test failure on CentOS 5.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6e5df918
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6e5df918
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6e5df918

Branch: refs/heads/trunk
Commit: 6e5df9188d512cfd966e2f85e95ae89b2f30ef18
Parents: 1cc10ed
Author: Siddharth Wagle 
Authored: Fri Jan 2 10:01:48 2015 -0800
Committer: Siddharth Wagle 
Committed: Fri Jan 2 10:02:31 2015 -0800

--
 ambari-metrics/ambari-metrics-host-monitoring/pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/6e5df918/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
--
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/pom.xml 
b/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
index c2f322c..3a130ff 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
+++ b/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
@@ -162,7 +162,7 @@
 
 
   
-
+
   
   
   
@@ -211,7 +211,7 @@
 
 
   
-
+
   
   
   



ambari git commit: AMBARI-8961. Add alert description for AMS. (swagle)

2014-12-30 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 8cf0e9155 -> 9884cbdd5


AMBARI-8961. Add alert description for AMS. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9884cbdd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9884cbdd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9884cbdd

Branch: refs/heads/trunk
Commit: 9884cbdd51f6d465044d4573a265f124424ec821
Parents: 8cf0e91
Author: Siddharth Wagle 
Authored: Tue Dec 30 10:16:33 2014 -0800
Committer: Siddharth Wagle 
Committed: Tue Dec 30 10:16:45 2014 -0800

--
 .../common-services/AMS/0.1.0/alerts.json | 18 --
 1 file changed, 12 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/9884cbdd/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
index 93b224a..ad22f59 100644
--- a/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
@@ -3,7 +3,8 @@
 "service": [
   {
 "name": "ams_metric_monitor_process_percent",
-"label": "Percent AMS Metric Monitors Available",
+"label": "Percent Metric Monitors Available",
+"description": "This alert is triggered if a percentage of Metric 
Monitor processes are not up and listening on the network for the configured 
warning and critical thresholds.",
 "interval": 1,
 "scope": "SERVICE",
 "enabled": true,
@@ -29,7 +30,8 @@
 "METRIC_COLLECTOR": [
   {
 "name": "ams_metric_collector_process",
-"label": "AMS Metric Collector Process",
+"label": "Metric Collector Process",
+"description": "This alert is triggered if the Metric Collector cannot 
be confirmed to be up and listening on the configured port for number of 
seconds equal to threshold.",
 "interval": 1,
 "scope": "ANY",
 "enabled": true,
@@ -54,7 +56,8 @@
   },
   {
 "name": "ams_metric_collector_hbase_master_process",
-"label": "AMS Metric Collector HBase Master Process",
+"label": "Metric Collector - HBase Master Process",
+"description": "This alert is triggered if the Metric Collector's 
HBase master processes cannot be confirmed to be up and listening on the 
network for the configured critical threshold, given in seconds.",
 "interval": 1,
 "scope": "ANY",
 "source": {
@@ -78,7 +81,8 @@
   },
   {
 "name": "ams_metric_collector_hbase_master_cpu",
-"label": "AMS Metric Collector HBase Maser CPU Utilization",
+"label": "Metric Collector HBase Maser CPU Utilization",
+"description": "This host-level alert is triggered if CPU utilization 
of the Metric Collector's HBase Master exceeds certain warning and critical 
thresholds. It checks the HBase Master JMX Servlet for the SystemCPULoad 
property. The threshold values are in percent.",
 "interval": 5,
 "scope": "ANY",
 "enabled": true,
@@ -116,7 +120,8 @@
   },
   {
 "name": "ams_metric_collector_zookeeper_server_process",
-"label": "AMS Metric Collector ZooKeeper Server Process",
+"label": "Metric Collector - ZooKeeper Server Process",
+"description": "This host-level alert is triggered if the Metric 
Collector's ZooKeeper server process cannot be determined to be up and 
listening on the network.",
 "interval": 1,
 "scope": "ANY",
 "source": {
@@ -142,7 +147,8 @@
 "METRIC_MONITOR": [
   {
 "name": "ams_metric_monitor_process",
-"label": "AMS Metric Monitor Status",
+"label": "Metric Monitor Status",
+"description": "This alert indicates the status of the Metric Monitor 
process as determined by the monitor status script.",
 "interval": 1,
 "scope": "ANY",
 "source": {



ambari git commit: AMBARI-8872. Support point in time queries.

2014-12-30 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk f186c4bad -> 72881097d


AMBARI-8872. Support point in time queries.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/72881097
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/72881097
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/72881097

Branch: refs/heads/trunk
Commit: 72881097dc0f4c9432c62427f2ce5ae6c54c966d
Parents: f186c4b
Author: Siddharth Wagle 
Authored: Tue Dec 30 09:45:14 2014 -0800
Committer: Siddharth Wagle 
Committed: Tue Dec 30 09:45:14 2014 -0800

--
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 475 +++---
 .../metrics/timeline/PhoenixTransactSQL.java| 491 ++-
 .../timeline/TimelineMetricAggregator.java  |   3 +-
 .../TimelineMetricClusterAggregator.java|   3 +-
 .../TimelineMetricClusterAggregatorHourly.java  |   3 +-
 .../metrics/timeline/ITClusterAggregator.java   |   5 +-
 .../metrics/timeline/ITMetricAggregator.java|   7 +-
 .../timeline/TestPhoenixTransactSQL.java|  21 +-
 .../metrics/timeline/AMSPropertyProvider.java   |  76 +--
 .../timeline/AMSPropertyProviderTest.java   |  92 +++-
 10 files changed, 835 insertions(+), 341 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/72881097/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index b5226ee..c3a418a 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -17,7 +17,6 @@
  */
 package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -35,6 +34,7 @@ import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -48,6 +48,7 @@ import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_TABLE_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_TABLE_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.SplitByMetricNamesCondition;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_ENCODING;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_TABLE_COMPRESSION;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
@@ -134,7 +135,6 @@ public class PhoenixHBaseAccessor {
 }
   }
 
-
   /**
* Get JDBC connection to HBase store. Assumption is that the hbase
* configuration is present on the classpath and loaded by the caller into
@@ -148,13 +148,28 @@ public class PhoenixHBaseAccessor {
 return dataSource.getConnection();
   }
 
-  public static Map readMetricFromJSON(String json) throws IOException {
-return mapper.readValue(json, metricValuesTypeRef);
+  private static TimelineMetric getLastTimelineMetricFromResultSet(ResultSet 
rs)
+throws SQLException, IOException {
+TimelineMetric metric = getTimelineMetricCommonsFromResultSet(rs);
+
metric.setMetricValues(readLastMetricValueFromJSON(rs.getString("METRICS")));
+
+return metric;
   }
 
-  @SuppressWarnings("unchecked")
   static TimelineMetric getTimelineMetricFromResultSet(ResultSet rs)
 throws SQLException, IOException {
+TimelineMetric metric = getTimelineMetricCommonsFromResultSet(rs);
+Map sortedByTimeMetrics =
+  new TreeMap(rea
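
The accessor change above introduces a separate last-value read path alongside the full-series one: the METRICS JSON column is either deserialized into a timestamp-ordered map or reduced to its most recent entry. A minimal, self-contained sketch of that idea (the class and method names below are illustrative, not the real PhoenixHBaseAccessor helpers):

import java.util.Map;
import java.util.TreeMap;

// Illustrative sketch of the two read paths: a full, timestamp-ordered series
// versus only the latest (timestamp, value) pair for point-in-time queries.
public class MetricSeriesSketch {

  // Full series: order the raw (timestamp -> value) map by timestamp.
  static TreeMap<Long, Double> fullSeries(Map<Long, Double> raw) {
    return new TreeMap<Long, Double>(raw);
  }

  // Point-in-time: keep just the most recent entry.
  static Map.Entry<Long, Double> lastValue(Map<Long, Double> raw) {
    return new TreeMap<Long, Double>(raw).lastEntry();
  }

  public static void main(String[] args) {
    Map<Long, Double> raw = new TreeMap<Long, Double>();
    raw.put(1419300000000L, 1.0);
    raw.put(1419300010000L, 2.0);
    System.out.println(fullSeries(raw));           // {1419300000000=1.0, 1419300010000=2.0}
    System.out.println(lastValue(raw).getValue()); // 2.0
  }
}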

[1/2] ambari git commit: Revert "AMBARI-8770 [WinGA] Add windows server assembly for AMS. Unit test failure on CentOS."

2014-12-29 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk dc8355c64 -> ab8c0e350


http://git-wip-us.apache.org/repos/asf/ambari/blob/ab8c0e35/ambari-metrics/ambari-metrics-timelineservice/src/main/python/amc_service.py
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/python/amc_service.py 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/python/amc_service.py
deleted file mode 100644
index b901e5c..000
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/python/amc_service.py
+++ /dev/null
@@ -1,174 +0,0 @@
-#!/usr/bin/env python
-
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-import optparse
-import os
-import sys
-
-from ambari_commons.ambari_service import AmbariService
-from ambari_commons.exceptions import FatalException
-from ambari_commons.os_utils import remove_file
-from ambari_commons.os_windows import SvcStatusCallback, WinServiceController
-from ambari_metrics_collector.serviceConfiguration import get_properties, 
get_value_from_properties, DEBUG_MODE_KEY, \
-  SUSPEND_START_MODE_KEY, PID_OUT_FILE, SERVER_OUT_FILE_KEY, SERVER_OUT_FILE, 
SERVICE_USERNAME_KEY, SERVICE_PASSWORD_KEY, \
-  DEFAULT_CONF_DIR, EMBEDDED_HBASE_MASTER_SERVICE
-from embedded_hbase_service import EmbeddedHBaseService
-from main import server_process_main
-
-
-class AMCollectorService(AmbariService):
-  AmbariService._svc_name_ = "AmbariMetricsCollector"
-  AmbariService._svc_display_name_ = "Ambari Metrics Collector"
-  AmbariService._svc_description_ = "Ambari Metrics Collector Service"
-
-  AmbariService._AdjustServiceVersion()
-
-  # Adds the necessary script dir(s) to the Python's modules path.
-  # Modify this as the deployed product's dir structure changes.
-  def _adjustPythonPath(self, current_dir):
-python_path = os.path.join(current_dir, "sbin")
-sys.path.insert(0, python_path)
-pass
-
-  @classmethod
-  def Install(cls, startupMode = "auto", username = None, password = None, 
interactive = False,
-  perfMonIni = None, perfMonDll = None):
-script_path = os.path.dirname(__file__.replace('/', os.sep))
-classPath = os.path.join(script_path, cls.__module__) + "." + cls.__name__
-
-return AmbariService.Install(classPath, startupMode, username, password, 
interactive,
-perfMonIni, perfMonDll)
-
-  def SvcDoRun(self):
-scmStatus = SvcStatusCallback(self)
-
-properties = get_properties()
-self.options.debug = get_value_from_properties(properties, DEBUG_MODE_KEY, 
self.options.debug)
-self.options.suspend_start = get_value_from_properties(properties, 
SUSPEND_START_MODE_KEY, self.options.suspend_start)
-
-self.redirect_output_streams()
-
-childProc = server_process_main(self.options, scmStatus)
-
-if not self._StopOrWaitForChildProcessToFinish(childProc):
-  return
-
-remove_file(PID_OUT_FILE)
-pass
-
-  def _InitOptionsParser(self):
-return init_options_parser()
-
-  def redirect_output_streams(self):
-properties = get_properties()
-
-outFilePath = properties[SERVER_OUT_FILE_KEY]
-if (outFilePath is None or outFilePath == ""):
-  outFilePath = SERVER_OUT_FILE
-
-self._RedirectOutputStreamsToFile(outFilePath)
-pass
-
-def ctrlHandler(ctrlType):
-  AMCollectorService.DefCtrlCHandler()
-  return True
-
-def svcsetup():
-  AMCollectorService.set_ctrl_c_handler(ctrlHandler)
-
-  # we don't save password between 'setup' runs, so we can't run Install every 
time. We run 'setup' only if user and
-  # password provided or if service not installed
-  if (SERVICE_USERNAME_KEY in os.environ and SERVICE_PASSWORD_KEY in 
os.environ):
-EmbeddedHBaseService.Install(username=os.environ[SERVICE_USERNAME_KEY], 
password=os.environ[SERVICE_PASSWORD_KEY])
-AMCollectorService.Install(username=os.environ[SERVICE_USERNAME_KEY], 
password=os.environ[SERVICE_PASSWORD_KEY])
-  else:
-EmbeddedHBaseService.Install()
-AMCollectorService.Install()
-  pass
-
-#
-# Starts the Ambari Metrics Collector. The server can start as a service or 
standalone process.
-# args:
-#  options.is_process = True - start the server as a process. For now, there 
is no re

[2/2] ambari git commit: Revert "AMBARI-8770 [WinGA] Add windows server assembly for AMS. Unit test failure on CentOS."

2014-12-29 Thread swagle
Revert "AMBARI-8770 [WinGA] Add windows server assembly for AMS. Unit test 
failure on CentOS."

This reverts commit 19e972cf98423200bf30518803f09f2dcd2a.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ab8c0e35
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ab8c0e35
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ab8c0e35

Branch: refs/heads/trunk
Commit: ab8c0e350fded50f32edbe23e2a14376caebbbfc
Parents: dc8355c
Author: Siddharth Wagle 
Authored: Mon Dec 29 19:11:02 2014 -0800
Committer: Siddharth Wagle 
Committed: Mon Dec 29 19:11:02 2014 -0800

--
 ambari-agent/conf/windows/service_wrapper.py|   2 +-
 .../src/main/python/ambari_commons/os_utils.py  |  17 +-
 .../main/python/ambari_commons/os_windows.py|  21 +-
 .../src/main/python/ambari_commons/xml_utils.py |  33 ---
 ambari-metrics/ambari-metrics-assembly/pom.xml  |  77 +--
 .../src/main/assembly/collector-windows.xml | 101 
 .../src/main/assembly/monitor-windows.xml   |  84 ---
 .../src/main/assembly/monitor.xml   |   2 +-
 .../src/main/assembly/sink-windows.xml  |  60 -
 .../conf/windows/ambari-metrics-monitor.cmd |  17 --
 .../conf/windows/metric_groups.conf |  19 --
 .../conf/windows/metric_monitor.ini |  30 ---
 .../ambari-metrics-host-monitoring/pom.xml  | 146 
 .../src/main/python/amhm_service.py | 231 ---
 .../src/main/python/core/__init__.py|   3 +-
 .../src/main/python/core/config_reader.py   |  66 +-
 .../src/main/python/core/controller.py  |  27 +--
 .../src/main/python/core/emitter.py |  17 +-
 .../src/main/python/core/stop_handler.py| 138 ---
 .../src/main/python/main.py |  58 +
 .../conf/windows/ambari-metrics-collector.cmd   |  17 --
 .../conf/windows/ams-env.cmd|  16 --
 .../conf/windows/ams-site.xml   |  25 --
 .../conf/windows/ams.properties |  17 --
 .../conf/windows/log4j.properties   |  29 ---
 .../ambari-metrics-timelineservice/pom.xml  |  31 ---
 .../python/ambari_metrics_collector/__init__.py |  21 --
 .../ambari_metrics_collector/properties.py  | 223 --
 .../serviceConfiguration.py | 152 
 .../src/main/python/amc_service.py  | 174 --
 .../src/main/python/embedded_hbase_service.py   | 201 
 .../src/main/python/main.py | 214 -
 ambari-metrics/pom.xml  |  27 ---
 .../main/python/ambari-server-state/Entities.py |  17 +-
 pom.xml |   2 -
 35 files changed, 105 insertions(+), 2210 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/ab8c0e35/ambari-agent/conf/windows/service_wrapper.py
--
diff --git a/ambari-agent/conf/windows/service_wrapper.py 
b/ambari-agent/conf/windows/service_wrapper.py
index 5eb06c4..40be1d0 100644
--- a/ambari-agent/conf/windows/service_wrapper.py
+++ b/ambari-agent/conf/windows/service_wrapper.py
@@ -92,7 +92,7 @@ class AmbariAgentService(AmbariService):
 # Soft dependency on the Windows Time service
 ensure_time_service_is_started()
 
-self.heartbeat_stop_handler = 
HeartbeatStopHandlers(AmbariAgentService._heventSvcStop)
+self.heartbeat_stop_handler = HeartbeatStopHandlers(self._heventSvcStop)
 
 self.ReportServiceStatus(win32service.SERVICE_RUNNING)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/ab8c0e35/ambari-common/src/main/python/ambari_commons/os_utils.py
--
diff --git a/ambari-common/src/main/python/ambari_commons/os_utils.py 
b/ambari-common/src/main/python/ambari_commons/os_utils.py
index 942a920..3f4819d 100644
--- a/ambari-common/src/main/python/ambari_commons/os_utils.py
+++ b/ambari-common/src/main/python/ambari_commons/os_utils.py
@@ -48,17 +48,14 @@ def quote_path(filepath):
 filepath_ret = filepath
   return filepath_ret
 
-def _search_file(filename, search_path, pathsep):
+def search_file(filename, search_path, pathsep=os.pathsep):
+  """ Given a search path, find file with requested name """
   for path in string.split(search_path, pathsep):
 candidate = os.path.join(path, filename)
 if os.path.exists(candidate):
   return os.path.abspath(candidate)
   return None
 
-def search_file(filename, search_path, pathsep=os.pathsep):
-  """ Given a search path, find file with requested name """
-  return _search_file(filename, search_path, pathsep)
-
 def copy_file(src, dest_file):
   try:
 shutil.copyfile(src,
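
The os_utils.py change above only folds the private _search_file helper back into search_file; the lookup logic itself is unchanged. For illustration, the same first-match-on-a-search-path logic rendered in Java (the class name is invented for this sketch):

import java.io.File;

// Java rendering of the search_file helper shown in the os_utils.py diff above:
// walk a path-separator-delimited search path and return the first existing candidate.
public class SearchFileSketch {
  static String searchFile(String filename, String searchPath) {
    for (String dir : searchPath.split(File.pathSeparator)) {
      File candidate = new File(dir, filename);
      if (candidate.exists()) {
        return candidate.getAbsolutePath();
      }
    }
    return null;
  }

  public static void main(String[] args) {
    System.out.println(searchFile("hosts", "/etc" + File.pathSeparator + "/tmp"));
  }
}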

ambari git commit: AMBARI-8945. Add Phoenix server package to HBase stack definition. (Ted Yu via swagle)

2014-12-29 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk cbbd413c9 -> 1395c8b8c


AMBARI-8945. Add Phoenix server package to HBase stack definition. (Ted Yu via 
swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1395c8b8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1395c8b8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1395c8b8

Branch: refs/heads/trunk
Commit: 1395c8b8c13255422155305d0370763200082149
Parents: cbbd413
Author: Siddharth Wagle 
Authored: Mon Dec 29 14:56:12 2014 -0800
Committer: Siddharth Wagle 
Committed: Mon Dec 29 14:56:12 2014 -0800

--
 .../main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml  | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/1395c8b8/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
index c632cd0..8918adb 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HBASE/metainfo.xml
@@ -30,6 +30,9 @@
 
   hbase_2_2_*
 
+
+  phoenix_2_2_*
+
   
 
 
@@ -38,6 +41,9 @@
 
   hbase-2-2-.*
 
+
+  phoenix-2-2-.*
+
   
 
   



ambari git commit: AMBARI-8946. Fix non-deterministic unit test in Flume sink. (swagle)

2014-12-29 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 19e972cf9 -> cbbd413c9


AMBARI-8946. Fix non-deterministic unit test in Flume sink. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cbbd413c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cbbd413c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cbbd413c

Branch: refs/heads/trunk
Commit: cbbd413c9d11449fdc87cb664931272da225003b
Parents: 19e972c
Author: Siddharth Wagle 
Authored: Mon Dec 29 14:31:04 2014 -0800
Committer: Siddharth Wagle 
Committed: Mon Dec 29 14:31:04 2014 -0800

--
 .../sink/flume/FlumeTimelineMetricsSink.java|  2 +-
 .../flume/FlumeTimelineMetricsSinkTest.java | 38 +++-
 .../common-services/AMS/0.1.0/metainfo.xml  |  2 +-
 3 files changed, 15 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/cbbd413c/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
--
diff --git 
a/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
 
b/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
index 87c4ab8..d28b345 100644
--- 
a/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
+++ 
b/ambari-metrics/ambari-metrics-flume-sink/src/main/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSink.java
@@ -116,7 +116,7 @@ public class FlumeTimelineMetricsSink extends 
AbstractTimelineMetricsSink implem
* org.apache.flume. All attributes of such beans are sent
* to the metrics collector service.
*/
-  private class TimelineMetricsCollector implements Runnable {
+  class TimelineMetricsCollector implements Runnable {
 @Override
 public void run() {
   LOG.debug("Collecting Metrics for Flume");

http://git-wip-us.apache.org/repos/asf/ambari/blob/cbbd413c/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
--
diff --git 
a/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
 
b/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
index ad59576..90831bf 100644
--- 
a/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
+++ 
b/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
@@ -38,34 +38,26 @@ public class FlumeTimelineMetricsSinkTest {
   @Test
   public void testNonNumericMetricMetricExclusion() throws 
InterruptedException {
 FlumeTimelineMetricsSink flumeTimelineMetricsSink = new 
FlumeTimelineMetricsSink();
-TimelineMetricsCache timelineMetricsCache = 
getTimelineMetricsCache(flumeTimelineMetricsSink);
-flumeTimelineMetricsSink.setPollFrequency(1);
-HttpClient httpClient = EasyMock.createNiceMock(HttpClient.class);
-flumeTimelineMetricsSink.setHttpClient(httpClient);
+FlumeTimelineMetricsSink.TimelineMetricsCollector collector =
+  flumeTimelineMetricsSink.new TimelineMetricsCollector();
 mockStatic(JMXPollUtil.class);
 EasyMock.expect(JMXPollUtil.getAllMBeans()).andReturn(
 Collections.singletonMap("component1", 
Collections.singletonMap("key1", "value1"))).once();
-replay(JMXPollUtil.class, timelineMetricsCache, httpClient);
-flumeTimelineMetricsSink.start();
-Thread.sleep(5);
-flumeTimelineMetricsSink.stop();
+replay(JMXPollUtil.class);
+collector.run();
 verifyAll();
   }
 
   @Test
-  public void testNumericMetricMetricSubmission() throws InterruptedException {
+  public void testNumericMetricSubmission() throws InterruptedException {
 FlumeTimelineMetricsSink flumeTimelineMetricsSink = new 
FlumeTimelineMetricsSink();
-TimelineMetricsCache timelineMetricsCache = 
getTimelineMetricsCache(flumeTimelineMetricsSink);
-flumeTimelineMetricsSink.setPollFrequency(1);
-HttpClient httpClient = EasyMock.createNiceMock(HttpClient.class);
-flumeTimelineMetricsSink.setHttpClient(httpClient);
+FlumeTimelineMetricsSink.TimelineMetricsCollector collector =
+  flumeTimelineMetricsSink.new TimelineMetricsCollector();
 mockStatic(JMXPollUtil.class);
 EasyMock.expect(JMXPollUtil.getAllMBeans()).andReturn(
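
The test rewrite above is what removes the non-determinism: instead of starting the sink, sleeping, and stopping it, the collector Runnable is exposed at package scope and its run() method is driven synchronously once. A hedged sketch of the general pattern, without the Flume/JMX or PowerMock specifics (all names below are illustrative):

import java.util.concurrent.atomic.AtomicInteger;

// Illustrative pattern: make the periodic task a plain Runnable so a test can
// invoke run() once, instead of starting a scheduler and sleeping.
public class PollingSinkSketch {
  private final AtomicInteger emitted = new AtomicInteger();

  // Package-private so a test in the same package can drive it directly.
  class Collector implements Runnable {
    @Override
    public void run() {
      // The real sink would poll JMX beans and emit metrics here;
      // this sketch just counts invocations.
      emitted.incrementAndGet();
    }
  }

  int emittedCount() {
    return emitted.get();
  }

  public static void main(String[] args) {
    PollingSinkSketch sink = new PollingSinkSketch();
    Runnable collector = sink.new Collector();
    collector.run();  // deterministic: exactly one collection cycle
    System.out.println("collections: " + sink.emittedCount()); // 1
  }
}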
  

ambari git commit: AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. Typo in metrics.properties. (swagle)

2014-12-23 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk c1c980525 -> bf3d2857f


AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. Typo in 
metrics.properties. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bf3d2857
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bf3d2857
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bf3d2857

Branch: refs/heads/trunk
Commit: bf3d2857f25914b49cd20b85378c5fda92b4189f
Parents: c1c9805
Author: Siddharth Wagle 
Authored: Tue Dec 23 20:19:50 2014 -0800
Committer: Siddharth Wagle 
Committed: Tue Dec 23 20:19:50 2014 -0800

--
 .../templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/bf3d2857/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
--
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
index 8e2f445..9586e1a 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
@@ -48,7 +48,7 @@ hbase.extendedperiod = 3600
 
 {% if has_metric_collector %}
 
-*.timline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+*.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
 hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.period=10
 hbase.collector={{metric_collector_host}}:{{metric_collector_port}}



ambari git commit: Revert "AMBARI-8872. Support point in time queries. Breaks dashboard graphs."

2014-12-23 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 70f1170b0 -> 102b47736


Revert "AMBARI-8872. Support point in time queries. Breaks dashboard graphs."

This reverts commit 9bf9034a5c2481a8b40befab8c3713dcd3b6f584.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/102b4773
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/102b4773
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/102b4773

Branch: refs/heads/trunk
Commit: 102b47736e9f721baa6bac434cd58bfeaf105aff
Parents: 70f1170
Author: Siddharth Wagle 
Authored: Tue Dec 23 14:43:58 2014 -0800
Committer: Siddharth Wagle 
Committed: Tue Dec 23 14:43:58 2014 -0800

--
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 473 +++---
 .../metrics/timeline/PhoenixTransactSQL.java| 491 +--
 .../timeline/TimelineMetricAggregator.java  |   3 +-
 .../TimelineMetricClusterAggregator.java|   3 +-
 .../TimelineMetricClusterAggregatorHourly.java  |   3 +-
 .../metrics/timeline/ITClusterAggregator.java   |   5 +-
 .../metrics/timeline/ITMetricAggregator.java|   7 +-
 .../timeline/TestPhoenixTransactSQL.java|  21 +-
 .../metrics/timeline/AMSPropertyProvider.java   |  76 ++-
 .../timeline/AMSPropertyProviderTest.java   |  92 +---
 10 files changed, 341 insertions(+), 833 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/102b4773/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index 4b04ba9..b5226ee 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -17,6 +17,7 @@
  */
 package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -34,7 +35,6 @@ import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -48,7 +48,6 @@ import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_TABLE_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_TABLE_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
-import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.SplitByMetricNamesCondition;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_ENCODING;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_TABLE_COMPRESSION;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
@@ -135,6 +134,7 @@ public class PhoenixHBaseAccessor {
 }
   }
 
+
   /**
* Get JDBC connection to HBase store. Assumption is that the hbase
* configuration is present on the classpath and loaded by the caller into
@@ -148,28 +148,13 @@ public class PhoenixHBaseAccessor {
 return dataSource.getConnection();
   }
 
-  private static TimelineMetric getLastTimelineMetricFromResultSet(ResultSet 
rs)
-throws SQLException, IOException {
-TimelineMetric metric = getTimelineMetricCommonsFromResultSet(rs);
-
metric.setMetricValues(readLastMetricValueFromJSON(rs.getString("METRICS")));
-
-return metric;
+  public static Map readMetricFromJSON(String json) throws IOException {
+return mapper.readValue(json, metricValuesTypeRef);
   }
 
+  @SuppressWarnings("unchecked")
   static TimelineMetric getTimelineMetricFromResultSet(ResultSet rs)
 throws SQLException, IOException {
-TimelineMetric metric =

ambari git commit: AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. alerts.json default port change. (swagle)

2014-12-23 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 3f1d3dfac -> 70f1170b0


AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. alerts.json 
default port change. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/70f1170b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/70f1170b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/70f1170b

Branch: refs/heads/trunk
Commit: 70f1170b037ca0a9ac1cc175db8f957de50d5055
Parents: 3f1d3df
Author: Siddharth Wagle 
Authored: Tue Dec 23 14:35:57 2014 -0800
Committer: Siddharth Wagle 
Committed: Tue Dec 23 14:36:02 2014 -0800

--
 .../src/main/resources/common-services/AMS/0.1.0/alerts.json | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/70f1170b/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
index cba8b76..700f021 100644
--- a/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
@@ -35,8 +35,8 @@
 "enabled": true,
 "source": {
   "type": "PORT",
-  "uri": "{{metric_collector_port}}",
-  "default_port": 8188,
+  "uri": "{{ams-site/timeline.metrics.service.webapp.address}}",
+  "default_port": 6188,
   "reporting": {
 "ok": {
   "text": "TCP OK - {0:.3f}s response on port {1}"



ambari git commit: AMBARI-8869. Include AMS debian packages in Ambari build. Updated build properties. (swagle)

2014-12-23 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 895b1adc0 -> 0c1c14b76


AMBARI-8869. Include AMS debian packages in Ambari build. Updated build 
properties. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0c1c14b7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0c1c14b7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0c1c14b7

Branch: refs/heads/trunk
Commit: 0c1c14b765ab73a634a231e64ef511d2731c92ce
Parents: 895b1ad
Author: Siddharth Wagle 
Authored: Tue Dec 23 12:50:18 2014 -0800
Committer: Siddharth Wagle 
Committed: Tue Dec 23 12:50:28 2014 -0800

--
 ambari-metrics/ambari-metrics-timelineservice/pom.xml   | 9 +
 .../ApplicationHistoryManagerImpl.java  | 1 +
 .../applicationhistoryservice/ApplicationHistoryServer.java | 4 ++--
 3 files changed, 8 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/0c1c14b7/ambari-metrics/ambari-metrics-timelineservice/pom.xml
--
diff --git a/ambari-metrics/ambari-metrics-timelineservice/pom.xml 
b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
index c2c0323..196d7a1 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/pom.xml
+++ b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
@@ -34,8 +34,9 @@
 
 
 2.5.0
-2.4.0
-4.2.0.2.2.1.0-2114
+(2.6.0.2.2.0.0, 2.6.0.2.2.1.0)
+4.2.0.2.2.0.0-2041
+0.98.4.2.2.0.0-2041-hadoop2
   
 
   
@@ -477,14 +478,14 @@
 
   org.apache.hbase
   hbase-it
-  0.98.4-hadoop2
+  ${hbase.version}
   test
   tests
 
   
 org.apache.hbase
 hbase-testing-util
-0.98.4-hadoop2
+${hbase.version}
 test
 true
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/0c1c14b7/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
index 85a5e3a..84e9a39 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
@@ -171,6 +171,7 @@ public class ApplicationHistoryManagerImpl extends 
AbstractService implements
 return ApplicationAttemptReport.newInstance(
   appAttemptHistory.getApplicationAttemptId(), appAttemptHistory.getHost(),
   appAttemptHistory.getRPCPort(), appAttemptHistory.getTrackingURL(),
+  null,
   appAttemptHistory.getDiagnosticsInfo(),
   appAttemptHistory.getYarnApplicationAttemptState(),
   appAttemptHistory.getMasterContainerId());

http://git-wip-us.apache.org/repos/asf/ambari/blob/0c1c14b7/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index 3dd4d8d..ed4d057 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -177,9 +177,9 @@ public class ApplicationHistoryServer extends 
CompositeService {
   ahsClientService, "ws")
 .with(getConfig())
 .withHttpSpnegoPrincipalKey(
-  YarnConfiguration.TIMELINE_SERVICE_WEBAPP_SPNEGO_USER_NAME_KEY)
+  YarnConfiguration.TIMELINE_SERVICE_PRINCIPAL)
 .withHttpSpnegoKeytabKey(
-  YarnConfiguration.TIMELINE_SERVICE_WEBAPP_SPNEGO_KEYTAB_FILE_KEY)
+  YarnConfiguration.TIMELINE_SERVICE_KEYTAB)
 .at(bindAddress)
 .start(new AHSWebApp(historyManager, timelineStore, 
timeli

ambari git commit: AMBARI-8872. Support point in time queries.

2014-12-22 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 6801758ff -> 9bf9034a5


AMBARI-8872. Support point in time queries.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9bf9034a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9bf9034a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9bf9034a

Branch: refs/heads/trunk
Commit: 9bf9034a5c2481a8b40befab8c3713dcd3b6f584
Parents: 6801758
Author: Siddharth Wagle 
Authored: Mon Dec 22 17:07:17 2014 -0800
Committer: Siddharth Wagle 
Committed: Mon Dec 22 17:07:17 2014 -0800

--
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 473 +++---
 .../metrics/timeline/PhoenixTransactSQL.java| 491 ++-
 .../timeline/TimelineMetricAggregator.java  |   3 +-
 .../TimelineMetricClusterAggregator.java|   3 +-
 .../TimelineMetricClusterAggregatorHourly.java  |   3 +-
 .../metrics/timeline/ITClusterAggregator.java   |   5 +-
 .../metrics/timeline/ITMetricAggregator.java|   7 +-
 .../timeline/TestPhoenixTransactSQL.java|  21 +-
 .../metrics/timeline/AMSPropertyProvider.java   |  76 +--
 .../timeline/AMSPropertyProviderTest.java   |  92 +++-
 10 files changed, 833 insertions(+), 341 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/9bf9034a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index b5226ee..4b04ba9 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -17,7 +17,6 @@
  */
 package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -35,6 +34,7 @@ import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -48,6 +48,7 @@ import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_TABLE_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_TABLE_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.SplitByMetricNamesCondition;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_ENCODING;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_TABLE_COMPRESSION;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
@@ -134,7 +135,6 @@ public class PhoenixHBaseAccessor {
 }
   }
 
-
   /**
* Get JDBC connection to HBase store. Assumption is that the hbase
* configuration is present on the classpath and loaded by the caller into
@@ -148,13 +148,28 @@ public class PhoenixHBaseAccessor {
 return dataSource.getConnection();
   }
 
-  public static Map readMetricFromJSON(String json) throws IOException {
-return mapper.readValue(json, metricValuesTypeRef);
+  private static TimelineMetric getLastTimelineMetricFromResultSet(ResultSet 
rs)
+throws SQLException, IOException {
+TimelineMetric metric = getTimelineMetricCommonsFromResultSet(rs);
+
metric.setMetricValues(readLastMetricValueFromJSON(rs.getString("METRICS")));
+
+return metric;
   }
 
-  @SuppressWarnings("unchecked")
   static TimelineMetric getTimelineMetricFromResultSet(ResultSet rs)
 throws SQLException, IOException {
+TimelineMetric metric = getTimelineMetricCommonsFromResultSet(rs);
+Map sortedByTimeMetrics =
+  new TreeMap(rea

ambari git commit: AMBARI-8869. Include AMS debian packages in Ambari build. Missing property fix. (swagle)

2014-12-22 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk fa74434ba -> f0f889cec


AMBARI-8869. Include AMS debian packages in Ambari build. Missing property fix. 
(swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f0f889ce
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f0f889ce
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f0f889ce

Branch: refs/heads/trunk
Commit: f0f889cecd53108fe7a150ddecf4226ece1ec7a1
Parents: fa74434
Author: Siddharth Wagle 
Authored: Mon Dec 22 16:14:28 2014 -0800
Committer: Siddharth Wagle 
Committed: Mon Dec 22 16:14:28 2014 -0800

--
 ambari-metrics/ambari-metrics-assembly/pom.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/f0f889ce/ambari-metrics/ambari-metrics-assembly/pom.xml
--
diff --git a/ambari-metrics/ambari-metrics-assembly/pom.xml 
b/ambari-metrics/ambari-metrics-assembly/pom.xml
index c31e34f..40fef0e 100644
--- a/ambari-metrics/ambari-metrics-assembly/pom.xml
+++ b/ambari-metrics/ambari-metrics-assembly/pom.xml
@@ -40,6 +40,7 @@
 python (>= 2.6)
 amd64
 ${deb.python.ver},python-dev,gcc
+
ambari-metrics-hadoop-sink-with-common-${project.version}.jar
   
 
   



ambari git commit: AMBARI-8869. Include AMS debian packages in Ambari build. Phoenix dep fix. (swagle)

2014-12-22 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 03463912a -> fa74434ba


AMBARI-8869. Include AMS debian packages in Ambari build. Phoenix dep fix. 
(swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fa74434b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fa74434b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fa74434b

Branch: refs/heads/trunk
Commit: fa74434baffdaf01fa6cfbbd2f42860027aeac62
Parents: 0346391
Author: Siddharth Wagle 
Authored: Mon Dec 22 15:28:10 2014 -0800
Committer: Siddharth Wagle 
Committed: Mon Dec 22 15:31:10 2014 -0800

--
 ambari-metrics/ambari-metrics-timelineservice/pom.xml | 8 
 1 file changed, 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/fa74434b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
--
diff --git a/ambari-metrics/ambari-metrics-timelineservice/pom.xml 
b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
index 5b81b41..c2c0323 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/pom.xml
+++ b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
@@ -242,14 +242,6 @@
   org.apache.hadoop
   hadoop-annotations
 
-
-  org.apache.hbase
-  hbase-client
-
-
-  org.apache.hbase
-  hbase-server
-
   
 
 



ambari git commit: AMBARI-8869. Include AMS debian packages in Ambari build. (swagle, dsen via swagle)

2014-12-22 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk ff4c6c114 -> e6cdb06bc


AMBARI-8869. Include AMS debian packages in Ambari build. (swagle, dsen via 
swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e6cdb06b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e6cdb06b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e6cdb06b

Branch: refs/heads/trunk
Commit: e6cdb06bc7cd6d7ff010592c20e49de0a0f5b855
Parents: ff4c6c1
Author: Siddharth Wagle 
Authored: Mon Dec 22 13:53:30 2014 -0800
Committer: Siddharth Wagle 
Committed: Mon Dec 22 13:53:30 2014 -0800

--
 ambari-metrics/ambari-metrics-assembly/pom.xml  | 286 ++-
 .../src/main/package/deb/control/control|  22 ++
 .../src/main/package/deb/control/postinst   |  24 ++
 .../src/main/package/deb/control/prerm  |  28 ++
 .../ambari-metrics-hadoop-sink/pom.xml  |  42 ---
 .../conf/unix/ambari-metrics-monitor|   2 +-
 ambari-metrics/pom.xml  |   8 +-
 .../common-services/AMS/0.1.0/metainfo.xml  |  13 +-
 8 files changed, 375 insertions(+), 50 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/e6cdb06b/ambari-metrics/ambari-metrics-assembly/pom.xml
--
diff --git a/ambari-metrics/ambari-metrics-assembly/pom.xml 
b/ambari-metrics/ambari-metrics-assembly/pom.xml
index 0a0afbd..c31e34f 100644
--- a/ambari-metrics/ambari-metrics-assembly/pom.xml
+++ b/ambari-metrics/ambari-metrics-assembly/pom.xml
@@ -36,6 +36,10 @@
 
${project.basedir}/../ambari-metrics-hadoop-sink
 
${project.basedir}/../ambari-metrics-storm-sink
 
${project.basedir}/../ambari-metrics-flume-sink
+python >= 2.6
+python (>= 2.6)
+amd64
+${deb.python.ver},python-dev,gcc
   
 
   
@@ -96,7 +100,6 @@
 
   
 
-
   
 
   rpm
@@ -455,12 +458,287 @@
   
 
   
-
-
-
 
+  
+
+  
+
+
+  deb
 
+  
+
+  build-deb
+
+  
 
+  
+
+  
+jdeb
+org.vafer
+1.0.1
+
+  
+
+package
+
+  jdeb
+
+  
+
+
+  ${basedir}/src/main/package/deb/control
+  
${basedir}/target/${artifactId}_${package-version}-${package-release}.deb
+  
+
+  file
+  ${monitor.dir}/src/main/python/__init__.py
+  
+perm
+${resmonitor.install.dir}
+
+
+755
+  
+
+
+  file
+  ${monitor.dir}/src/main/python/main.py
+  
+perm
+${resmonitor.install.dir}
+755
+  
+
+
+  directory
+  ${monitor.dir}/src/main/python/core
+  
+perm
+${resmonitor.install.dir}/core
+  
+
+
+  directory
+  ${monitor.dir}/src/main/python/psutil
+  build/**
+  
+perm
+${resmonitor.install.dir}/psutil
+  
+
+
+  template
+  
+/var/run/ambari-metrics-monitor
+/var/log/ambari-metrics-monitor
+/etc/ambari-metrics-monitor/conf
+/usr/lib/ambari-metrics-collector
+/etc/ambari-metrics-collector/conf
+/etc/ams-hbase/conf
+/var/run/ambari-metrics-collector
+/var/run/ams-hbase
+/var/log/ambari-metrics-collector
+/var/lib/ambari-metrics-collector
+/usr/lib/ambari-metrics-hadoop-sink
+/usr/lib/flume/lib
+/usr/lib/storm/lib
+  
+
+
+  ${monitor.dir}/conf/unix/metric_groups.conf
+  file
+  
+perm
+/etc/ambari-metrics-monitor/conf
+644
+  
+
+
+  ${monitor.dir}/conf/unix/metric_monitor.ini
+  file
+  
+   

ambari git commit: AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. alerts.json change. (swagle)

2014-12-22 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 57490ffa6 -> ff4c6c114


AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. alerts.json 
change. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ff4c6c11
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ff4c6c11
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ff4c6c11

Branch: refs/heads/trunk
Commit: ff4c6c114b626b7164c6160f677e459fdcbaf1e6
Parents: 57490ff
Author: Siddharth Wagle 
Authored: Mon Dec 22 12:57:02 2014 -0800
Committer: Siddharth Wagle 
Committed: Mon Dec 22 12:57:02 2014 -0800

--
 .../src/main/resources/common-services/AMS/0.1.0/alerts.json   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/ff4c6c11/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
index 714b940..cba8b76 100644
--- a/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
@@ -35,7 +35,7 @@
 "enabled": true,
 "source": {
   "type": "PORT",
-  "uri": "8188",
+  "uri": "{{metric_collector_port}}",
   "default_port": 8188,
   "reporting": {
 "ok": {



ambari git commit: AMBARI-8866. Jasper dependencies cause AMS webapp start failure. (swagle)

2014-12-22 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 4bab98c9e -> 57490ffa6


AMBARI-8866. Jasper dependencies cause AMS webapp start failure. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/57490ffa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/57490ffa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/57490ffa

Branch: refs/heads/trunk
Commit: 57490ffa6a93184fa7dc65bc2104cbd0e93b78d1
Parents: 4bab98c
Author: Siddharth Wagle 
Authored: Mon Dec 22 12:37:39 2014 -0800
Committer: Siddharth Wagle 
Committed: Mon Dec 22 12:45:12 2014 -0800

--
 ambari-metrics/ambari-metrics-timelineservice/pom.xml | 9 +
 1 file changed, 9 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/57490ffa/ambari-metrics/ambari-metrics-timelineservice/pom.xml
--
diff --git a/ambari-metrics/ambari-metrics-timelineservice/pom.xml 
b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
index 5141359..5b81b41 100644
--- a/ambari-metrics/ambari-metrics-timelineservice/pom.xml
+++ b/ambari-metrics/ambari-metrics-timelineservice/pom.xml
@@ -52,6 +52,7 @@
   ${project.build.directory}/lib
   compile
   test
+  
jasper-runtime,jasper-compiler
 
   
 
@@ -241,6 +242,14 @@
   org.apache.hadoop
   hadoop-annotations
 
+
+  org.apache.hbase
+  hbase-client
+
+
+  org.apache.hbase
+  hbase-server
+
   
 
 



ambari git commit: AMBARI-8845. Configure number of regions created for SYSTEM.SEQUENCE. (swagle)

2014-12-19 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 50819a309 -> ecce96ad6


AMBARI-8845. Configure number of regions created for SYSTEM.SEQUENCE. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ecce96ad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ecce96ad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ecce96ad

Branch: refs/heads/trunk
Commit: ecce96ad62a60f4ea79dabeb4749be02fae3ab65
Parents: 50819a3
Author: Siddharth Wagle 
Authored: Fri Dec 19 16:40:24 2014 -0800
Committer: Siddharth Wagle 
Committed: Fri Dec 19 16:40:30 2014 -0800

--
 .../metrics/timeline/TimelineMetricConfiguration.java| 6 +++---
 .../AMS/0.1.0/configuration/ams-hbase-site.xml   | 8 +++-
 .../package/templates/hadoop-metrics2-hbase.properties.j2| 1 +
 .../hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2   | 2 +-
 4 files changed, 12 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/ecce96ad/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
index e1275c8..248894c 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
@@ -182,10 +182,10 @@ public class TimelineMetricConfiguration {
   }
 
   public String getTimelineServiceRpcAddress() {
-String defaultHttpAddress = "0.0.0.0:60200";
+String defaultRpcAddress = "0.0.0.0:60200";
 if (metricsConf != null) {
-  return metricsConf.get(TIMELINE_SERVICE_RPC_ADDRESS, defaultHttpAddress);
+  return metricsConf.get(TIMELINE_SERVICE_RPC_ADDRESS, defaultRpcAddress);
 }
-return defaultHttpAddress;
+return defaultRpcAddress;
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/ecce96ad/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-site.xml
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-site.xml
 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-site.xml
index 9d35ba2..a8e2cb0 100644
--- 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-site.xml
+++ 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/configuration/ams-hbase-site.xml
@@ -264,5 +264,11 @@
 12
 ZooKeeper session timeout in milliseconds.
   
-
+  
+phoenix.sequence.saltBuckets
+2
+
+  Controls the number of pre-allocated regions for SYSTEM.SEQUENCE table.
+
+  
 
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ecce96ad/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
index e74dbe3..13b0948 100644
--- 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
+++ 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/package/templates/hadoop-metrics2-hbase.properties.j2
@@ -50,6 +50,7 @@ 
rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 rpc.period=10
 rpc.collector={{ams_collector_host_single}}:{{metric_collector_port}}
 
+*.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
 
hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.sink.timeline.period=10
 
hbase.sink.timeline.collector={{ams_collector_host_single}}:{{metric_collector_port}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ecce96ad/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-me
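
Besides the new phoenix.sequence.saltBuckets property, the Java change above is only a variable rename; the underlying lookup is the usual Hadoop Configuration get-with-default. A small sketch of that pattern (the property key shown is assumed for illustration and may not be the exact AMS key):

import org.apache.hadoop.conf.Configuration;

// Sketch of a get-with-default config lookup, as in getTimelineServiceRpcAddress().
public class RpcAddressLookupSketch {
  // Assumed key name, for illustration only.
  static final String TIMELINE_SERVICE_RPC_ADDRESS = "timeline.metrics.service.rpc.address";

  static String getTimelineServiceRpcAddress(Configuration metricsConf) {
    String defaultRpcAddress = "0.0.0.0:60200";
    if (metricsConf != null) {
      return metricsConf.get(TIMELINE_SERVICE_RPC_ADDRESS, defaultRpcAddress);
    }
    return defaultRpcAddress;
  }

  public static void main(String[] args) {
    // With no override configured this falls back to the default.
    System.out.println(getTimelineServiceRpcAddress(new Configuration()));
  }
}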

ambari git commit: AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. Missing template property. (swagle)

2014-12-19 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk fa8c51e41 -> f8a9bd90c


AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. Missing 
template property. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f8a9bd90
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f8a9bd90
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f8a9bd90

Branch: refs/heads/trunk
Commit: f8a9bd90cb2a384e7bb5f6c4464657c49db67da3
Parents: fa8c51e
Author: Siddharth Wagle 
Authored: Fri Dec 19 15:09:29 2014 -0800
Committer: Siddharth Wagle 
Committed: Fri Dec 19 15:09:29 2014 -0800

--
 .../hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2   | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/f8a9bd90/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
--
diff --git 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
index bd13f0c..34a7d63 100644
--- 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
+++ 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
@@ -52,19 +52,19 @@ hbase.extendedperiod = 3600
 
*.timline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
 hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.period=10
-hbase.collector={{metric_collector_host}}:8188
+hbase.collector={{metric_collector_host}}:{{metric_collector_port}}
 
 jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 jvm.period=10
-jvm.collector={{metric_collector_host}}:8188
+jvm.collector={{metric_collector_host}}:{{metric_collector_port}}
 
 rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 rpc.period=10
-rpc.collector={{metric_collector_host}}:8188
+rpc.collector={{metric_collector_host}}:{{metric_collector_port}}
 
 
hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
 hbase.sink.timeline.period=10
-hbase.sink.timeline.collector={{metric_collector_host}}:8188
+hbase.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
 
 {% else %}
 



ambari git commit: AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. ATS port fix. (swagle)

2014-12-19 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 43abcf0d7 -> fa8c51e41


AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. ATS port 
fix. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fa8c51e4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fa8c51e4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fa8c51e4

Branch: refs/heads/trunk
Commit: fa8c51e41990664db274643f8113c490dd5c17b4
Parents: 43abcf0
Author: Siddharth Wagle 
Authored: Fri Dec 19 14:44:07 2014 -0800
Committer: Siddharth Wagle 
Committed: Fri Dec 19 14:44:07 2014 -0800

--
 .../ApplicationHistoryClientService.java  | 14 +++---
 .../ApplicationHistoryServer.java |  5 ++---
 .../metrics/timeline/TimelineMetricConfiguration.java | 11 +++
 .../controller/internal/AbstractProviderModule.java   | 14 +-
 .../AMS/0.1.0/configuration/ams-site.xml  |  7 +++
 .../AMS/0.1.0/package/templates/metric_monitor.ini.j2 |  2 +-
 6 files changed, 37 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/fa8c51e4/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
index e15198b..8a37a57 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
@@ -27,6 +27,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.Server;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
@@ -57,6 +58,7 @@ import 
org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
 import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration;
 
 public class ApplicationHistoryClientService extends AbstractService {
   private static final Log LOG = LogFactory
@@ -65,6 +67,7 @@ public class ApplicationHistoryClientService extends 
AbstractService {
   private ApplicationHistoryProtocol protocolHandler;
   private Server server;
   private InetSocketAddress bindAddress;
+  private TimelineMetricConfiguration metricConfiguration;
 
   public ApplicationHistoryClientService(ApplicationHistoryManager history) {
 super("ApplicationHistoryClientService");
@@ -72,13 +75,18 @@ public class ApplicationHistoryClientService extends 
AbstractService {
 this.protocolHandler = new ApplicationHSClientProtocolHandler();
   }
 
+  public ApplicationHistoryClientService(ApplicationHistoryManager history,
+   TimelineMetricConfiguration metricConfiguration) {
+this(history);
+this.metricConfiguration = metricConfiguration;
+  }
+
   protected void serviceStart() throws Exception {
 Configuration conf = getConfig();
 YarnRPC rpc = YarnRPC.create(conf);
 InetSocketAddress address =
-conf.getSocketAddr(YarnConfiguration.TIMELINE_SERVICE_ADDRESS,
-  YarnConfiguration.DEFAULT_TIMELINE_SERVICE_ADDRESS,
-  YarnConfiguration.DEFAULT_TIMELINE_SERVICE_PORT);
+  
NetUtils.createSocketAddr(metricConfiguration.getTimelineServiceRpcAddress(),
+YarnConfiguration.DEFAULT_TIMELINE_SERVICE_PORT);
 
 server =
 rpc.getServer(ApplicationHistoryProtocol.class, protocolHandler,

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa8c51e4/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationh
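
The ApplicationHistoryClientService change above derives the RPC bind address from the AMS timeline configuration rather than the stock YARN timeline keys, relying on NetUtils.createSocketAddr to fall back to a default port when the address string omits one. A small hedged sketch of that call (port values here are illustrative):

import java.net.InetSocketAddress;
import org.apache.hadoop.net.NetUtils;

// Sketch: resolve a "host:port" bind address, falling back to a default port.
public class BindAddressSketch {
  public static void main(String[] args) {
    int defaultPort = 60200;  // illustrative; the diff passes YarnConfiguration.DEFAULT_TIMELINE_SERVICE_PORT
    InetSocketAddress withPort = NetUtils.createSocketAddr("0.0.0.0:60200", defaultPort);
    InetSocketAddress noPort = NetUtils.createSocketAddr("0.0.0.0", defaultPort); // port omitted
    System.out.println(withPort.getPort()); // 60200, taken from the address string
    System.out.println(noPort.getPort());   // 60200, taken from the default
  }
}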

ambari git commit: AMBARI-8835. Unit test failure on Centos5.

2014-12-19 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 156b9b547 -> bee937981


AMBARI-8835. Unit test failure on Centos5.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bee93798
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bee93798
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bee93798

Branch: refs/heads/trunk
Commit: bee937981cbc837af98af3d787fedef511a8d738
Parents: 156b9b5
Author: Siddharth Wagle 
Authored: Fri Dec 19 11:03:43 2014 -0800
Committer: Siddharth Wagle 
Committed: Fri Dec 19 11:03:43 2014 -0800

--
 ambari-metrics/ambari-metrics-host-monitoring/pom.xml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/bee93798/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
--
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/pom.xml 
b/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
index 6080c8d..c213057 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
+++ b/ambari-metrics/ambari-metrics-host-monitoring/pom.xml
@@ -112,7 +112,7 @@
 
 
   
-
+
   
   
   
@@ -130,7 +130,7 @@
 
   
 
-  python
+  
${project.basedir}/../../ambari-common/src/main/unix/ambari-python-wrap
   src/test/python
   
 unitTests.py



ambari git commit: AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. (swagle)

2014-12-19 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 3d37d5323 -> 156b9b547


AMBARI-8808. Resolve potential port conflicts with Hadoop daemons. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/156b9b54
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/156b9b54
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/156b9b54

Branch: refs/heads/trunk
Commit: 156b9b547db36813a3f30b6dc8d032f3f64be5c3
Parents: 3d37d53
Author: Siddharth Wagle 
Authored: Fri Dec 19 10:40:11 2014 -0800
Committer: Siddharth Wagle 
Committed: Fri Dec 19 10:40:11 2014 -0800

--
 .../flume/FlumeTimelineMetricsSinkTest.java |  6 +-
 .../ApplicationHistoryServer.java   |  8 ++-
 .../timeline/HBaseTimelineMetricStore.java  | 32 ++
 .../timeline/TimelineMetricConfiguration.java   | 67 +++-
 .../internal/AbstractProviderModule.java| 12 +++-
 .../AMS/0.1.0/configuration/ams-hbase-log4j.xml |  2 +-
 .../AMS/0.1.0/configuration/ams-site.xml|  7 ++
 .../AMS/0.1.0/package/scripts/params.py |  5 +-
 .../hadoop-metrics2-hbase.properties.j2 |  8 +--
 .../HBASE/0.96.0.2.0/package/scripts/params.py  |  4 ++
 ...doop-metrics2-hbase.properties-GANGLIA-RS.j2 |  8 +--
 .../2.0.6/hooks/before-START/scripts/params.py  |  4 ++
 .../templates/hadoop-metrics2.properties.j2 | 21 +++---
 13 files changed, 128 insertions(+), 56 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/156b9b54/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
--
diff --git 
a/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
 
b/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
index 0275db6..ad59576 100644
--- 
a/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
+++ 
b/ambari-metrics/ambari-metrics-flume-sink/src/test/java/org/apache/hadoop/metrics2/sink/flume/FlumeTimelineMetricsSinkTest.java
@@ -27,10 +27,10 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.powermock.core.classloader.annotations.PrepareForTest;
 import org.powermock.modules.junit4.PowerMockRunner;
-
 import java.util.Collections;
-
-import static org.powermock.api.easymock.PowerMock.*;
+import static org.powermock.api.easymock.PowerMock.mockStatic;
+import static org.powermock.api.easymock.PowerMock.replay;
+import static org.powermock.api.easymock.PowerMock.verifyAll;
 
 @RunWith(PowerMockRunner.class)
 @PrepareForTest(JMXPollUtil.class)

http://git-wip-us.apache.org/repos/asf/ambari/blob/156b9b54/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index 3adb3b8..131636d 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -63,6 +63,7 @@ public class ApplicationHistoryServer extends 
CompositeService {
   TimelineStore timelineStore;
   TimelineMetricStore timelineMetricStore;
   private WebApp webApp;
+  private TimelineMetricConfiguration metricConfiguration;
 
   public ApplicationHistoryServer() {
 super(ApplicationHistoryServer.class.getName());
@@ -70,6 +71,8 @@ public class ApplicationHistoryServer extends 
CompositeService {
 
   @Override
   protected void serviceInit(Configuration conf) throws Exception {
+metricConfiguration = new TimelineMetricConfiguration();
+metricConfiguration.initialize();
 historyManager = createApplicationHistory();
 ahsClientService = createApplicationHistoryClientService(historyManager);
 addService(ahsClientService);
@@ -162,11 +165,11 @@ public class ApplicationHistoryServer extends 
CompositeService {
 
   protected TimelineMetricStore createTimelineMetricStore(Configuration conf) {
 LOG.info("Creating metrics store.");
-return ReflectionUtils.newInstance(HBaseTimelineMetricStor

ambari git commit: AMBARI-8805. Exception on collector start. Rat check fix.

2014-12-18 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk e075a2ec4 -> 4b52b64d1


AMBARI-8805. Exception on collector start. Rat check fix.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4b52b64d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4b52b64d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4b52b64d

Branch: refs/heads/trunk
Commit: 4b52b64d1d7337973c4a9f17afb89fe8bf86c3ed
Parents: e075a2e
Author: Siddharth Wagle 
Authored: Thu Dec 18 15:17:44 2014 -0800
Committer: Siddharth Wagle 
Committed: Thu Dec 18 15:18:00 2014 -0800

--
 .../AbstractTimelineAggregatorTest.java | 29 ++--
 1 file changed, 20 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/4b52b64d/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
index 82d3017..969192d 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
@@ -1,22 +1,33 @@
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .timeline;
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
-import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.util.Clock;
 import org.junit.Before;
 import org.junit.Test;
-
 import java.io.IOException;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.concurrent.atomic.AtomicLong;
-
 import static junit.framework.Assert.assertEquals;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .timeline.TimelineMetricConfiguration.AGGREGATOR_CHECKPOINT_DELAY;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .timeline.TimelineMetricConfiguration.RESULTSET_FETCH_SIZE;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.AGGREGATOR_CHECKPOINT_DELAY;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.RESULTSET_FETCH_SIZE;
 
 public class AbstractTimelineAggregatorTest {
 



ambari git commit: AMBARI-8805. Exception on collector start.

2014-12-18 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 60c9f8c82 -> 105d073ab


AMBARI-8805. Exception on collector start.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/105d073a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/105d073a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/105d073a

Branch: refs/heads/trunk
Commit: 105d073abc9e08bfd4d7ac749e75d95118d362a9
Parents: 60c9f8c
Author: Siddharth Wagle 
Authored: Thu Dec 18 14:50:25 2014 -0800
Committer: Siddharth Wagle 
Committed: Thu Dec 18 14:54:43 2014 -0800

--
 .../timeline/AbstractTimelineAggregator.java|   8 +-
 .../AbstractTimelineAggregatorTest.java | 153 +++
 2 files changed, 96 insertions(+), 65 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/105d073a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregator.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregator.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregator.java
index f169003..9bffee2 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregator.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregator.java
@@ -110,7 +110,13 @@ public abstract class AbstractTimelineAggregator 
implements Runnable {
 
   if (success) {
 try {
-  saveCheckPoint(lastCheckPointTime + SLEEP_INTERVAL);
+  // Comment to bug fix:
+  // cannot just save lastCheckPointTime + SLEEP_INTERVAL,
+  // it has to be verified so it is not a time in the future
+  // checkpoint says what was aggregated, and there is no way
+  // the future metrics were aggregated!
+  saveCheckPoint(Math.min(currentTime, lastCheckPointTime +
+SLEEP_INTERVAL));
 } catch (IOException io) {
   LOG.warn("Error saving checkpoint, restarting aggregation at " +
 "previous checkpoint.");

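The comment in the hunk above carries the whole fix: a checkpoint must never point into the future, because it records what has already been aggregated. A small sketch of the clamping rule, with names borrowed from the diff and everything else stripped away:

// Sketch: cap the saved checkpoint at "now" so a restart never skips a
// window that was not actually aggregated yet.
public class CheckpointClampSketch {
  static long nextCheckpoint(long currentTime, long lastCheckPointTime,
                             long sleepIntervalMillis) {
    return Math.min(currentTime, lastCheckPointTime + sleepIntervalMillis);
  }

  public static void main(String[] args) {
    long now = System.currentTimeMillis();
    // A recent checkpoint plus the sleep interval would land in the future,
    // so the clamp returns "now" instead.
    System.out.println(nextCheckpoint(now, now - 10000L, 120000L) == now);
  }
}
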
http://git-wip-us.apache.org/repos/asf/ambari/blob/105d073a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
index 8aa8436..82d3017 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
@@ -1,21 +1,5 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
+  .timeline;
 
 import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
@@ -28,6 +12,7 @@ import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.concurrent.atomic.AtomicLong;
 
+import static junit.framework.Assert.a

ambari git commit: AMBARI-8741. Create assembly module to place ambari metrics rpms into single output dir. Symlink fix. (swagle)

2014-12-18 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 02b034bea -> 9f68828eb


AMBARI-8741. Create assembly module to place ambari metrics rpms into single 
output dir. Symlink fix. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9f68828e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9f68828e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9f68828e

Branch: refs/heads/trunk
Commit: 9f68828ebf1d52dcb44e8195d56ac5fb16d5a872
Parents: 02b034b
Author: Siddharth Wagle 
Authored: Thu Dec 18 13:37:46 2014 -0800
Committer: Siddharth Wagle 
Committed: Thu Dec 18 13:37:46 2014 -0800

--
 ambari-metrics/ambari-metrics-assembly/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/9f68828e/ambari-metrics/ambari-metrics-assembly/pom.xml
--
diff --git a/ambari-metrics/ambari-metrics-assembly/pom.xml 
b/ambari-metrics/ambari-metrics-assembly/pom.xml
index 79d5eba..0a0afbd 100644
--- a/ambari-metrics/ambari-metrics-assembly/pom.xml
+++ b/ambari-metrics/ambari-metrics-assembly/pom.xml
@@ -372,7 +372,7 @@
   root
 
   
-
${hadoop-sink.dir}/target/rpm/postinstall.sh
+
${hadoop-sink.dir}/target/classes/rpm/postinstall.sh
 utf-8
   
 



ambari git commit: AMBARI-8742. Implement Report graphs capability in AMS. Missing properties. (swagle)

2014-12-17 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 40aa66f54 -> e80f7aa4a


AMBARI-8742. Implement Report graphs capability in AMS. Missing properties. 
(swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e80f7aa4
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e80f7aa4
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e80f7aa4

Branch: refs/heads/trunk
Commit: e80f7aa4a8ffe5959d0ab1d91376b974eac048b4
Parents: 40aa66f
Author: Siddharth Wagle 
Authored: Wed Dec 17 19:43:30 2014 -0800
Committer: Siddharth Wagle 
Committed: Wed Dec 17 19:43:30 2014 -0800

--
 .../services/AMS/configuration/ams-hbase-site.xml   | 16 
 1 file changed, 16 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/e80f7aa4/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-site.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-site.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-site.xml
index eb07685..9d35ba2 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-site.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-site.xml
@@ -249,4 +249,20 @@
     <name>phoenix.query.spoolThresholdBytes</name>
     <value>12582912</value>
   </property>
+  <property>
+    <name>hbase.snapshot.enabled</name>
+    <value>false</value>
+    <description>Enable/Disable HBase snapshots.</description>
+  </property>
+  <property>
+    <name>hbase.replication</name>
+    <value>false</value>
+    <description>Enable/Disable HBase replication.</description>
+  </property>
+  <property>
+    <name>zookeeper.session.timeout</name>
+    <value>12</value>
+    <description>ZooKeeper session timeout in milliseconds.</description>
+  </property>
+
 </configuration>
\ No newline at end of file



ambari git commit: AMBARI-8741. Create assembly module to place ambari metrics rpms into single output dir. (mpapirkovskyy via swagle)

2014-12-17 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 4d4fc0cbb -> 40aa66f54


AMBARI-8741. Create assembly module to place ambari metrics rpms into single 
output dir. (mpapirkovskyy via swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/40aa66f5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/40aa66f5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/40aa66f5

Branch: refs/heads/trunk
Commit: 40aa66f548c5387ed08f867ca84bdb477c90b9ad
Parents: 4d4fc0c
Author: Siddharth Wagle 
Authored: Wed Dec 17 18:29:50 2014 -0800
Committer: Siddharth Wagle 
Committed: Wed Dec 17 18:29:50 2014 -0800

--
 ambari-metrics/ambari-metrics-assembly/pom.xml  | 507 +++
 .../src/main/assembly/collector.xml |  75 +++
 .../src/main/assembly/monitor.xml   |  64 +++
 .../src/main/assembly/sink.xml  |  61 +++
 .../ambari-metrics-flume-sink/pom.xml   |  53 +-
 .../src/main/assemblies/jar-with-common.xml |  34 ++
 .../src/main/assemblies/sink.xml|  34 --
 .../ambari-metrics-hadoop-sink/pom.xml  |  91 ++--
 .../src/main/assemblies/jar-with-common.xml |  37 ++
 .../src/main/assemblies/sink-jar.xml|  37 --
 .../src/main/assemblies/sink.xml|  34 --
 .../conf/unix/metric_groups.conf|  37 ++
 .../conf/unix/metric_monitor.ini|  30 ++
 .../ambari-metrics-host-monitoring/pom.xml  | 111 
 .../ambari-metrics-storm-sink/pom.xml   |  53 +-
 .../src/main/assemblies/jar-with-common.xml |  34 ++
 .../src/main/assemblies/sink.xml|  34 --
 .../ambari-metrics-timelineservice/pom.xml  |  22 +-
 .../src/main/assemblies/ats.xml |  34 --
 .../src/main/assemblies/phoenix-client.xml  |  62 ---
 .../phoenix-components-major-client.xml |  53 --
 .../assemblies/phoenix-components-minimal.xml   |  71 ---
 .../src/main/assemblies/phoenix-server.xml  |  46 --
 ambari-metrics/pom.xml  |  31 +-
 ...-metrics2-hbase.properties-GANGLIA-MASTER.j2 |   6 +-
 ...doop-metrics2-hbase.properties-GANGLIA-RS.j2 |   1 +
 26 files changed, 946 insertions(+), 706 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/40aa66f5/ambari-metrics/ambari-metrics-assembly/pom.xml
--
diff --git a/ambari-metrics/ambari-metrics-assembly/pom.xml 
b/ambari-metrics/ambari-metrics-assembly/pom.xml
new file mode 100644
index 000..79d5eba
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-assembly/pom.xml
@@ -0,0 +1,507 @@
+
+
+
+http://maven.apache.org/POM/4.0.0";
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+  
+ambari-metrics
+org.apache.ambari
+0.1.0-SNAPSHOT
+  
+  4.0.0
+
+  ambari-metrics-assembly
+  pom
+
+  
+
${project.basedir}/../ambari-metrics-timelineservice
+
${project.basedir}/../ambari-metrics-host-monitoring
+
${project.basedir}/../ambari-metrics-hadoop-sink
+
${project.basedir}/../ambari-metrics-storm-sink
+
${project.basedir}/../ambari-metrics-flume-sink
+  
+
+  
+
+  
+maven-assembly-plugin
+
+  
+collector
+package
+
+  single
+
+
+  false
+  
ambari-metrics-collector-${project.version}
+  false
+  
+src/main/assembly/collector.xml
+  
+  gnu
+
+  
+  
+monitor
+package
+
+  single
+
+
+  false
+  ambari-metrics-monitor-${project.version}
+  false
+  
+src/main/assembly/monitor.xml
+  
+  gnu
+
+  
+  
+hadoop-sink
+package
+
+  single
+
+
+  false
+  
ambari-metrics-hadoop-sink-${project.version}
+  false
+  
+src/main/assembly/sink.xml
+  
+  gnu
+
+  
+
+  
+
+  
+
+
+  
+
+  rpm
+
+  
+
+  build-rpm
+
+  
+
+  
+
+  
+org.codehaus.mojo
+rpm-maven-plugin
+2.0.1
+
+  Development
+  x86_64
+  2012, Apache Software Foundation
+  ${package-version}
+  ${packag

ambari git commit: AMBARI-8742. Implement Report graphs capability in AMS. Unit test fix. (swagle)

2014-12-17 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 24493d3cf -> 6f948db83


AMBARI-8742. Implement Report graphs capability in AMS. Unit test fix. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6f948db8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6f948db8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6f948db8

Branch: refs/heads/trunk
Commit: 6f948db83274abd72d0099fe0a65d3b9441b896a
Parents: 24493d3
Author: Siddharth Wagle 
Authored: Wed Dec 17 15:30:49 2014 -0800
Committer: Siddharth Wagle 
Committed: Wed Dec 17 15:30:49 2014 -0800

--
 .../src/test/python/core/TestHostInfo.py | 8 +---
 1 file changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/6f948db8/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/core/TestHostInfo.py
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/core/TestHostInfo.py
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/core/TestHostInfo.py
index 71aee86..3338db5 100644
--- 
a/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/core/TestHostInfo.py
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/core/TestHostInfo.py
@@ -27,11 +27,13 @@ from mock.mock import patch
 logger = logging.getLogger()
 
 class TestHostInfo(TestCase):
-  
+
+  @patch("psutil.cpu_count")
   @patch("os.getloadavg")
   @patch("psutil.cpu_times_percent")
-  def testCpuTimes(self, cp_mock, avg_mock):
-
+  def testCpuTimes(self, cp_mock, avg_mock, count_mock):
+count_mock.return_value = 1
+
 cp = cp_mock.return_value
 cp.user = 0.1
 cp.system = 0.1



ambari git commit: AMBARI-8769. Aggregator checkpoint logic should take into account the checkPointDelay. Rat check fix.

2014-12-17 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk cd3fb17f4 -> 24493d3cf


AMBARI-8769. Aggregator checkpoint logic should take into account the 
checkPointDelay. Rat check fix.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/24493d3c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/24493d3c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/24493d3c

Branch: refs/heads/trunk
Commit: 24493d3cf122db8d5b1ed3fb96c09c4a4609827d
Parents: cd3fb17
Author: Siddharth Wagle 
Authored: Wed Dec 17 14:20:26 2014 -0800
Committer: Siddharth Wagle 
Committed: Wed Dec 17 14:20:26 2014 -0800

--
 .../AbstractTimelineAggregatorTest.java | 20 ++--
 1 file changed, 18 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/24493d3c/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
index c274c61..8aa8436 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregatorTest.java
@@ -1,5 +1,21 @@
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .timeline;
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
 import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;



ambari git commit: AMBARI-8769. Aggregator checkpoint logic should take into account the checkPointDelay.

2014-12-17 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 0dcbcc1ba -> cd3fb17f4


AMBARI-8769. Aggregator checkpoint logic should take into account the 
checkPointDelay.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cd3fb17f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cd3fb17f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cd3fb17f

Branch: refs/heads/trunk
Commit: cd3fb17f4d770fb354c76f0eb1df738aa66848d5
Parents: 0dcbcc1
Author: Siddharth Wagle 
Authored: Wed Dec 17 12:53:50 2014 -0800
Committer: Siddharth Wagle 
Committed: Wed Dec 17 12:53:50 2014 -0800

--
 .../timeline/AbstractTimelineAggregator.java| 135 ++-
 .../AbstractTimelineAggregatorTest.java | 222 +++
 2 files changed, 303 insertions(+), 54 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/cd3fb17f/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregator.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregator.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregator.java
index a123e57..f169003 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregator.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractTimelineAggregator.java
@@ -21,6 +21,8 @@ import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.util.Clock;
+import org.apache.hadoop.yarn.util.SystemClock;
 
 import java.io.File;
 import java.io.IOException;
@@ -37,18 +39,26 @@ import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 public abstract class AbstractTimelineAggregator implements Runnable {
   protected final PhoenixHBaseAccessor hBaseAccessor;
   private final Log LOG;
+
+  private Clock clock;
   protected final long checkpointDelayMillis;
   protected final Integer resultsetFetchSize;
   protected Configuration metricsConf;
 
   public AbstractTimelineAggregator(PhoenixHBaseAccessor hBaseAccessor,
 Configuration metricsConf) {
+this(hBaseAccessor, metricsConf, new SystemClock());
+  }
+
+  public AbstractTimelineAggregator(PhoenixHBaseAccessor hBaseAccessor,
+Configuration metricsConf, Clock clk) {
 this.hBaseAccessor = hBaseAccessor;
 this.metricsConf = metricsConf;
 this.checkpointDelayMillis = SECONDS.toMillis(
   metricsConf.getInt(AGGREGATOR_CHECKPOINT_DELAY, 120));
 this.resultsetFetchSize = metricsConf.getInt(RESULTSET_FETCH_SIZE, 2000);
 this.LOG = LogFactory.getLog(this.getClass());
+this.clock = clk;
   }
 
   @Override
@@ -57,75 +67,93 @@ public abstract class AbstractTimelineAggregator implements 
Runnable {
 Long SLEEP_INTERVAL = getSleepIntervalMillis();
 
 while (true) {
-  long currentTime = System.currentTimeMillis();
-  long lastCheckPointTime = -1;
+  long sleepTime = runOnce(SLEEP_INTERVAL);
 
   try {
-lastCheckPointTime = readCheckPoint();
-if (isLastCheckPointTooOld(lastCheckPointTime)) {
-  LOG.warn("Last Checkpoint is too old, discarding last checkpoint. " +
-"lastCheckPointTime = " + lastCheckPointTime);
-  lastCheckPointTime = -1;
-}
-if (lastCheckPointTime == -1) {
-  // Assuming first run, save checkpoint and sleep.
-  // Set checkpoint to 2 minutes in the past to allow the
-  // agents/collectors to catch up
-  saveCheckPoint(currentTime - checkpointDelayMillis);
-}
-  } catch (IOException io) {
-LOG.warn("Unable to write last checkpoint time. Resuming sleep.", io);
+Thread.sleep(sleepTime);
+  } catch (InterruptedException e) {
+LOG.info("Sleep interrupted, continuing with aggregation.");
   }
-  long sleepTime = SLEEP_INTERVAL;
-
-  if (lastCheckPointTime != -1) {
-LOG.info("Last check point time: " + lastCheckPointTime + ", lagBy: "
-  + ((System.currentTimeMillis() - lastCheckPointTime) / 1000)
-  + " seconds.");
-
-long startTime = System.currentTimeMillis();
-boolean s
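
Taken together, the changes in this commit reshape the aggregator loop for testability: the wall clock is injected (SystemClock in production, a fake clock in tests) and a single iteration is pulled out into a method that returns how long to sleep. A condensed sketch of that shape; the real class does far more per iteration, and the names below mirror the diff rather than the full implementation:

import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;

// Sketch: an injectable clock plus a runOnce() that returns the next sleep
// time lets a unit test drive iterations without waiting on real time.
public class AggregatorLoopSketch implements Runnable {
  private final Clock clock;

  public AggregatorLoopSketch() { this(new SystemClock()); }
  public AggregatorLoopSketch(Clock clock) { this.clock = clock; }

  // One aggregation pass; returns how long the caller should sleep.
  protected long runOnce(long sleepIntervalMillis) {
    long currentTime = clock.getTime();
    // ... read checkpoint, aggregate up to currentTime, save new checkpoint ...
    return sleepIntervalMillis;
  }

  @Override
  public void run() {
    while (true) {
      long sleepTime = runOnce(120000L);
      try {
        Thread.sleep(sleepTime);
      } catch (InterruptedException e) {
        // mirror the hunk above: continue with the next aggregation pass
      }
    }
  }
}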

ambari git commit: AMBARI-8742. Implement Report graphs capability in AMS. (swagle)

2014-12-17 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk ef341466e -> 6355c79a4


AMBARI-8742. Implement Report graphs capability in AMS. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6355c79a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6355c79a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6355c79a

Branch: refs/heads/trunk
Commit: 6355c79a46b78dcd02734df32560392af756548c
Parents: ef34146
Author: Siddharth Wagle 
Authored: Wed Dec 17 12:07:44 2014 -0800
Committer: Siddharth Wagle 
Committed: Wed Dec 17 12:11:25 2014 -0800

--
 .../src/main/python/core/host_info.py   |  11 +-
 .../MetricsInitializationException.java |  42 
 .../MetricsSystemInitializationException.java   |  42 
 .../metrics/timeline/PhoenixHBaseAccessor.java  |  12 +-
 .../metrics/timeline/PhoenixTransactSQL.java|  26 +--
 .../src/test/conf/hbase-site.xml|  15 ++
 .../internal/AbstractPropertyProvider.java  |  64 ++
 .../controller/internal/PropertyInfo.java   |   9 +
 .../metrics/MetricsPropertyProvider.java|   4 +-
 .../metrics/timeline/AMSPropertyProvider.java   |  77 +--
 .../timeline/AMSReportPropertyProvider.java | 200 ++-
 .../controller/utilities/PropertyHelper.java|  15 +-
 .../src/main/resources/ganglia_properties.json  |  51 +++--
 .../timeline/AMSPropertyProviderTest.java   |  24 +--
 .../timeline/AMSReportPropertyProviderTest.java |  86 
 15 files changed, 513 insertions(+), 165 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/6355c79a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/host_info.py
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/host_info.py
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/host_info.py
index 43c3a41..a7add91 100644
--- 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/host_info.py
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/host_info.py
@@ -38,9 +38,10 @@ pass
 
 class HostInfo():
   def __init__(self):
-self.__last_network_io_time = 0;
+self.__last_network_io_time = 0
 self.__last_network_data = {}
 self.__last_network_lock = threading.Lock()
+self.__host_static_info = self.get_host_static_info()
 
   def get_cpu_times(self):
 """
@@ -48,10 +49,13 @@ class HostInfo():
 """
 cpu_times = psutil.cpu_times_percent()
 load_avg = os.getloadavg()
+cpu_count = self.__host_static_info.get('cpu_num', 1)
 
-number2percents = lambda x: x * 100
+# Divide by number of cpu's on the system
+number2percents = lambda x: ((x / int(cpu_count)) * 100)
 
 return {
+  'cpu_num': int(cpu_count),
   'cpu_user': number2percents(cpu_times.user) if hasattr(cpu_times, 
'user') else '',
   'cpu_system': number2percents(cpu_times.system) if hasattr(cpu_times, 
'system') else '',
   'cpu_idle': number2percents(cpu_times.idle) if hasattr(cpu_times, 
'idle') else '',
@@ -96,10 +100,13 @@ class HostInfo():
 mem_stats = psutil.virtual_memory()
 swap_stats = psutil.swap_memory()
 disk_usage = self.get_combined_disk_usage()
+mem_total = self.__host_static_info.get('mem_total')
 
 bytes2kilobytes = lambda x: x / 1024
 
 return {
+  'mem_total': bytes2kilobytes(mem_total) if mem_total else '',
+  'mem_used': bytes2kilobytes(mem_stats.used) if hasattr(mem_stats, 
'used') else '',
   'mem_free': bytes2kilobytes(mem_stats.free) if hasattr(mem_stats, 
'free') else '',
   'mem_shared': bytes2kilobytes(mem_stats.shared) if hasattr(mem_stats, 
'shared') else '',
   'mem_buffered': bytes2kilobytes(mem_stats.buffers) if hasattr(mem_stats, 
'buffers') else '',
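
The get_cpu_times() change above normalizes each value by the number of cores (per the in-line comment, "Divide by number of cpu's on the system"), so hosts with different core counts report comparable percentages. The same arithmetic expressed in Java terms, purely as an illustration of the formula rather than of any AMS API:

// Sketch of the normalization used above: value divided by core count,
// then scaled to a percentage. Units follow the Python code shown in the diff.
public class CpuPercentSketch {
  static double normalize(double rawValue, int cpuCount) {
    int cores = Math.max(cpuCount, 1);  // the monitor falls back to 1 core
    return (rawValue / cores) * 100.0;
  }

  public static void main(String[] args) {
    System.out.println(normalize(0.5, 4));  // 12.5
  }
}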

http://git-wip-us.apache.org/repos/asf/ambari/blob/6355c79a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsInitializationException.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsInitializationException.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsInitia

ambari git commit: AMBARI-7679. Add psutil based resource monitoring to collect host metrics. unitTests.py python version. (swagle)

2014-12-11 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 4c0ac95e8 -> 8235e7d63


AMBARI-7679. Add psutil based resource monitoring to collect host metrics. 
unitTests.py python version. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8235e7d6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8235e7d6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8235e7d6

Branch: refs/heads/trunk
Commit: 8235e7d63fbb0fb4f30ba209e3ac7f95bf3bf76f
Parents: 4c0ac95
Author: Siddharth Wagle 
Authored: Thu Dec 11 16:37:08 2014 -0800
Committer: Siddharth Wagle 
Committed: Thu Dec 11 16:45:15 2014 -0800

--
 .../src/test/python/unitTests.py  | 7 ++-
 1 file changed, 6 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/8235e7d6/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/unitTests.py
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/unitTests.py 
b/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/unitTests.py
index 73798cb..3469de1 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/unitTests.py
+++ b/ambari-metrics/ambari-metrics-host-monitoring/src/test/python/unitTests.py
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 '''
 Licensed to the Apache Software Foundation (ASF) under one
 or more contributor license agreements.  See the NOTICE file
@@ -39,7 +41,10 @@ def get_parent_path(base, directory_name):
 base = os.path.dirname(base)
 if base == "/":
   return None
-done = True if os.path.split(base)[-1] == directory_name else False
+if os.path.split(base)[-1] == directory_name:
+  done = True
+else:
+  done = False
   return base
 
 def get_test_files(path, mask = None, recursive=True):



ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. Ignore bad unit test. (swagle)

2014-12-11 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 6e09a2ff3 -> f5558eb82


AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. 
Ignore bad unit test. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f5558eb8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f5558eb8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f5558eb8

Branch: refs/heads/trunk
Commit: f5558eb8271076757a994be71211e71a70e2f885
Parents: 6e09a2f
Author: Siddharth Wagle 
Authored: Thu Dec 11 12:17:08 2014 -0800
Committer: Siddharth Wagle 
Committed: Thu Dec 11 12:21:32 2014 -0800

--
 .../TestMemoryApplicationHistoryStore.java | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/f5558eb8/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
index 7a45405..fc5c096 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestMemoryApplicationHistoryStore.java
@@ -30,6 +30,7 @@ import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.Applicati
 import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
 import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 
 public class TestMemoryApplicationHistoryStore extends
@@ -184,6 +185,7 @@ public class TestMemoryApplicationHistoryStore extends
   }
 
   @Test
+  @Ignore
   public void testMassiveWriteContainerHistory() throws IOException {
 long mb = 1024 * 1024;
 Runtime runtime = Runtime.getRuntime();
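
The test above is parked with @Ignore rather than deleted, which keeps it visible in the source while excluding it from runs. For reference, JUnit 4 also accepts a reason string on the annotation, which then shows up in reports; the test name and reason below are made up for illustration only:

import org.junit.Ignore;
import org.junit.Test;

public class IgnoredTestSketch {
  @Test
  @Ignore("Writes a very large container history; too slow and flaky for routine runs")
  public void testMassiveWrite() {
    // intentionally skipped while the underlying issue is open
  }
}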



ambari git commit: AMBARI-8521. Add STORM metric sink implementation to enable sink to AMS. (Szilard Nemethy via swagle)

2014-12-10 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 391a53b40 -> 8e4a00308


AMBARI-8521. Add STORM metric sink implementation to enable sink to AMS. 
(Szilard Nemethy via swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8e4a0030
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8e4a0030
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8e4a0030

Branch: refs/heads/trunk
Commit: 8e4a00308147ba496325bd4e3766ca4f9fba9d54
Parents: 391a53b
Author: Siddharth Wagle 
Authored: Wed Dec 10 13:51:44 2014 -0800
Committer: Siddharth Wagle 
Committed: Wed Dec 10 13:51:52 2014 -0800

--
 .../ambari-metrics-storm-sink/pom.xml   | 181 +++
 .../src/main/assemblies/empty.xml   |  21 +++
 .../src/main/assemblies/sink.xml|  34 
 .../src/main/conf/storm-metrics2.properties.j2  |  21 +++
 .../sink/storm/StormTimelineMetricsSink.java| 136 ++
 .../storm/StormTimelineMetricsSinkTest.java |  68 +++
 ambari-metrics/pom.xml  |   1 +
 .../services/STORM/package/scripts/params.py|   5 +
 .../2.1/services/STORM/package/scripts/storm.py |   6 +
 .../templates/storm-metrics2.properties.j2  |  21 +++
 .../STORM/package/templates/storm.yaml.j2   |   6 +
 11 files changed, 500 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/8e4a0030/ambari-metrics/ambari-metrics-storm-sink/pom.xml
--
diff --git a/ambari-metrics/ambari-metrics-storm-sink/pom.xml 
b/ambari-metrics/ambari-metrics-storm-sink/pom.xml
new file mode 100644
index 000..9b8960c
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-storm-sink/pom.xml
@@ -0,0 +1,181 @@
+
+
+http://maven.apache.org/POM/4.0.0";
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+ http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+  
+ambari-metrics
+org.apache.ambari
+0.1.0-SNAPSHOT
+  
+  4.0.0
+  ambari-metrics-storm-sink
+  0.1.0-SNAPSHOT
+  jar
+  
+
+  
+maven-dependency-plugin
+
+  
+package
+
+  copy-dependencies
+
+
+  ${project.build.directory}/lib
+
+  
+
+  
+  
+maven-assembly-plugin
+
+  
+src/main/assemblies/sink.xml
+  
+  gnu
+
+
+  
+build-tarball
+package
+
+  single
+
+  
+
+  
+  
+maven-compiler-plugin
+3.0
+  
+  
+org.codehaus.mojo
+build-helper-maven-plugin
+1.8
+
+  
+parse-version
+validate
+
+  parse-version
+
+  
+  
+regex-property
+
+  regex-property
+
+
+  ambariVersion
+  ${project.version}
+  ^([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-).*
+  $1.$2.$3
+  false
+
+  
+
+  
+  
+com.github.goldin
+copy-maven-plugin
+0.2.5
+
+  
+create-archive
+none
+  
+
+  
+  
+org.codehaus.mojo
+rpm-maven-plugin
+2.0.1
+
+  
+none
+
+  rpm
+
+  
+
+
+  ambari-metrics-storm-sink
+  2012, Apache Software Foundation
+  Development
+  Maven Recipe: RPM Package.
+  
+
+  /usr/lib/storm/lib
+  644
+  root
+  root
+  
+
+  
target/${project.artifactId}-${project.version}.jar
+
+
+  target/lib
+
+  
+
+  
+
+  
+
+  
+  
+
+  org.apache.storm
+  storm-core
+  0.9.3
+  provided
+
+
+  org.apache.ambari
+  ambari-metrics-common
+  0.1.0-SNAPSHOT
+
+
+  junit
+  junit
+  test
+  4.10
+
+
+  org.easymock
+  easymock
+  3.2
+  test
+
+
+  org.powermock
+  powermock-api-easymock
+  1.4.9
+  test
+
+
+  org.powermock
+  powermock-module-junit4
+  1.4.9
+  test
+
+  
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/8e4a0030/ambari-metrics/ambari-metrics

ambari git commit: AMBARI-8420. Add Ambari Metric Provider to get metrics from AMS. Check liveliness. (swagle)

2014-12-10 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 7bf992ddb -> d2bc0ac81


AMBARI-8420. Add Ambari Metric Provider to get metrics from AMS. Check 
liveliness. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d2bc0ac8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d2bc0ac8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d2bc0ac8

Branch: refs/heads/trunk
Commit: d2bc0ac8193e13988879bde4078a7406bacd0935
Parents: 7bf992d
Author: Siddharth Wagle 
Authored: Wed Dec 10 11:34:53 2014 -0800
Committer: Siddharth Wagle 
Committed: Wed Dec 10 11:39:43 2014 -0800

--
 .../metrics/timeline/AMSPropertyProvider.java   | 16 
 1 file changed, 16 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/d2bc0ac8/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
index fef5e9d..50eb08e 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProvider.java
@@ -159,6 +159,22 @@ public abstract class AMSPropertyProvider extends 
MetricsPropertyProvider {
 Set resourceSet = resourceEntry.getValue();
 
 for (Resource resource : resourceSet) {
+  String clusterName = (String) resource.getPropertyValue(clusterNamePropertyId);
+
+  // Check liveliness of host
+  if (!hostProvider.isCollectorHostLive(clusterName, TIMELINE_METRICS)) {
+LOG.info("METRIC_COLLECTOR host is not live. Skip populating " +
+  "resources with metrics.");
+return Collections.emptySet();
+  }
+
+  // Check liveliness of Collector
+  if (!hostProvider.isCollectorComponentLive(clusterName, TIMELINE_METRICS)) {
+LOG.info("METRIC_COLLECTOR is not live. Skip populating resources" +
+  " with metrics.");
+return Collections.emptySet();
+  }
+
   String metricsParam = getSetString(metrics.keySet(), -1);
   // Reuse uriBuilder
   uriBuilder.removeQuery();
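
The guard added above returns an empty resource set as soon as either the collector host or the METRIC_COLLECTOR component is reported down, instead of issuing metric requests that would only time out. A stripped-down sketch of that check; the interface below is a stand-in for the real host provider, not the Ambari API:

import java.util.Collections;
import java.util.Set;

// Sketch: decide up front whether it is worth querying the metrics collector.
public class CollectorLivenessSketch {
  interface CollectorHostProvider {
    boolean isCollectorHostLive(String clusterName);
    boolean isCollectorComponentLive(String clusterName);
  }

  static boolean collectorIsLive(String clusterName, CollectorHostProvider provider) {
    return provider.isCollectorHostLive(clusterName)
        && provider.isCollectorComponentLive(clusterName);
  }

  static Set<Object> skipPopulation() {
    // Same outcome as the diff when the check fails: skip metrics entirely.
    return Collections.emptySet();
  }
}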



ambari git commit: AMBARI-8594. Push AMS Hbase metrics with its own appid. Unit test fix. (swagle)

2014-12-09 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 1f950c054 -> cff01d6ff


AMBARI-8594. Push AMS Hbase metrics with its own appid. Unit test fix. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cff01d6f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cff01d6f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cff01d6f

Branch: refs/heads/trunk
Commit: cff01d6ffda174c6a5be34de5d41a3f638c9a3dd
Parents: 1f950c0
Author: Siddharth Wagle 
Authored: Tue Dec 9 09:32:16 2014 -0800
Committer: Siddharth Wagle 
Committed: Tue Dec 9 09:34:38 2014 -0800

--
 .../hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java   | 2 +-
 .../metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java  | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/cff01d6f/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
--
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
 
b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
index 0755f51..fb061a9 100644
--- 
a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
+++ 
b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
@@ -110,7 +110,7 @@ public class HadoopTimelineMetricsSink extends 
AbstractTimelineMetricsSink imple
   private String getServiceName(SubsetConfiguration conf) {
 String serviceNamePrefix = conf.getString(SERVICE_NAME_PREFIX, "");
 return serviceNamePrefix.isEmpty() ? getFirstConfigPrefix(conf) :
-  serviceNamePrefix + "-" + getFirstConfigPrefix(conf);
+   serviceNamePrefix + "-" + getFirstConfigPrefix(conf);
   }
 
   private String getFirstConfigPrefix(SubsetConfiguration conf) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/cff01d6f/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
--
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
 
b/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
index 2421a73..3c83868 100644
--- 
a/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
+++ 
b/ambari-metrics/ambari-metrics-hadoop-sink/src/test/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSinkTest.java
@@ -50,6 +50,7 @@ public class HadoopTimelineMetricsSinkTest {
 expect(conf.getParent()).andReturn(null).anyTimes();
 expect(conf.getPrefix()).andReturn("service").anyTimes();
 
expect(conf.getString(eq(COLLECTOR_HOST_PROPERTY))).andReturn("localhost:63188").anyTimes();
+expect(conf.getString(eq("serviceName-prefix"), 
eq(""))).andReturn("").anyTimes();
 
 expect(conf.getInt(eq(MAX_METRIC_ROW_CACHE_SIZE), 
anyInt())).andReturn(10).anyTimes();
 expect(conf.getInt(eq(METRICS_SEND_INTERVAL), 
anyInt())).andReturn(1000).anyTimes();



ambari git commit: AMBARI-8599. Support altering TTL on Phoenix tables.

2014-12-08 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk cda2ea25f -> e0c8df5a9


AMBARI-8599. Support altering TTL on Phoenix tables.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e0c8df5a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e0c8df5a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e0c8df5a

Branch: refs/heads/trunk
Commit: e0c8df5a93726933021cdfb62b94bd1894c17d20
Parents: cda2ea2
Author: Siddharth Wagle 
Authored: Mon Dec 8 20:17:58 2014 -0800
Committer: Siddharth Wagle 
Committed: Mon Dec 8 20:17:58 2014 -0800

--
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 37 +---
 .../metrics/timeline/PhoenixTransactSQL.java|  5 +++
 2 files changed, 38 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/e0c8df5a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index 4f248b7..cb28a8b 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.util.RetryCounterFactory;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
 import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
+import org.apache.phoenix.exception.SQLExceptionCode;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.type.TypeReference;
 import java.io.IOException;
@@ -39,6 +40,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import static java.util.concurrent.TimeUnit.SECONDS;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.ALTER_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_AGGREGATE_HOURLY_TABLE_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_AGGREGATE_MINUTE_TABLE_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_SQL;
@@ -47,6 +49,10 @@ import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_ENCODING;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_TABLE_COMPRESSION;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_MINUTE_TABLE_NAME;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_NAME;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_CLUSTER_AGGREGATE_TABLE_NAME;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_RECORD_TABLE_NAME;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.UPSERT_AGGREGATE_RECORD_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.UPSERT_CLUSTER_AGGREGATE_SQL;
@@ -239,13 +245,36 @@ public class PhoenixHBaseAccessor {
 encoding, clusterMinTtl, compression));
   
stmt.executeUpdate(String.format(CREATE_METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_SQL,
 encoding, clusterHourTtl, compression));
+
+  //alter TTL options to update tables
+  stmt.executeUpdate(String.format(ALTER_SQL,
+METRICS_RECORD_TABLE_NAME,
+prec
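
The point of the change above is that CREATE-time TTLs only help newly created tables; for tables that already exist the TTL has to be altered in place, which Phoenix supports through ALTER TABLE ... SET TTL. A hedged sketch of issuing such a statement over JDBC; the URL, table name and TTL value are placeholders, and ALTER_SQL below is a local string rather than the project's constant:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

// Sketch: update the TTL (in seconds) on an existing Phoenix-managed table.
public class AlterTtlSketch {
  private static final String ALTER_SQL = "ALTER TABLE %s SET TTL=%s";

  public static void alterTtl(String jdbcUrl, String table, long ttlSeconds)
      throws SQLException {
    try (Connection conn = DriverManager.getConnection(jdbcUrl);
         Statement stmt = conn.createStatement()) {
      stmt.executeUpdate(String.format(ALTER_SQL, table, ttlSeconds));
    }
  }

  public static void main(String[] args) throws SQLException {
    // Placeholder endpoint and table; the real accessor takes these from config.
    alterTtl("jdbc:phoenix:localhost:2181", "METRIC_RECORD", 86400L);
  }
}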

ambari git commit: AMBARI-8594. Push AMS Hbase metrics with its own appid. (swagle)

2014-12-08 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 23e0228c4 -> cda2ea25f


AMBARI-8594. Push AMS Hbase metrics with its own appid. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cda2ea25
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cda2ea25
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cda2ea25

Branch: refs/heads/trunk
Commit: cda2ea25f55d05b8d995af63ba05c055e9d313df
Parents: 23e0228
Author: Siddharth Wagle 
Authored: Mon Dec 8 20:14:34 2014 -0800
Committer: Siddharth Wagle 
Committed: Mon Dec 8 20:14:40 2014 -0800

--
 .../sink/timeline/HadoopTimelineMetricsSink.java|  9 -
 .../metrics/timeline/AMSComponentPropertyProvider.java  |  4 ++--
 .../timeline/AMSHostComponentPropertyProvider.java  |  4 ++--
 .../metrics/timeline/AMSPropertyProvider.java   | 12 +++-
 .../templates/hadoop-metrics2-hbase.properties.j2   |  1 +
 5 files changed, 20 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/cda2ea25/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
--
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
 
b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
index 8fcf464..0755f51 100644
--- 
a/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
+++ 
b/ambari-metrics/ambari-metrics-hadoop-sink/src/main/java/org/apache/hadoop/metrics2/sink/timeline/HadoopTimelineMetricsSink.java
@@ -42,6 +42,7 @@ public class HadoopTimelineMetricsSink extends 
AbstractTimelineMetricsSink imple
   private String serviceName = "";
   private List metricsServers;
   private String collectorUri;
+  private static final String SERVICE_NAME_PREFIX = "serviceName-prefix";
 
   @Override
   public void init(SubsetConfiguration conf) {
@@ -61,7 +62,7 @@ public class HadoopTimelineMetricsSink extends 
AbstractTimelineMetricsSink imple
   }
 }
 
-serviceName = getFirstConfigPrefix(conf);
+serviceName = getServiceName(conf);
 
 // Load collector configs
 metricsServers = Servers.parse(conf.getString(COLLECTOR_HOST_PROPERTY), 
8188);
@@ -106,6 +107,12 @@ public class HadoopTimelineMetricsSink extends 
AbstractTimelineMetricsSink imple
 }
   }
 
+  private String getServiceName(SubsetConfiguration conf) {
+String serviceNamePrefix = conf.getString(SERVICE_NAME_PREFIX, "");
+return serviceNamePrefix.isEmpty() ? getFirstConfigPrefix(conf) :
+  serviceNamePrefix + "-" + getFirstConfigPrefix(conf);
+  }
+
   private String getFirstConfigPrefix(SubsetConfiguration conf) {
 while (conf.getParent() instanceof SubsetConfiguration) {
   conf = (SubsetConfiguration) conf.getParent();
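
The getServiceName() logic above is what lets the AMS-embedded HBase report under its own appid: when a serviceName-prefix is configured for the sink, it is prepended to the metrics2 prefix. Just the string rule, pulled out of the SubsetConfiguration plumbing (the example values are illustrative):

// Sketch: derive the reported service name from an optional prefix plus the
// metrics2 configuration prefix, mirroring the hunk above.
public class ServiceNameSketch {
  static String serviceName(String prefixFromConfig, String metricsPrefix) {
    return (prefixFromConfig == null || prefixFromConfig.isEmpty())
        ? metricsPrefix
        : prefixFromConfig + "-" + metricsPrefix;
  }

  public static void main(String[] args) {
    System.out.println(serviceName("", "hbase"));    // hbase
    System.out.println(serviceName("ams", "hbase")); // ams-hbase
  }
}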

http://git-wip-us.apache.org/repos/asf/ambari/blob/cda2ea25/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSComponentPropertyProvider.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSComponentPropertyProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSComponentPropertyProvider.java
index 944ec5c..815224f 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSComponentPropertyProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSComponentPropertyProvider.java
@@ -46,8 +46,8 @@ public class AMSComponentPropertyProvider extends 
AMSPropertyProvider {
   protected String getComponentName(Resource resource) {
 String componentName = (String) 
resource.getPropertyValue(componentNamePropertyId);
 
-if (TIMLINE_APPID_MAP.containsKey(componentName)) {
-  componentName = TIMLINE_APPID_MAP.get(componentName);
+if (TIMELINE_APPID_MAP.containsKey(componentName)) {
+  componentName = TIMELINE_APPID_MAP.get(componentName);
 }
 
 return componentName;
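
On the read side, the property provider has to translate an Ambari component name into whatever appid the metrics were written under, which is what the map lookup above does. A tiny stand-in for that lookup; the entries are hypothetical and not the contents of the real TIMELINE_APPID_MAP:

import java.util.HashMap;
import java.util.Map;

// Sketch: component-name-to-appid translation with a pass-through default.
public class AppIdLookupSketch {
  private static final Map<String, String> APP_ID_MAP = new HashMap<String, String>();
  static {
    APP_ID_MAP.put("HBASE_REGIONSERVER", "hbase");    // hypothetical entry
    APP_ID_MAP.put("METRICS_COLLECTOR", "ams-hbase"); // hypothetical entry
  }

  static String toAppId(String componentName) {
    String appId = APP_ID_MAP.get(componentName);
    return appId != null ? appId : componentName;
  }
}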

http://git-wip-us.apache.org/repos/asf/ambari/blob/cda2ea25/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSHostComponentPropertyProvider.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/timeline/AMSHostComponentPropertyProvider.java
 
b/ambari-server/src/main/java/

ambari git commit: AMBARI-8566. Remaining changes to Hadoop Sink implementation. HBASE template fix. (swagle)

2014-12-05 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 9e052eed1 -> 05f2f7755


AMBARI-8566. Remaining changes to Hadoop Sink implementation. HBASE template 
fix. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/05f2f775
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/05f2f775
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/05f2f775

Branch: refs/heads/trunk
Commit: 05f2f77555a44e00c288a7ef3ddec570c55e6745
Parents: 9e052ee
Author: Siddharth Wagle 
Authored: Fri Dec 5 16:08:44 2014 -0800
Committer: Siddharth Wagle 
Committed: Fri Dec 5 16:08:44 2014 -0800

--
 ...-metrics2-hbase.properties-GANGLIA-MASTER.j2 | 46 ++--
 ...doop-metrics2-hbase.properties-GANGLIA-RS.j2 | 44 +--
 2 files changed, 45 insertions(+), 45 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/05f2f775/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
index 2f13b28..3a0aaf6 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
@@ -47,6 +47,30 @@
 # If this variable is left out, then the default is no expiration.
 hbase.extendedperiod = 3600
 
+{% if has_metric_collector %}
+
+# HBase-specific configuration to reset long-running stats (e.g. compactions)
+# If this variable is left out, then the default is no expiration.
+hbase.extendedperiod = 3600
+
+hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+hbase.period=10
+hbase.collector={{metric_collector_host}}:8188
+
+jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+jvm.period=10
+jvm.collector={{metric_collector_host}}:8188
+
+rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+rpc.period=10
+rpc.collector={{metric_collector_host}}:8188
+
+hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+hbase.sink.timeline.period=10
+hbase.sink.timeline.collector={{metric_collector_host}}:8188
+
+{% else %}
+
 # Configuration of the "hbase" context for ganglia
 # Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
 # hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext
@@ -80,26 +104,4 @@ hbase.sink.ganglia.period=10
 
 hbase.sink.ganglia.servers={{ganglia_server_host}}:8663
 
-{% if has_metric_collector %}
-
-# HBase-specific configuration to reset long-running stats (e.g. compactions)
-# If this variable is left out, then the default is no expiration.
-hbase.extendedperiod = 3600
-
-hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-hbase.period=10
-hbase.collector={{metric_collector_host}}:8188
-
-jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-jvm.period=10
-jvm.collector={{metric_collector_host}}:8188
-
-rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-rpc.period=10
-rpc.collector={{metric_collector_host}}:8188
-
-hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-hbase.sink.timeline.period=10
-hbase.sink.timeline.collector={{metric_collector_host}}:8188
-
 {% endif %}

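For reference, a sketch of what the MASTER template now renders when has_metric_collector is true, with an illustrative collector hostname substituted for {{metric_collector_host}} (the jvm.* and rpc.* contexts follow the same pattern as hbase.*):

# hadoop-metrics2-hbase.properties (rendered excerpt, metric collector present)
hbase.extendedperiod = 3600
hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
hbase.period=10
hbase.collector=metrics-collector.example.com:8188

hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
hbase.sink.timeline.period=10
hbase.sink.timeline.collector=metrics-collector.example.com:8188

The Ganglia configuration is now rendered only in the else branch, so a node no longer ends up with both sinks configured for the same contexts.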
http://git-wip-us.apache.org/repos/asf/ambari/blob/05f2f775/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
index 1ca3a98..d13540f 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2
@@ -46,6 +46,26 @@
 # If this variable is left out, then the default is no expiration.
 hbase.extendedperiod = 3600
 
+{% if has_metric_collector %}
+
+hbase.class=org.apache.ha

ambari git commit: AMBARI-8553. Cluster cannot be installed if Ganglia is not selected. (swagle)

2014-12-04 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk ae24c7331 -> 76ca626b6


AMBARI-8553. Cluster cannot be installed if Ganglia is not selected. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/76ca626b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/76ca626b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/76ca626b

Branch: refs/heads/trunk
Commit: 76ca626b6001c867a2970fa516e3a9ce529f3a02
Parents: ae24c73
Author: Siddharth Wagle 
Authored: Thu Dec 4 19:09:12 2014 -0800
Committer: Siddharth Wagle 
Committed: Thu Dec 4 19:09:12 2014 -0800

--
 .../metrics/MetricsPropertyProviderProxy.java  | 17 -
 .../MetricsReportPropertyProviderProxy.java| 16 +++-
 2 files changed, 23 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/76ca626b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsPropertyProviderProxy.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsPropertyProviderProxy.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsPropertyProviderProxy.java
index e1f607e..d2cd959 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsPropertyProviderProxy.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsPropertyProviderProxy.java
@@ -37,6 +37,8 @@ import 
org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.utilities.StreamProvider;
 import java.util.Map;
 import java.util.Set;
+
+import static 
org.apache.ambari.server.controller.metrics.MetricsServiceProvider.MetricsService;
 import static 
org.apache.ambari.server.controller.metrics.MetricsServiceProvider.MetricsService.GANGLIA;
 import static 
org.apache.ambari.server.controller.metrics.MetricsServiceProvider.MetricsService.TIMELINE_METRICS;
 import static org.apache.ambari.server.controller.spi.Resource.InternalType;
@@ -159,11 +161,16 @@ public class MetricsPropertyProviderProxy extends 
AbstractPropertyProvider {
  public Set<Resource> populateResources(Set<Resource> resources, Request request,
                                         Predicate predicate) throws SystemException {
 
-if (metricsServiceProvider.getMetricsServiceType().equals(GANGLIA)) {
-  return gangliaPropertyProvider.populateResources(resources, request, 
predicate);
-} else if 
(metricsServiceProvider.getMetricsServiceType().equals(TIMELINE_METRICS)) {
-  return amsPropertyProvider.populateResources(resources, request, 
predicate);
+MetricsService metricsService = 
metricsServiceProvider.getMetricsServiceType();
+
+if (metricsService != null) {
+  if (metricsService.equals(GANGLIA)) {
+return gangliaPropertyProvider.populateResources(resources, request, 
predicate);
+  } else if (metricsService.equals(TIMELINE_METRICS)) {
+return amsPropertyProvider.populateResources(resources, request, 
predicate);
+  }
 }
-return null;
+
+return resources;
   }
 }

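The practical effect of the hunk above: getMetricsServiceType() can legitimately return null when neither Ganglia nor the metrics collector is installed, and the proxy now hands the resources back untouched instead of returning null. A stripped-down sketch of the same guard pattern, with illustrative names rather than the Ambari classes themselves:

import java.util.Set;

class MetricsProxySketch {
  enum MetricsService { GANGLIA, TIMELINE_METRICS }

  // Returns the input set untouched when no metrics service is configured,
  // so callers that iterate or chain the result never see a null.
  static Set<String> populate(Set<String> resources, MetricsService service) {
    if (service == MetricsService.GANGLIA) {
      return populateFromGanglia(resources);
    } else if (service == MetricsService.TIMELINE_METRICS) {
      return populateFromTimeline(resources);
    }
    return resources;
  }

  private static Set<String> populateFromGanglia(Set<String> r)  { return r; }
  private static Set<String> populateFromTimeline(Set<String> r) { return r; }
}

A null service simply falls through both branches here, which mirrors the null check in the actual change.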
http://git-wip-us.apache.org/repos/asf/ambari/blob/76ca626b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsReportPropertyProviderProxy.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsReportPropertyProviderProxy.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsReportPropertyProviderProxy.java
index 154046f..1bcf01a 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsReportPropertyProviderProxy.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/MetricsReportPropertyProviderProxy.java
@@ -31,6 +31,7 @@ import 
org.apache.ambari.server.controller.utilities.StreamProvider;
 import java.util.Map;
 import java.util.Set;
 
+import static 
org.apache.ambari.server.controller.metrics.MetricsServiceProvider.MetricsService;
 import static 
org.apache.ambari.server.controller.metrics.MetricsServiceProvider.MetricsService.GANGLIA;
 import static 
org.apache.ambari.server.controller.metrics.MetricsServiceProvider.MetricsService.TIMELINE_METRICS;
 
@@ -83,11 +84,16 @@ public class MetricsReportPropertyProviderProxy extends 
AbstractPropertyProvider
  public Set<Resource> populateResources(Set<Resource> resources, Request request,
                                         Predicate predicate) throws SystemException {
 
-if (metricsServiceProvider.getMetricsServiceType().equals(GANGLIA)) {
-  return gangliaMetricsReportProvider.populateResour

[1/2] ambari git commit: AMBARI-8535. AMS Provider is not instantiated after install. (swagle)

2014-12-03 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 2518a56a3 -> 2160d544a


http://git-wip-us.apache.org/repos/asf/ambari/blob/2160d544/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaReportPropertyProviderTest.java
--
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaReportPropertyProviderTest.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaReportPropertyProviderTest.java
index 8301224..c7ba4a9 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaReportPropertyProviderTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaReportPropertyProviderTest.java
@@ -31,14 +31,12 @@ import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
-
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
-
-import static 
org.apache.ambari.server.controller.metrics.MetricsPropertyProvider.MetricsService;
+import static 
org.apache.ambari.server.controller.metrics.MetricsServiceProvider.MetricsService;
 
 /**
  * Test the Ganglia report property provider.

http://git-wip-us.apache.org/repos/asf/ambari/blob/2160d544/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProviderTest.java
--
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProviderTest.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProviderTest.java
index d475e0f..82f9aff 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProviderTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/timeline/AMSPropertyProviderTest.java
@@ -17,27 +17,42 @@
  */
 package org.apache.ambari.server.controller.metrics.timeline;
 
+import com.google.inject.Injector;
 import org.apache.ambari.server.configuration.ComponentSSLConfiguration;
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.MaintenanceStateHelper;
+import org.apache.ambari.server.controller.internal.AbstractProviderModule;
+import org.apache.ambari.server.controller.internal.ClusterResourceProvider;
+import 
org.apache.ambari.server.controller.internal.ConfigurationResourceProvider;
+import 
org.apache.ambari.server.controller.internal.HostComponentResourceProvider;
 import org.apache.ambari.server.controller.internal.PropertyInfo;
 import org.apache.ambari.server.controller.internal.ResourceImpl;
+import org.apache.ambari.server.controller.internal.ServiceResourceProvider;
 import org.apache.ambari.server.controller.internal.TemporalInfoImpl;
 import org.apache.ambari.server.controller.metrics.MetricHostProvider;
-import org.apache.ambari.server.controller.metrics.MetricsPropertyProvider;
 import org.apache.ambari.server.controller.metrics.ganglia.TestStreamProvider;
 import org.apache.ambari.server.controller.spi.Request;
 import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.spi.ResourceProvider;
 import org.apache.ambari.server.controller.spi.SystemException;
 import org.apache.ambari.server.controller.spi.TemporalInfo;
 import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.state.Clusters;
 import org.apache.http.client.utils.URIBuilder;
 import org.junit.Assert;
 import org.junit.Test;
+
 import java.io.File;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
+
+import static 
org.apache.ambari.server.controller.metrics.MetricsServiceProvider.MetricsService;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
 import static org.mockito.Mockito.mock;
 
 public class AMSPropertyProviderTest {
@@ -179,7 +194,8 @@ public class AMSPropertyProviderTest {
   public class TestMetricHostProvider implements MetricHostProvider {
 
 @Override
-public String getCollectorHostName(String clusterName, 
MetricsPropertyProvider.MetricsService service) throws SystemException {
+public String getCollectorHostName(String clusterName, MetricsService 
service)
+  throws SystemException {
   return "localhost";
 }
 
@@ -189,17 +205,17 @@ public class AMSPropertyProviderTest {
 }
 
 @Override
-public String getCollectorPortName(String clusterName, 
MetricsPropertyProvider.MetricsService service) throws SystemException {
+publi

[2/2] ambari git commit: AMBARI-8535. AMS Provider is not instantiated after install. (swagle)

2014-12-03 Thread swagle
AMBARI-8535. AMS Provider is not instantiated after install. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2160d544
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2160d544
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2160d544

Branch: refs/heads/trunk
Commit: 2160d544a648224f5320337bff5184fa0498b655
Parents: 2518a56
Author: Siddharth Wagle 
Authored: Wed Dec 3 12:29:37 2014 -0800
Committer: Siddharth Wagle 
Committed: Wed Dec 3 12:29:37 2014 -0800

--
 .../internal/AbstractProviderModule.java| 143 
 .../internal/ClusterResourceProvider.java   |   4 +-
 .../internal/StackDefinedPropertyProvider.java  |  22 +--
 .../controller/metrics/MetricHostProvider.java  |   3 +-
 .../metrics/MetricsPropertyProvider.java| 118 +
 .../metrics/MetricsPropertyProviderProxy.java   | 169 +++
 .../metrics/MetricsReportPropertyProvider.java  |  31 +---
 .../MetricsReportPropertyProviderProxy.java |  93 ++
 .../metrics/MetricsServiceProvider.java |  35 
 .../ganglia/GangliaHostPropertyProvider.java|   1 -
 .../ganglia/GangliaPropertyProvider.java|  32 ++--
 .../ganglia/GangliaReportPropertyProvider.java  |   4 +-
 .../metrics/timeline/AMSPropertyProvider.java   |  21 +--
 .../internal/MetricsServiceProviderTest.java| 135 +++
 .../StackDefinedPropertyProviderTest.java   |  39 -
 .../metrics/JMXPropertyProviderTest.java|  17 +-
 .../RestMetricsPropertyProviderTest.java|  17 +-
 .../ganglia/GangliaPropertyProviderTest.java|  15 +-
 .../GangliaReportPropertyProviderTest.java  |   4 +-
 .../timeline/AMSPropertyProviderTest.java   |  26 ++-
 20 files changed, 718 insertions(+), 211 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/2160d544/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
index 7b734cf..1616493 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
@@ -33,6 +33,7 @@ import 
org.apache.ambari.server.controller.jmx.JMXPropertyProvider;
 import org.apache.ambari.server.controller.metrics.MetricHostProvider;
 import org.apache.ambari.server.controller.metrics.MetricsPropertyProvider;
 import 
org.apache.ambari.server.controller.metrics.MetricsReportPropertyProvider;
+import org.apache.ambari.server.controller.metrics.MetricsServiceProvider;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
 import org.apache.ambari.server.controller.spi.Predicate;
@@ -56,6 +57,7 @@ import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.State;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import java.net.InetAddress;
 import java.util.Collections;
 import java.util.EnumMap;
@@ -67,15 +69,16 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
-import static 
org.apache.ambari.server.controller.metrics.MetricsPropertyProvider.MetricsService;
-import static 
org.apache.ambari.server.controller.metrics.MetricsPropertyProvider.MetricsService.GANGLIA;
-import static 
org.apache.ambari.server.controller.metrics.MetricsPropertyProvider.MetricsService.TIMELINE_METRICS;
 
+import static 
org.apache.ambari.server.controller.metrics.MetricsServiceProvider.MetricsService.GANGLIA;
+import static 
org.apache.ambari.server.controller.metrics.MetricsServiceProvider.MetricsService.TIMELINE_METRICS;
 
 /**
  * An abstract provider module implementation.
  */
-public abstract class AbstractProviderModule implements ProviderModule, 
ResourceProviderObserver, JMXHostProvider, MetricHostProvider, HostInfoProvider 
{
+public abstract class AbstractProviderModule implements ProviderModule,
+ResourceProviderObserver, JMXHostProvider, MetricHostProvider,
+MetricsServiceProvider, HostInfoProvider {
 
   private static final int PROPERTY_REQUEST_CONNECT_TIMEOUT = 5000;
   private static final int PROPERTY_REQUEST_READ_TIMEOUT= 1;
@@ -226,8 +229,93 @@ public abstract class AbstractProviderModule implements 
ProviderModule, Resource
 }
   }
 
+  // - MetricsServiceProvider

[28/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/docs/index.rst
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/docs/index.rst
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/docs/index.rst
new file mode 100644
index 000..12327a9
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/docs/index.rst
@@ -0,0 +1,1247 @@
+.. module:: psutil
+   :synopsis: psutil module
+.. moduleauthor:: Giampaolo Rodola' 
+
+.. warning::
+
+   This documentation refers to new 2.X version of psutil.
+   Instructions on how to port existing 1.2.1 code are
+   `here `__.
+   Old 1.2.1 documentation is still available
+   `here `__.
+
+psutil documentation
+
+
+Quick links
+---
+
+* `Home page `__
+* `Blog `__
+* `Download 
`__
+* `Forum `__
+* `What's new `__
+
+About
+-
+
+From project's home page:
+
+  psutil (python system and process utilities) is a cross-platform library for
+  retrieving information on running
+  **processes** and **system utilization** (CPU, memory, disks, network) in
+  **Python**.
+  It is useful mainly for **system monitoring**, **profiling** and **limiting
+  process resources** and **management of running processes**.
+  It implements many functionalities offered by command line tools
+  such as: *ps, top, lsof, netstat, ifconfig, who, df, kill, free, nice,
+  ionice, iostat, iotop, uptime, pidof, tty, taskset, pmap*.
+  It currently supports **Linux, Windows, OSX, FreeBSD** and **Sun Solaris**,
+  both **32-bit** and **64-bit** architectures, with Python versions from
+  **2.4** to **3.4**.
+  `Pypy `__ is also known to work.
+
+The psutil documentation you're reading is distributed as a single HTML page.
+
+System related functions
+
+
+CPU
+---
+
+.. function:: cpu_times(percpu=False)
+
+  Return system CPU times as a namedtuple.
+  Every attribute represents the seconds the CPU has spent in the given mode.
+  The attributes availability varies depending on the platform:
+
+  - **user**
+  - **system**
+  - **idle**
+  - **nice** *(UNIX)*
+  - **iowait** *(Linux)*
+  - **irq** *(Linux, FreeBSD)*
+  - **softirq** *(Linux)*
+  - **steal** *(Linux 2.6.11+)*
+  - **guest** *(Linux 2.6.24+)*
+  - **guest_nice** *(Linux 3.2.0+)*
+
+  When *percpu* is ``True`` returns a list of namedtuples for each logical CPU
+  on the system.
+  First element of the list refers to first CPU, second element to second CPU
+  and so on.
+  The order of the list is consistent across calls.
+  Example output on Linux:
+
+>>> import psutil
+>>> psutil.cpu_times()
+scputimes(user=17411.7, nice=77.99, system=3797.02, idle=51266.57, 
iowait=732.58, irq=0.01, softirq=142.43, steal=0.0, guest=0.0, guest_nice=0.0)
+
+.. function:: cpu_percent(interval=None, percpu=False)
+
+  Return a float representing the current system-wide CPU utilization as a
+  percentage. When *interval* is > ``0.0`` it compares system CPU times elapsed
+  before and after the interval (blocking).
+  When *interval* is ``0.0`` or ``None`` it compares system CPU times elapsed
+  since the last call or module import, returning immediately.
+  That means the first time this is called it will return a meaningless ``0.0``
+  value which you are supposed to ignore.
+  In this case it is recommended, for accuracy, that this function be called with
+  at least ``0.1`` seconds between calls.
+  When *percpu* is ``True`` returns a list of floats representing the
+  utilization as a percentage for each CPU.
+  First element of the list refers to first CPU, second element to second CPU
+  and so on. The order of the list is consistent across calls.
+
+>>> import psutil
+>>> # blocking
+>>> psutil.cpu_percent(interval=1)
+2.0
+>>> # non-blocking (percentage since last call)
+>>> psutil.cpu_percent(interval=None)
+2.9
+>>> # blocking, per-cpu
+>>> psutil.cpu_percent(interval=1, percpu=True)
+[2.0, 1.0]
+>>>
+
+  .. warning::
+
+the first time this function is called with *interval* = ``0.0`` or 
``None``
+it will return a meaningless ``0.0`` value which you are supposed to
+ignore.
+
+.. function:: cpu_times_percent(interval=None, percpu=False)
+
+  Same as :func:`cpu_percent()` but provides utilization percentages for each
+  specific CPU time as is returned by
+  :func:`psutil.cpu_times(percpu=True)`.
+  *interval* and
+  *percpu* arguments have the same me
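A usage sketch for cpu_times_percent() in the same doctest style as the examples above; the numbers shown are illustrative, not real output:

>>> import psutil
>>> psutil.cpu_times_percent(interval=1)
scputimes(user=1.5, nice=0.0, system=0.6, idle=97.4, iowait=0.3, irq=0.0, softirq=0.1, steal=0.0, guest=0.0, guest_nice=0.0)

As with cpu_percent(), an interval of ``0.0`` or ``None`` makes the call non-blocking, and the first such call should be ignored.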

[10/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
new file mode 100644
index 000..4411be5
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
+  .loadsimulator.net;
+
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.assertEquals;
+
+public class TestRestMetricsSender {
+
+  @Test
+  public void testPushMetrics() throws Exception {
+final UrlService svcMock = createStrictMock(UrlService.class);
+final String payload = "test";
+final String expectedResponse = "mockResponse";
+
+expect(svcMock.send(anyString())).andReturn(expectedResponse);
+svcMock.disconnect();
+expectLastCall();
+
+replay(svcMock);
+
+RestMetricsSender sender = new RestMetricsSender("expectedHostName") {
+  @Override
+  protected UrlService getConnectedUrlService() throws IOException {
+return svcMock;
+  }
+};
+String response = sender.pushMetrics(payload);
+
+verify(svcMock);
+assertEquals("", expectedResponse, response);
+  }
+
+  @Test
+  public void testPushMetricsFailed() throws Exception {
+final UrlService svcMock = createStrictMock(UrlService.class);
+final String payload = "test";
+final String expectedResponse = "mockResponse";
+RestMetricsSender sender = new RestMetricsSender("expectedHostName") {
+  @Override
+  protected UrlService getConnectedUrlService() throws IOException {
+return svcMock;
+  }
+};
+
+expect(svcMock.send(anyString())).andThrow(new IOException());
+svcMock.disconnect();
+expectLastCall();
+
+replay(svcMock);
+
+String response = sender.pushMetrics(payload);
+
+verify(svcMock);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java
new file mode 100644
index 000..7e29ae3
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS

[30/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. 
(swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a52f8a55
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a52f8a55
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a52f8a55

Branch: refs/heads/trunk
Commit: a52f8a5572f689351a22d41f62bd083c27a3bfa8
Parents: c6f5743
Author: Siddharth Wagle 
Authored: Mon Dec 1 14:49:42 2014 -0800
Committer: Siddharth Wagle 
Committed: Tue Dec 2 09:26:41 2014 -0800

--
 .gitignore  |1 +
 .../ambari-metrics-hadoop-sink/pom.xml  |  188 +
 .../src/main/assemblies/empty.xml   |   21 +
 .../src/main/assemblies/sink.xml|   34 +
 .../conf/hadoop-metrics2-hbase.properties.j2|   49 +
 .../src/main/conf/hadoop-metrics2.properties.j2 |   58 +
 .../timeline/AbstractTimelineMetricsSink.java   |  101 +
 .../metrics2/sink/timeline/TimelineMetric.java  |  172 +
 .../metrics2/sink/timeline/TimelineMetrics.java |  102 +
 .../sink/timeline/TimelineMetricsCache.java |  128 +
 .../sink/timeline/TimelineMetricsSink.java  |  211 ++
 .../conf/unix/ambari-metrics-monitor|  203 ++
 .../ambari-metrics-host-monitoring/pom.xml  |  273 ++
 .../src/main/package/rpm/preremove.sh   |   28 +
 .../src/main/python/__init__.py |   21 +
 .../src/main/python/core/__init__.py|   33 +
 .../main/python/core/application_metric_map.py  |  130 +
 .../src/main/python/core/config_reader.py   |  127 +
 .../src/main/python/core/controller.py  |  103 +
 .../src/main/python/core/emitter.py |   88 +
 .../src/main/python/core/event_definition.py|   85 +
 .../src/main/python/core/host_info.py   |  190 +
 .../src/main/python/core/metric_collector.py|   87 +
 .../src/main/python/main.py |   64 +
 .../src/main/python/psutil/LICENSE  |   27 +
 .../src/main/python/psutil/MANIFEST.in  |   14 +
 .../src/main/python/psutil/Makefile |   77 +
 .../src/main/python/psutil/README   |  270 ++
 .../src/main/python/psutil/build.py |   57 +
 .../src/main/python/psutil/docs/Makefile|  177 +
 .../src/main/python/psutil/docs/README  |   15 +
 .../python/psutil/docs/_static/copybutton.js|   57 +
 .../main/python/psutil/docs/_static/sidebar.js  |  161 +
 .../python/psutil/docs/_template/globaltoc.html |   12 +
 .../psutil/docs/_template/indexcontent.html |4 +
 .../psutil/docs/_template/indexsidebar.html |   16 +
 .../main/python/psutil/docs/_template/page.html |   66 +
 .../_themes/pydoctheme/static/pydoctheme.css|  187 +
 .../psutil/docs/_themes/pydoctheme/theme.conf   |   23 +
 .../src/main/python/psutil/docs/conf.py |  253 ++
 .../src/main/python/psutil/docs/index.rst   | 1247 +++
 .../src/main/python/psutil/docs/make.bat|  242 ++
 .../main/python/psutil/examples/disk_usage.py   |   63 +
 .../src/main/python/psutil/examples/free.py |   42 +
 .../src/main/python/psutil/examples/iotop.py|  178 +
 .../src/main/python/psutil/examples/killall.py  |   32 +
 .../src/main/python/psutil/examples/meminfo.py  |   69 +
 .../src/main/python/psutil/examples/netstat.py  |   65 +
 .../src/main/python/psutil/examples/nettop.py   |  165 +
 .../src/main/python/psutil/examples/pmap.py |   58 +
 .../python/psutil/examples/process_detail.py|  162 +
 .../src/main/python/psutil/examples/top.py  |  232 ++
 .../src/main/python/psutil/examples/who.py  |   34 +
 .../src/main/python/psutil/make.bat |  176 +
 .../src/main/python/psutil/psutil/__init__.py   | 1987 +++
 .../src/main/python/psutil/psutil/_common.py|  258 ++
 .../src/main/python/psutil/psutil/_compat.py|  433 +++
 .../src/main/python/psutil/psutil/_psbsd.py |  389 +++
 .../src/main/python/psutil/psutil/_pslinux.py   | 1225 +++
 .../src/main/python/psutil/psutil/_psosx.py |  341 ++
 .../src/main/python/psutil/psutil/_psposix.py   |  157 +
 .../src/main/python/psutil/psutil/_pssunos.py   |  533 +++
 .../src/main/python/psutil/psutil/_psutil_bsd.c | 2212 
 .../src/main/python/psutil/psutil/_psutil_bsd.h |   51 +
 .../main/python/psutil/psutil/_psutil_common.c  |   37 +
 .../main/python/psutil/psutil/_psutil_common.h  |   10 +
 .../main/python/psutil/psutil/_psutil_linux.c   |  510 +++
 .../main/python/psutil/psutil/_psutil_linux.h   |   20 +
 .../src/main/python/psutil/psutil/_psutil_osx.c | 1881 ++
 .../src/main/python/psutil/psutil/_psutil_osx.h |   41 +
 .../main/python/psutil/psutil/_psutil_posix.c   |  128 +
 .../main/python/psutil/psutil/_psutil_posix.h   |   10 +
 .../main/python/psutil/psutil/_psutil_sunos.c   | 1290 +++
 .../main/python/psutil/psutil/_psutil_sunos.h

[13/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java
new file mode 100644
index 000..edd4842
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/LeveldbTimelineStore.java
@@ -0,0 +1,1473 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeMap;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.collections.map.LRUMap;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.WritableComparator;
+import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import 
org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
+import 
org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.fusesource.leveldbjni.JniDBFactory;
+import org.iq80.leveldb.DB;
+import org.iq80.leveldb.DBIterator;
+import org.iq80.leveldb.Options;
+import org.iq80.leveldb.ReadOptions;
+import org.iq80.leveldb.WriteBatch;
+import org.iq80.leveldb.WriteOptions;
+
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.readReverseOrderedLong;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.writeReverseOrderedLong;
+
+/**
+ * An implementation of an application timeline store backed by leveldb.
+ *
+ * There are three sections of the db, the start time section,
+ * the entity section, and the indexed entity section.
+ *
+ * The start time section is used to retrieve the unique start time for
+ * a given entity. Its values each contain a start time while its keys are of
+ * the form:
+ * 
+ *   START_TIME_LOOKUP_PREFIX + entity type + entity id
+ *
+ * The entity section is ordered by entity type, then entity start time
+ * descending, then entity ID. There are four sub-sections of the entity
+ * section: events, primary filters, related entities,
+ * and other info. The event entries have event info serialized into their
+ * values. The other info entries have values corresponding to the values of
+ * the other info name/value map for the entry (note the names are contained
+ * in the key). All other entries have empty values. The key structure is as
+ * follows:
+ * 
+ *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id
+ *
+ *   ENTITY_ENTRY_PREFIX + entity type + revstarttime + entity id +
+ * EVENTS_COLUMN +

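A rough, self-contained sketch of the start-time lookup key layout described in the javadoc above; the prefix value and helper class are hypothetical, only the component ordering follows the documented scheme:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Hypothetical illustration of: START_TIME_LOOKUP_PREFIX + entity type + entity id
class TimelineKeySketch {
  private static final byte[] START_TIME_LOOKUP_PREFIX = { 1 };  // placeholder marker

  static byte[] startTimeKey(String entityType, String entityId) throws IOException {
    ByteArrayOutputStream key = new ByteArrayOutputStream();
    key.write(START_TIME_LOOKUP_PREFIX);                      // section marker
    key.write(entityType.getBytes(StandardCharsets.UTF_8));   // e.g. "YARN_APPLICATION"
    key.write(entityId.getBytes(StandardCharsets.UTF_8));     // application/entity id
    return key.toByteArray();
  }
}

Entity-section keys additionally embed writeReverseOrderedLong(startTime) between the type and the id, which is what makes newer entities sort first in the scan order.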
[04/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/hbase_master_jaas.conf.j2
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/hbase_master_jaas.conf.j2
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/hbase_master_jaas.conf.j2
new file mode 100644
index 000..a93c36c
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/hbase_master_jaas.conf.j2
@@ -0,0 +1,26 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+Client {
+com.sun.security.auth.module.Krb5LoginModule required
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{master_keytab_path}}"
+principal="{{master_jaas_princ}}";
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/hbase_regionserver_jaas.conf.j2
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/hbase_regionserver_jaas.conf.j2
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/hbase_regionserver_jaas.conf.j2
new file mode 100644
index 000..7097481
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/hbase_regionserver_jaas.conf.j2
@@ -0,0 +1,26 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+Client {
+com.sun.security.auth.module.Krb5LoginModule required
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{regionserver_keytab_path}}"
+principal="{{regionserver_jaas_princ}}";
+};

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/metric_groups.conf.j2
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/metric_groups.conf.j2
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/metric_groups.conf.j2
new file mode 100644
index 000..aa03d19
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/package/templates/metric_groups.conf.j2
@@ -0,0 +1,37 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+{
+   "host_metric_groups": {
+  "all": {
+ "collect_every": "10",
+ "metrics": [
+{
+   "name": "bytes_out",
+   "value_threshold": "128"
+}
+ 

[20/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_windows.h
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_windows.h
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_windows.h
new file mode 100644
index 000..546704e
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_windows.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include 
+#include 
+
+// --- per-process functions
+
+static PyObject* psutil_proc_cmdline(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_affinity_get(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_affinity_set(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cpu_times_2(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_create_time(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_create_time_2(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_cwd(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_exe(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_io_counters_2(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_is_suspended(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_kill(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_memory_info(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_memory_info_2(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_memory_maps(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_num_ctx_switches(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_num_handles(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_num_handles_2(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_num_threads(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_open_files(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_priority_get(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_priority_set(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_resume(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_suspend(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_threads(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_username(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_wait(PyObject* self, PyObject* args);
+
+#if (PSUTIL_WINVER >= 0x0600)  // Windows Vista
+static PyObject* psutil_proc_io_priority_get(PyObject* self, PyObject* args);
+static PyObject* psutil_proc_io_priority_set(PyObject* self, PyObject* args);
+#endif
+
+// --- system-related functions
+
+static PyObject* psutil_boot_time(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_count_logical(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_count_phys(PyObject* self, PyObject* args);
+static PyObject* psutil_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_partitions(PyObject* self, PyObject* args);
+static PyObject* psutil_disk_usage(PyObject* self, PyObject* args);
+static PyObject* psutil_net_connections(PyObject* self, PyObject* args);
+static PyObject* psutil_net_io_counters(PyObject* self, PyObject* args);
+static PyObject* psutil_per_cpu_times(PyObject* self, PyObject* args);
+static PyObject* psutil_pid_exists(PyObject* self, PyObject* args);
+static PyObject* psutil_pids(PyObject* self, PyObject* args);
+static PyObject* psutil_ppid_map(PyObject* self, PyObject* args);
+static PyObject* psutil_users(PyObject* self, PyObject* args);
+static PyObject* psutil_virtual_mem(PyObject* self, PyObject* args);
+
+// --- windows API bindings
+
+static PyObject* psutil_win32_QueryDosDevice(PyObject* self, PyObject* args);
+
+// --- internal
+
+int psutil_proc_suspend_or_resume(DWORD pid, int suspend);

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_pswindows.py
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_pswindows.py
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_pswindows.py
new file mode 100644
index 000..1a786f1
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_pswind

[14/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationFinishDataPBImpl.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationFinishDataPBImpl.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationFinishDataPBImpl.java
new file mode 100644
index 000..337426d
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/impl/pb/ApplicationFinishDataPBImpl.java
@@ -0,0 +1,226 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb;
+
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
+import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils;
+import 
org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto;
+import 
org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProtoOrBuilder;
+import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
+
+import com.google.protobuf.TextFormat;
+
+public class ApplicationFinishDataPBImpl extends ApplicationFinishData {
+
+  ApplicationFinishDataProto proto = ApplicationFinishDataProto
+.getDefaultInstance();
+  ApplicationFinishDataProto.Builder builder = null;
+  boolean viaProto = false;
+
+  private ApplicationId applicationId;
+
+  public ApplicationFinishDataPBImpl() {
+builder = ApplicationFinishDataProto.newBuilder();
+  }
+
+  public ApplicationFinishDataPBImpl(ApplicationFinishDataProto proto) {
+this.proto = proto;
+viaProto = true;
+  }
+
+  @Override
+  public ApplicationId getApplicationId() {
+if (this.applicationId != null) {
+  return this.applicationId;
+}
+ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder;
+if (!p.hasApplicationId()) {
+  return null;
+}
+this.applicationId = convertFromProtoFormat(p.getApplicationId());
+return this.applicationId;
+  }
+
+  @Override
+  public void setApplicationId(ApplicationId applicationId) {
+maybeInitBuilder();
+if (applicationId == null) {
+  builder.clearApplicationId();
+}
+this.applicationId = applicationId;
+  }
+
+  @Override
+  public long getFinishTime() {
+ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder;
+return p.getFinishTime();
+  }
+
+  @Override
+  public void setFinishTime(long finishTime) {
+maybeInitBuilder();
+builder.setFinishTime(finishTime);
+  }
+
+  @Override
+  public String getDiagnosticsInfo() {
+ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder;
+if (!p.hasDiagnosticsInfo()) {
+  return null;
+}
+return p.getDiagnosticsInfo();
+  }
+
+  @Override
+  public void setDiagnosticsInfo(String diagnosticsInfo) {
+maybeInitBuilder();
+if (diagnosticsInfo == null) {
+  builder.clearDiagnosticsInfo();
+  return;
+}
+builder.setDiagnosticsInfo(diagnosticsInfo);
+  }
+
+  @Override
+  public FinalApplicationStatus getFinalApplicationStatus() {
+ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder;
+if (!p.hasFinalApplicationStatus()) {
+  return null;
+}
+return convertFromProtoFormat(p.getFinalApplicationStatus());
+  }
+
+  @Override
+  public void setFinalApplicationStatus(
+  FinalApplicationStatus finalApplicationS

[16/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/DefaultPhoenixDataSource.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/DefaultPhoenixDataSource.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/DefaultPhoenixDataSource.java
new file mode 100644
index 000..652c492
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/DefaultPhoenixDataSource.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
+  .timeline;
+
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+
+public class DefaultPhoenixDataSource implements ConnectionProvider {
+
+  static final Log LOG = LogFactory.getLog(DefaultPhoenixDataSource.class);
+  private static final String ZOOKEEPER_CLIENT_PORT =
+"hbase.zookeeper.property.clientPort";
+  private static final String ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";
+  private static final String ZNODE_PARENT = "zookeeper.znode.parent";
+
+  private static final String connectionUrl = "jdbc:phoenix:%s:%s:%s";
+  private final String url;
+
+  public DefaultPhoenixDataSource(Configuration hbaseConf) {
+String zookeeperClientPort = hbaseConf.getTrimmed(ZOOKEEPER_CLIENT_PORT,
+  "2181");
+String zookeeperQuorum = hbaseConf.getTrimmed(ZOOKEEPER_QUORUM);
+String znodeParent = hbaseConf.getTrimmed(ZNODE_PARENT, "/hbase");
+if (zookeeperQuorum == null || zookeeperQuorum.isEmpty()) {
+  throw new IllegalStateException("Unable to find Zookeeper quorum to " +
+"access HBase store using Phoenix.");
+}
+
+url = String.format(connectionUrl,
+  zookeeperQuorum,
+  zookeeperClientPort,
+  znodeParent);
+  }
+
+  /**
+   * Get JDBC connection to HBase store. Assumption is that the hbase
+   * configuration is present on the classpath and loaded by the caller into
+   * the Configuration object.
+   * Phoenix already caches the HConnection between the client and HBase
+   * cluster.
+   *
+   * @return @java.sql.Connection
+   */
+  public Connection getConnection() throws SQLException {
+
+LOG.debug("Metric store connection url: " + url);
+try {
+  return DriverManager.getConnection(url);
+} catch (SQLException e) {
+  LOG.warn("Unable to connect to HBase store using Phoenix.", e);
+
+  throw e;
+}
+  }
+
+}

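A short usage sketch for the provider above; the caller supplies an HBase Configuration carrying the ZooKeeper settings (the host names and values below are illustrative):

import java.sql.Connection;
import org.apache.hadoop.conf.Configuration;

public class PhoenixDataSourceExample {
  public static void main(String[] args) throws Exception {
    // Normally these values come from hbase-site.xml on the classpath; they are
    // set by hand here only for illustration.
    Configuration hbaseConf = new Configuration();
    hbaseConf.set("hbase.zookeeper.quorum", "zk1.example.com");
    hbaseConf.set("hbase.zookeeper.property.clientPort", "2181");  // default 2181
    hbaseConf.set("zookeeper.znode.parent", "/hbase");             // default /hbase

    DefaultPhoenixDataSource dataSource = new DefaultPhoenixDataSource(hbaseConf);
    try (Connection conn = dataSource.getConnection()) {
      // The provider assembles the URL as jdbc:phoenix:<quorum>:<port>:<znode>
      System.out.println("Connected: " + conn.getMetaData().getURL());
    }
  }
}

The provider only builds the jdbc:phoenix URL and opens connections; connection caching is left to Phoenix itself, as the javadoc above notes.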
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
new file mode 100644
index 0000000..9364187
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/HBaseTimelineMetricStore.java
@@ -0,0 +1,192 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you m

[22/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_sunos.c
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_sunos.c
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_sunos.c
new file mode 100644
index 0000000..f02415c
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_sunos.c
@@ -0,0 +1,1290 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Functions specific to Sun OS Solaris platforms.
+ *
+ * Thanks to Justin Venus who originally wrote a consistent part of
+ * this in Cython which I later on translated in C.
+ */
+
+
+#include 
+
+// fix for "Cannot use procfs in the large file compilation environment"
+// error, see:
+// http://sourceware.org/ml/gdb-patches/2010-11/msg00336.html
+#undef _FILE_OFFSET_BITS
+#define _STRUCTURED_PROC 1
+
+// fix compilation issue on SunOS 5.10, see:
+// https://code.google.com/p/psutil/issues/detail?id=421
+#define NEW_MIB_COMPLIANT
+
+#include 
+#include 
+#include 
+#include 
+#include 
+#include   // for MNTTAB
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+
+#include "_psutil_sunos.h"
+
+
+#define TV2DOUBLE(t)   (((t).tv_nsec * 0.000000001) + (t).tv_sec)
+
+/*
+ * Read a file content and fills a C structure with it.
+ */
+int
+psutil_file_to_struct(char *path, void *fstruct, size_t size)
+{
+int fd;
+size_t nbytes;
+fd = open(path, O_RDONLY);
+if (fd == -1) {
+PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+return 0;
+}
+nbytes = read(fd, fstruct, size);
+if (nbytes <= 0) {
+close(fd);
+PyErr_SetFromErrno(PyExc_OSError);
+return 0;
+}
+if (nbytes != size) {
+close(fd);
+PyErr_SetString(PyExc_RuntimeError, "structure size mismatch");
+return 0;
+}
+close(fd);
+return nbytes;
+}
+
+
+/*
+ * Return process ppid, rss, vms, ctime, nice, nthreads, status and tty
+ * as a Python tuple.
+ */
+static PyObject *
+psutil_proc_basic_info(PyObject *self, PyObject *args)
+{
+int pid;
+char path[100];
+psinfo_t info;
+
+if (! PyArg_ParseTuple(args, "i", &pid))
+return NULL;
+sprintf(path, "/proc/%i/psinfo", pid);
+if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+return NULL;
+return Py_BuildValue("ikkdiiik",
+ info.pr_ppid,  // parent pid
+ info.pr_rssize,// rss
+ info.pr_size,  // vms
+ TV2DOUBLE(info.pr_start),  // create time
+ info.pr_lwp.pr_nice,   // nice
+ info.pr_nlwp,  // no. of threads
+ info.pr_lwp.pr_state,  // status code
+ info.pr_ttydev // tty nr
+);
+}
+
+
+/*
+ * Return process name and args as a Python tuple.
+ */
+static PyObject *
+psutil_proc_name_and_args(PyObject *self, PyObject *args)
+{
+int pid;
+char path[100];
+psinfo_t info;
+
+if (! PyArg_ParseTuple(args, "i", &pid))
+return NULL;
+sprintf(path, "/proc/%i/psinfo", pid);
+if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+return NULL;
+return Py_BuildValue("ss", info.pr_fname, info.pr_psargs);
+}
+
+
+/*
+ * Return process user and system CPU times as a Python tuple.
+ */
+static PyObject *
+psutil_proc_cpu_times(PyObject *self, PyObject *args)
+{
+int pid;
+char path[100];
+pstatus_t info;
+
+if (! PyArg_ParseTuple(args, "i", &pid))
+return NULL;
+sprintf(path, "/proc/%i/status", pid);
+if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+return NULL;
+// results are more precise than os.times()
+return Py_BuildValue("dd",
+ TV2DOUBLE(info.pr_utime),
+ TV2DOUBLE(info.pr_stime));
+}
+
+
+/*
+ * Return process uids/gids as a Python tuple.
+ */
+static PyObject *
+psutil_proc_cred(PyObject *self, PyObject *args)
+{
+int pid;
+char path[100];
+prcred_t info;
+
+if (! PyArg_ParseTuple(args, "i", &pid))
+return NULL;
+sprintf(path, "/proc/%i/cred", pid);
+if (! psutil_file_to_struct(path, (void *)&info, sizeof(info)))
+return NULL;
+return Py_BuildValue("iiiiii",
+ info.pr_ruid, info.pr_euid, info.pr_suid,
+ info.pr_rgid, info.pr_egid, info.pr_sgid);
+}
+
+
+/*
+ * Return process uids/gids as a Python tuple.
+
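
The Solaris routines above read /proc/<pid>/psinfo, /proc/<pid>/status and /proc/<pid>/cred and hand the results back to Python. A minimal sketch of how the same data is reached through psutil's public API (assuming psutil 2.x method names; older releases exposed these as properties):

import os
import psutil

p = psutil.Process(os.getpid())
print p.name()        # backed by psutil_proc_name_and_args()
print p.ppid()        # parent pid, from psutil_proc_basic_info()
print p.cpu_times()   # user/system seconds, from psutil_proc_cpu_times()
print p.uids()        # real/effective/saved uids, from psutil_proc_cred()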

[23/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_osx.c
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_osx.c
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_osx.c
new file mode 100644
index 0000000..0c83345
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_osx.c
@@ -0,0 +1,1881 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * OS X platform-specific module methods for _psutil_osx
+ */
+
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+
+#include 
+
+#include 
+#include 
+#include 
+#include 
+#include 
+
+#include "_psutil_osx.h"
+#include "_psutil_common.h"
+#include "arch/osx/process_info.h"
+
+
+/*
+ * A wrapper around host_statistics() invoked with HOST_VM_INFO.
+ */
+int
+psutil_sys_vminfo(vm_statistics_data_t *vmstat)
+{
+kern_return_t ret;
+mach_msg_type_number_t count = sizeof(*vmstat) / sizeof(integer_t);
+mach_port_t mport = mach_host_self();
+
+ret = host_statistics(mport, HOST_VM_INFO, (host_info_t)vmstat, &count);
+if (ret != KERN_SUCCESS) {
+PyErr_Format(PyExc_RuntimeError,
+ "host_statistics() failed: %s", mach_error_string(ret));
+return 0;
+}
+mach_port_deallocate(mach_task_self(), mport);
+return 1;
+}
+
+
+/*
+ * Return a Python list of all the PIDs running on the system.
+ */
+static PyObject *
+psutil_pids(PyObject *self, PyObject *args)
+{
+kinfo_proc *proclist = NULL;
+kinfo_proc *orig_address = NULL;
+size_t num_processes;
+size_t idx;
+PyObject *pid = NULL;
+PyObject *retlist = PyList_New(0);
+
+if (retlist == NULL)
+return NULL;
+
+if (psutil_get_proc_list(&proclist, &num_processes) != 0) {
+PyErr_SetString(PyExc_RuntimeError,
+"failed to retrieve process list.");
+goto error;
+}
+
+if (num_processes > 0) {
+// save the address of proclist so we can free it later
+orig_address = proclist;
+for (idx = 0; idx < num_processes; idx++) {
+pid = Py_BuildValue("i", proclist->kp_proc.p_pid);
+if (!pid)
+goto error;
+if (PyList_Append(retlist, pid))
+goto error;
+Py_DECREF(pid);
+proclist++;
+}
+free(orig_address);
+}
+return retlist;
+
+error:
+Py_XDECREF(pid);
+Py_DECREF(retlist);
+if (orig_address != NULL)
+free(orig_address);
+return NULL;
+}
+
+
+/*
+ * Return process name from kinfo_proc as a Python string.
+ */
+static PyObject *
+psutil_proc_name(PyObject *self, PyObject *args)
+{
+long pid;
+struct kinfo_proc kp;
+if (! PyArg_ParseTuple(args, "l", &pid)) {
+return NULL;
+}
+if (psutil_get_kinfo_proc(pid, &kp) == -1) {
+return NULL;
+}
+return Py_BuildValue("s", kp.kp_proc.p_comm);
+}
+
+
+/*
+ * Return process current working directory.
+ */
+static PyObject *
+psutil_proc_cwd(PyObject *self, PyObject *args)
+{
+long pid;
+struct proc_vnodepathinfo pathinfo;
+
+if (! PyArg_ParseTuple(args, "l", &pid)) {
+return NULL;
+}
+
+if (! psutil_proc_pidinfo(pid, PROC_PIDVNODEPATHINFO, &pathinfo,
+  sizeof(pathinfo)))
+{
+return NULL;
+}
+return Py_BuildValue("s", pathinfo.pvi_cdir.vip_path);
+}
+
+
+/*
+ * Return path of the process executable.
+ */
+static PyObject *
+psutil_proc_exe(PyObject *self, PyObject *args)
+{
+long pid;
+char buf[PATH_MAX];
+int ret;
+
+if (! PyArg_ParseTuple(args, "l", &pid)) {
+return NULL;
+}
+ret = proc_pidpath(pid, &buf, sizeof(buf));
+if (ret == 0) {
+if (! psutil_pid_exists(pid)) {
+return NoSuchProcess();
+}
+else {
+return AccessDenied();
+}
+}
+return Py_BuildValue("s", buf);
+}
+
+
+/*
+ * Return process cmdline as a Python list of cmdline arguments.
+ */
+static PyObject *
+psutil_proc_cmdline(PyObject *self, PyObject *args)
+{
+long pid;
+PyObject *arglist = NULL;
+
+if (! PyArg_ParseTuple(args, "l", &pid)) {
+return NULL;
+}
+
+// get the commandline, defined in arch/osx/process_info.c
+arglist = psutil_get_arg_list(pid);
+return arglist;
+}
+
+
+/*
+ * Return process parent pid from kinfo_proc as a Python integer.
+ */
+static PyObject *
+psut
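
The OS X module above backs system-wide PID enumeration and per-process path lookups. A small sketch of the corresponding Python-level calls (psutil 2.x API; processes that cannot be inspected are skipped):

import psutil

for pid in psutil.pids():                # served by psutil_pids()
    try:
        p = psutil.Process(pid)
        print pid, p.exe(), p.cwd()      # psutil_proc_exe() / psutil_proc_cwd()
        print "   ", p.cmdline()         # psutil_proc_cmdline()
    except (psutil.NoSuchProcess, psutil.AccessDenied):
        continue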

[19/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/setup.py
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/setup.py 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/setup.py
new file mode 100644
index 0000000..98b24a1
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/setup.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""psutil is a cross-platform library for retrieving information on
+running processes and system utilization (CPU, memory, disks, network)
+in Python.
+"""
+
+import os
+import sys
+try:
+from setuptools import setup, Extension
+except ImportError:
+from distutils.core import setup, Extension
+
+
+HERE = os.path.abspath(os.path.dirname(__file__))
+
+
+def get_version():
+INIT = os.path.join(HERE, 'psutil/__init__.py')
+f = open(INIT, 'r')
+try:
+for line in f:
+if line.startswith('__version__'):
+ret = eval(line.strip().split(' = ')[1])
+assert ret.count('.') == 2, ret
+for num in ret.split('.'):
+assert num.isdigit(), ret
+return ret
+else:
+raise ValueError("couldn't find version string")
+finally:
+f.close()
+
+
+def get_description():
+README = os.path.join(HERE, 'README')
+f = open(README, 'r')
+try:
+return f.read()
+finally:
+f.close()
+
+
+# POSIX
+if os.name == 'posix':
+posix_extension = Extension(
+'_psutil_posix',
+sources=['psutil/_psutil_posix.c'],
+)
+# Windows
+if sys.platform.startswith("win32"):
+
+def get_winver():
+maj, min = sys.getwindowsversion()[0:2]
+return '0x0%s' % ((maj * 100) + min)
+
+extensions = [Extension(
+'_psutil_windows',
+sources=[
+'psutil/_psutil_windows.c',
+'psutil/_psutil_common.c',
+'psutil/arch/windows/process_info.c',
+'psutil/arch/windows/process_handles.c',
+'psutil/arch/windows/security.c',
+],
+define_macros=[
+# be nice to mingw, see:
+# http://www.mingw.org/wiki/Use_more_recent_defined_functions
+('_WIN32_WINNT', get_winver()),
+('_AVAIL_WINVER_', get_winver()),
+# see: https://code.google.com/p/psutil/issues/detail?id=348
+('PSAPI_VERSION', 1),
+],
+libraries=[
+"psapi", "kernel32", "advapi32", "shell32", "netapi32", "iphlpapi",
+"wtsapi32",
+],
+# extra_compile_args=["/Z7"],
+# extra_link_args=["/DEBUG"]
+)]
+# OS X
+elif sys.platform.startswith("darwin"):
+extensions = [Extension(
+'_psutil_osx',
+sources=[
+'psutil/_psutil_osx.c',
+'psutil/_psutil_common.c',
+'psutil/arch/osx/process_info.c'
+],
+extra_link_args=[
+'-framework', 'CoreFoundation', '-framework', 'IOKit'
+],
+),
+posix_extension,
+]
+# FreeBSD
+elif sys.platform.startswith("freebsd"):
+extensions = [Extension(
+'_psutil_bsd',
+sources=[
+'psutil/_psutil_bsd.c',
+'psutil/_psutil_common.c',
+'psutil/arch/bsd/process_info.c'
+],
+libraries=["devstat"]),
+posix_extension,
+]
+# Linux
+elif sys.platform.startswith("linux"):
+extensions = [Extension(
+'_psutil_linux',
+sources=['psutil/_psutil_linux.c']),
+posix_extension,
+]
+# Solaris
+elif sys.platform.lower().startswith('sunos'):
+extensions = [Extension(
+'_psutil_sunos',
+sources=['psutil/_psutil_sunos.c'],
+libraries=['kstat', 'nsl'],),
+posix_extension,
+]
+else:
+sys.exit('platform %s is not supported' % sys.platform)
+
+
+def main():
+setup_args = dict(
+name='psutil',
+version=get_version(),
+description=__doc__,
+long_description=get_description(),
+keywords=[
+'ps', 'top', 'kill', 'free', 'lsof', 'netstat', 'nice',
+'tty', 'ionice', 'uptime', 'taskmgr', 'process', 'df',
+'iotop', 'iostat', 'ifconfig', 'taskset', 'who', 'pidof',
+'pmap', 'smem', 'monitoring', 'ulimit', 'prlimit',
+],
+author='Giampaolo Rodola',
+author_email='g.rodola  gmail  com',
+url='http://code.google.com/p/psutil/',
+platforms='Platform Independent',
+license='BSD',
+packages=['psutil'],
+# see: python setup.py register --list-classifiers
+classifiers=[

[26/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_common.py
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_common.py
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_common.py
new file mode 100644
index 0000000..3d2f27c
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_common.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common objects shared by all _ps* modules."""
+
+from __future__ import division
+import errno
+import os
+import socket
+import stat
+import sys
+import warnings
+try:
+import threading
+except ImportError:
+import dummy_threading as threading
+
+from socket import AF_INET, SOCK_STREAM, SOCK_DGRAM
+
+from psutil._compat import namedtuple, wraps
+
+# --- constants
+
+AF_INET6 = getattr(socket, 'AF_INET6', None)
+AF_UNIX = getattr(socket, 'AF_UNIX', None)
+
+STATUS_RUNNING = "running"
+STATUS_SLEEPING = "sleeping"
+STATUS_DISK_SLEEP = "disk-sleep"
+STATUS_STOPPED = "stopped"
+STATUS_TRACING_STOP = "tracing-stop"
+STATUS_ZOMBIE = "zombie"
+STATUS_DEAD = "dead"
+STATUS_WAKE_KILL = "wake-kill"
+STATUS_WAKING = "waking"
+STATUS_IDLE = "idle"  # BSD
+STATUS_LOCKED = "locked"  # BSD
+STATUS_WAITING = "waiting"  # BSD
+
+CONN_ESTABLISHED = "ESTABLISHED"
+CONN_SYN_SENT = "SYN_SENT"
+CONN_SYN_RECV = "SYN_RECV"
+CONN_FIN_WAIT1 = "FIN_WAIT1"
+CONN_FIN_WAIT2 = "FIN_WAIT2"
+CONN_TIME_WAIT = "TIME_WAIT"
+CONN_CLOSE = "CLOSE"
+CONN_CLOSE_WAIT = "CLOSE_WAIT"
+CONN_LAST_ACK = "LAST_ACK"
+CONN_LISTEN = "LISTEN"
+CONN_CLOSING = "CLOSING"
+CONN_NONE = "NONE"
+
+
+# --- functions
+
+def usage_percent(used, total, _round=None):
+"""Calculate percentage usage of 'used' against 'total'."""
+try:
+ret = (used / total) * 100
+except ZeroDivisionError:
+ret = 0
+if _round is not None:
+return round(ret, _round)
+else:
+return ret
+
+
+def memoize(fun):
+"""A simple memoize decorator for functions supporting (hashable)
+positional arguments.
+It also provides a cache_clear() function for clearing the cache:
+
+>>> @memoize
+... def foo()
+... return 1
+...
+>>> foo()
+1
+>>> foo.cache_clear()
+>>>
+"""
+@wraps(fun)
+def wrapper(*args, **kwargs):
+key = (args, frozenset(sorted(kwargs.items())))
+lock.acquire()
+try:
+try:
+return cache[key]
+except KeyError:
+ret = cache[key] = fun(*args, **kwargs)
+finally:
+lock.release()
+return ret
+
+def cache_clear():
+"""Clear cache."""
+lock.acquire()
+try:
+cache.clear()
+finally:
+lock.release()
+
+lock = threading.RLock()
+cache = {}
+wrapper.cache_clear = cache_clear
+return wrapper
+
+
+# http://code.activestate.com/recipes/577819-deprecated-decorator/
+def deprecated(replacement=None):
+"""A decorator which can be used to mark functions as deprecated."""
+def outer(fun):
+msg = "psutil.%s is deprecated" % fun.__name__
+if replacement is not None:
+msg += "; use %s instead" % replacement
+if fun.__doc__ is None:
+fun.__doc__ = msg
+
+@wraps(fun)
+def inner(*args, **kwargs):
+warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
+return fun(*args, **kwargs)
+
+return inner
+return outer
+
+
+def deprecated_method(replacement):
+"""A decorator which can be used to mark a method as deprecated
+'replacement' is the method name which will be called instead.
+"""
+def outer(fun):
+msg = "%s() is deprecated; use %s() instead" % (
+fun.__name__, replacement)
+if fun.__doc__ is None:
+fun.__doc__ = msg
+
+@wraps(fun)
+def inner(self, *args, **kwargs):
+warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
+return getattr(self, replacement)(*args, **kwargs)
+return inner
+return outer
+
+
+def isfile_strict(path):
+"""Same as os.path.isfile() but does not swallow EACCES / EPERM
+exceptions, see:
+http://mail.python.org/pipermail/python-dev/2012-June/120787.html
+"""
+try:
+st = os.stat(path)
+except OSError:
+err = sys.exc_info()[1]
+if err.errno in (errno.EPERM, errno.EACCES):
+raise
+return False
+else:
+return stat.S_ISREG(st.st_mode)
+
+
+# --- Process.connections() 'kind' parameter mapping
+
+conn_tmap = {
+"all": ([AF_INET, 
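
A quick usage sketch for the helpers defined above; the import path assumes the vendored psutil package layout shown in this patch:

from psutil._common import memoize, usage_percent

print usage_percent(750, 1000, _round=1)   # -> 75.0

@memoize
def expensive(x):
    print "computing", x
    return x * 2

print expensive(21)       # computes and caches the result
print expensive(21)       # answered from the cache, no recomputation
expensive.cache_clear()   # drop the cached entries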

[21/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_windows.c
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_windows.c
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_windows.c
new file mode 100644
index 0000000..6694389
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_windows.c
@@ -0,0 +1,3241 @@
+/*
+ * Copyright (c) 2009, Jay Loden, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * Windows platform-specific module methods for _psutil_windows
+ */
+
+// Fixes clash between winsock2.h and windows.h
+#define WIN32_LEAN_AND_MEAN
+
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+
+// Link with Iphlpapi.lib
+#pragma comment(lib, "IPHLPAPI.lib")
+
+#include "_psutil_windows.h"
+#include "_psutil_common.h"
+#include "arch/windows/security.h"
+#include "arch/windows/process_info.h"
+#include "arch/windows/process_handles.h"
+#include "arch/windows/ntextapi.h"
+
+#ifdef __MINGW32__
+#include "arch/windows/glpi.h"
+#endif
+
+/*
+ * Return a Python float representing the system uptime expressed in seconds
+ * since the epoch.
+ */
+static PyObject *
+psutil_boot_time(PyObject *self, PyObject *args)
+{
+double  uptime;
+time_t pt;
+FILETIME fileTime;
+long long ll;
+
+GetSystemTimeAsFileTime(&fileTime);
+
+/*
+HUGE thanks to:
+http://johnstewien.spaces.live.com/blog/cns!E6885DB5CEBABBC8!831.entry
+
+This function converts the FILETIME structure to the 32 bit
+Unix time structure.
+The time_t is a 32-bit value for the number of seconds since
+January 1, 1970. A FILETIME is a 64-bit value for the number of
+100-nanosecond periods since January 1, 1601. Convert by
+subtracting the number of 100-nanosecond periods between 01-01-1601
+and 01-01-1970, then divide by 1e+7 to get to the same
+base granularity.
+*/
+ll = (((LONGLONG)(fileTime.dwHighDateTime)) << 32) \
++ fileTime.dwLowDateTime;
+pt = (time_t)((ll - 116444736000000000ull) / 10000000ull);
+
+// XXX - By using GetTickCount() time will wrap around to zero if the
+// system is run continuously for 49.7 days.
+uptime = GetTickCount() / 1000.00f;
+return Py_BuildValue("d", (double)pt - uptime);
+}
+
+
+/*
+ * Return 1 if PID exists in the current process list, else 0.
+ */
+static PyObject *
+psutil_pid_exists(PyObject *self, PyObject *args)
+{
+long pid;
+int status;
+
+if (! PyArg_ParseTuple(args, "l", &pid)) {
+return NULL;
+}
+
+status = psutil_pid_is_running(pid);
+if (-1 == status) {
+return NULL; // exception raised in psutil_pid_is_running()
+}
+return PyBool_FromLong(status);
+}
+
+
+/*
+ * Return a Python list of all the PIDs running on the system.
+ */
+static PyObject *
+psutil_pids(PyObject *self, PyObject *args)
+{
+DWORD *proclist = NULL;
+DWORD numberOfReturnedPIDs;
+DWORD i;
+PyObject *pid = NULL;
+PyObject *retlist = PyList_New(0);
+
+if (retlist == NULL) {
+return NULL;
+}
+proclist = psutil_get_pids(&numberOfReturnedPIDs);
+if (NULL == proclist) {
+goto error;
+}
+
+for (i = 0; i < numberOfReturnedPIDs; i++) {
+pid = Py_BuildValue("I", proclist[i]);
+if (!pid)
+goto error;
+if (PyList_Append(retlist, pid))
+goto error;
+Py_DECREF(pid);
+}
+
+// free C array allocated for PIDs
+free(proclist);
+return retlist;
+
+error:
+Py_XDECREF(pid);
+Py_DECREF(retlist);
+if (proclist != NULL)
+free(proclist);
+return NULL;
+}
+
+
+/*
+ * Kill a process given its PID.
+ */
+static PyObject *
+psutil_proc_kill(PyObject *self, PyObject *args)
+{
+HANDLE hProcess;
+long pid;
+
+if (! PyArg_ParseTuple(args, "l", &pid)) {
+return NULL;
+}
+if (pid == 0) {
+return AccessDenied();
+}
+
+hProcess = OpenProcess(PROCESS_TERMINATE, FALSE, pid);
+if (hProcess == NULL) {
+if (GetLastError() == ERROR_INVALID_PARAMETER) {
+// see http://code.google.com/p/psutil/issues/detail?id=24
+NoSuchProcess();
+}
+else {
+PyErr_SetFromWindowsErr(0);
+}
+return NULL;
+}
+
+// kill the process
+if (! TerminateProcess(hProcess, 0)) {
+PyErr_SetFromWindowsErr(0);
+CloseHandle(hProcess);
+return NULL;
+}
+
+CloseHandle(hProcess);
+Py_INCREF(Py_None);
+return Py_None;
+}
+
+
+/*
+ * Wait for process to terminate and return its exit code.
+
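
psutil_boot_time() above converts a FILETIME (100-nanosecond ticks since 1601-01-01) to Unix-epoch seconds and then subtracts the uptime reported by GetTickCount(). The same arithmetic, sketched in Python for clarity:

# Number of 100-nanosecond intervals between 1601-01-01 and 1970-01-01.
EPOCH_DELTA = 116444736000000000

def filetime_to_unix(high, low):
    ticks = (high << 32) + low            # 64-bit FILETIME value
    return (ticks - EPOCH_DELTA) / 10000000.0

def boot_time(high, low, tick_count_ms):
    # current time minus the uptime GetTickCount() reports in milliseconds
    return filetime_to_unix(high, low) - tick_count_ms / 1000.0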

[11/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/main/resources/scripts/ams_query.py
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/resources/scripts/ams_query.py
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/resources/scripts/ams_query.py
new file mode 100644
index 0000000..d51357a
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/resources/scripts/ams_query.py
@@ -0,0 +1,209 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import urllib2
+import signal
+import sys
+import optparse
+import time
+
+# http://162.216.148.45:8188/ws/v1/timeline/metrics?
+# metricNames=rpc.rpc.RpcAuthenticationSuccesses
+# &appId=nodemanager&hostname=local.0&startTime=1414152029&endTime=1414155629
+
+AMS_URL = "http://{0}:8188/ws/v1/timeline/metrics?metricNames={1}&appid={" \
+  "2}&hostname={3}"
+
+# in fact it can be list automatically generated from ambari
+# UI queries
+host_metrics = {
+  'cpu': ['cpu_user', 'cpu_wio', 'cpu_nice', 'cpu_aidle', 'cpu_system', 'cpu_idle'],
+  'disk': ['disk_total', 'disk_free'],
+  'load': ['load_one', 'load_fifteen', 'load_five'],
+  'mem': ['swap_free', 'mem_shared', 'mem_free', 'mem_cached', 'mem_buffers'],
+  'network': ['bytes_in', 'bytes_out', 'pkts_in', 'pkts_out'],
+  'process': ['proc_total', 'proc_run']
+}
+
+# HDFS_SERVICE
+namenode_metrics = {
+  'dfs.Capacity': ['dfs.FSNamesystem.CapacityRemainingGB',
+   'dfs.FSNamesystem.CapacityUsedGB',
+   'dfs.FSNamesystem.CapacityTotalGB'],
+  'dfs.Replication': ['dfs.FSNamesystem.PendingReplicationBlocks',
+  'dfs.FSNamesystem.UnderReplicatedBlocks'],
+  'dfs.File': ['dfs.namenode.FileInfoOps', 'dfs.namenode.CreateFileOps'],
+  'jvm.gc': ['jvm.JvmMetrics.GcTimeMillis'],
+  'jvm.mem': ['jvm.JvmMetrics.MemNonHeapUsedM',
+  'jvm.JvmMetrics.MemNonHeapCommittedM',
+  'jvm.JvmMetrics.MemHeapUsedM',
+  'jvm.JvmMetrics.MemHeapCommittedM'],
+  'jvm.thread': ['jvm.JvmMetrics.ThreadsRunnable',
+ 'jvm.JvmMetrics.ThreadsBlocked',
+ 'jvm.JvmMetrics.ThreadsWaiting',
+ 'jvm.JvmMetrics.ThreadsTimedWaiting'],
+  'rpc': ['rpc.rpc.RpcQueueTimeAvgTime']
+}
+
+all_metrics = {
+  'HOST': host_metrics,
+  'namenode': namenode_metrics
+}
+
+all_metrics_times = {}
+
+
+# hostnames = ['EPPLKRAW0101.0']  # 'local.0'
+# metrics_test_host = '162.216.150.247' # metricstest-100
+# metrics_test_host = '162.216.148.45'# br-3
+# start_time = int(time.time())   # 1414425208
+
+
+def main(argv=None):
+  # Allow Ctrl-C
+  signal.signal(signal.SIGINT, signal_handler)
+
+  parser = optparse.OptionParser()
+
+  parser.add_option("-H", "--host", dest="host",
+help="AMS host")
+  parser.add_option("-t", "--starttime", dest="start_time_secs",
+default=int(time.time()),
+help="start time in seconds, default value is current time")
+  parser.add_option("-n", "--nodes", dest="node_names",
+help="nodes from cluster, used as a param to query for")
+  (options, args) = parser.parse_args()
+
+  if options.host is None:
+print "AMS host name is required (--host or -h)"
+exit(-1)
+
+  if options.node_names is None:
+print "cluster nodes are required (--nodes or -n)"
+exit(3)
+
+  global start_time_secs, metrics_test_host, hostnames
+
+  metrics_test_host = options.host
+  start_time_secs = int(options.start_time_secs)
+  hostnames = [options.node_names]
+
+  while True:
+run()
+time.sleep(15)
+start_time_secs += 15
+
+
+def signal_handler(signal, frame):
+  print('Exiting, Ctrl+C press detected!')
+  print_all_metrics(all_metrics_times)
+  sys.exit(0)
+
+
+def run():
+  hostname = ','.join(hostnames)
+  qs = QuerySender(metrics_test_host, True)
+  for metric_name in all_metrics:
+print
+print 'Querying for ' + metric_name + ' metrics'
+current_time_secs = start_time_secs
+qs.query_all_app_metrics(hostname, metric_name,
+ all_metrics[metric_name],
+
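
The script above loops over those metric groups and polls the collector using the URL format documented near the top of the file. A standalone sketch of a single query follows; the collector host, node name and metric names are placeholders, and the appId/startTime/endTime parameters (with timestamps in seconds) follow the sample request shown in the comment:

import json
import time
import urllib2

ams_host = "ams.example.com"        # placeholder collector host
end_time = int(time.time())
start_time = end_time - 300         # last five minutes

url = ("http://%s:8188/ws/v1/timeline/metrics"
       "?metricNames=%s&appId=%s&hostname=%s&startTime=%d&endTime=%d"
       % (ams_host, "cpu_user,cpu_idle", "HOST", "node1.example.com",
          start_time, end_time))

response = json.loads(urllib2.urlopen(url, timeout=30).read())
for metric in response.get("metrics", []):
    print metric["metricname"], len(metric["metrics"]), "datapoints"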

[02/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-server/src/test/resources/ams/multiple_component_metrics.json
--
diff --git 
a/ambari-server/src/test/resources/ams/multiple_component_metrics.json 
b/ambari-server/src/test/resources/ams/multiple_component_metrics.json
new file mode 100644
index 0000000..3d231fa
--- /dev/null
+++ b/ambari-server/src/test/resources/ams/multiple_component_metrics.json
@@ -0,0 +1,1274 @@
+{"metrics": [
+{
+"timestamp": 1416528819358,
+"type": "Float",
+"metricname": "jvm.JvmMetrics.MemHeapCommittedM",
+"appid": "namenode",
+"hostname": "h1",
+"starttime": 1416528759232,
+"metrics": {
+"1416528759232": 1004.0,
+"1416528769231": 1004.0,
+"1416528779231": 1004.0,
+"1416528789231": 1004.0,
+"1416528799231": 1004.0,
+"1416528809231": 1004.0,
+"1416528819232": 1004.0,
+"1416528829231": 1004.0,
+"1416528839232": 1004.0,
+"1416528849231": 1004.0,
+"1416528859231": 1004.0,
+"1416528869231": 1004.0,
+"1416528879231": 1004.0,
+"1416528889231": 1004.0,
+"1416528899232": 1004.0,
+"1416528909231": 1004.0,
+"1416528919232": 1004.0,
+"1416528929231": 1004.0,
+"1416528939231": 1004.0,
+"1416528949231": 1004.0,
+"1416528959232": 1004.0,
+"1416528969231": 1004.0,
+"1416528979231": 1004.0,
+"1416528989231": 1004.0,
+"1416528999231": 1004.0,
+"1416529009231": 1004.0,
+"1416529019231": 1004.0,
+"1416529029231": 1004.0,
+"1416529039231": 1004.0,
+"1416529049231": 1004.0,
+"1416529059231": 1004.0,
+"1416529069231": 1004.0,
+"1416529079231": 1004.0,
+"1416529089231": 1004.0,
+"1416529099231": 1004.0,
+"1416529109232": 1004.0,
+"1416529119232": 1004.0,
+"1416529129231": 1004.0,
+"1416529139231": 1004.0,
+"1416529149231": 1004.0,
+"1416529159231": 1004.0,
+"1416529169231": 1004.0,
+"1416529179231": 1004.0,
+"1416529189231": 1004.0,
+"1416529199231": 1004.0,
+"1416529209231": 1004.0,
+"1416529219232": 1004.0,
+"1416529229231": 1004.0,
+"1416529239232": 1004.0,
+"1416529249231": 1004.0,
+"1416529259231": 1004.0,
+"1416529269232": 1004.0,
+"1416529279231": 1004.0,
+"1416529289231": 1004.0,
+"1416529299231": 1004.0,
+"1416529309232": 1004.0,
+"1416529319232": 1004.0,
+"1416529329231": 1004.0,
+"1416529339231": 1004.0,
+"1416529349231": 1004.0,
+"1416529359232": 1004.0,
+"1416529369231": 1004.0,
+"1416529379231": 1004.0,
+"1416529389232": 1004.0,
+"1416529399231": 1004.0,
+"1416529409231": 1004.0,
+"1416529419231": 1004.0,
+"1416529429232": 1004.0,
+"1416529439231": 1004.0,
+"1416529449231": 1004.0,
+"1416529459231": 1004.0,
+"1416529469231": 1004.0,
+"1416529479231": 1004.0,
+"1416529489231": 1004.0,
+"1416529499231": 1004.0,
+"1416529509231": 1004.0,
+"1416529519232": 1004.0,
+"1416529529231": 1004.0,
+"1416529539231": 1004.0,
+"1416529549231": 1004.0,
+"1416529559232": 1004.0,
+"1416529569231": 1004.0,
+"1416529579231": 1004.0,
+"1416529589231": 1004.0,
+"1416529599231": 1004.0,
+"1416529609232": 1004.0,
+"1416529619231": 1004.0,
+"1416529629231": 1004.0,
+"1416529639231": 1004.0,
+"1416529649231": 1004.0,
+"1416529659232": 1004.0,
+"1416529669231": 1004.0,
+"1416529679231": 1004.0,
+"1416529689231": 1004.0,
+"1416529699231": 1004.0,
+"1416529709231": 1004.0,
+"1416529719231": 1004.0,
+"1416529729231": 1004.0,
+"1416529739231": 1004.0,
+"1416529749231": 1004.0,
+"1416529759231": 1004.0,
+"1416529769231": 1004.0,
+"1416529779231": 1004.0,
+"1416529789231": 1004.0,
+"1416529799231": 1004.0,
+"1416529809232": 1004.0,
+"1416529819231": 1004.0,
+"1416529829231": 1004.0,
+"1416529839231": 1004.0,
+"1416529849231": 1004.0,
+"1416529859231": 1004.0,
+"
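
The test resource above captures the shape of a timeline-service payload: a list of metric objects, each carrying appid, hostname, starttime and a timestamp-to-value map. A small sketch of summarizing such a payload (the file path is illustrative):

import json

with open("multiple_component_metrics.json") as fp:
    payload = json.load(fp)

for m in payload["metrics"]:
    points = m["metrics"]               # {timestamp: value}
    print "%s %s@%s: %d points from %s" % (
        m["appid"], m["metricname"], m["hostname"], len(points), m["starttime"])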

[29/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/application_metric_map.py
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/application_metric_map.py
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/application_metric_map.py
new file mode 100644
index 000..1be6fa2
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/application_metric_map.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import logging
+import json
+from threading import RLock
+
+logger = logging.getLogger()
+
+class ApplicationMetricMap:
+  """
+  A data structure to buffer metrics in memory.
+  The in-memory dict stores metrics as shown below:
+  { application_id : { metric_id : { timestamp :  metric_value } } }
+  application_id => uniquely identify the metrics for an application / host.
+  metric_id  => identify the metric
+  timestamp  => collection time
+  metric_value   => numeric value
+  """
+
+
+  def __init__(self, hostname, ip_address):
+self.hostname = hostname
+self.ip_address = ip_address
+self.lock = RLock()
+self.app_metric_map = {}
+  pass
+
+  def put_metric(self, application_id, metric_id_to_value_map, timestamp):
+with self.lock:
+  for metric_name, value in metric_id_to_value_map.iteritems():
+  
+metric_map = self.app_metric_map.get(application_id)
+if not metric_map:
+  metric_map = { metric_name : { timestamp : value } }
+  self.app_metric_map[ application_id ] = metric_map
+else:
+  metric_id_map = metric_map.get(metric_name)
+  if not metric_id_map:
+metric_id_map = { timestamp : value }
+metric_map[ metric_name ] = metric_id_map
+  else:
+metric_map[ metric_name ].update( { timestamp : value } )
+  pass
+pass
+  pass
+
+  def delete_application_metrics(self, app_id):
+del self.app_metric_map[ app_id ]
+  pass
+
+  def flatten(self, application_id = None):
+"""
+Return flatten dict to caller in json format.
+Json format:
+{"metrics":[{"hostname":"a","metricname":"b","appid":"c",
+"instanceid":"d","starttime":"e","metrics":{"t":"v"}}]}
+"""
+with self.lock:
+  timeline_metrics = { "metrics" : [] }
+  local_metric_map = {}
+  
+  if application_id:
+if self.app_metric_map.has_key(application_id):
+  local_metric_map = { application_id : self.app_metric_map[application_id] }
+else:
+  logger.info("application_id: {0}, not present in the map.".format(application_id))
+  else:
+local_metric_map = self.app_metric_map.copy()
+  pass
+  
+  for appId, metrics in local_metric_map.iteritems():
+for metricId, metricData in dict(metrics).iteritems():
+  # Create a timeline metric object
+  timeline_metric = {
+"hostname" : self.hostname,
+"metricname" : metricId,
+"appid" : "HOST",
+"instanceid" : "",
+"starttime" : self.get_start_time(appId, metricId),
+"metrics" : metricData
+  }
+  timeline_metrics[ "metrics" ].append( timeline_metric )
+pass
+  pass
+  return json.dumps(timeline_metrics) if len(timeline_metrics[ "metrics" ]) > 0 else None
+  pass
+
+  def get_start_time(self, app_id, metric_id):
+with self.lock:
+  if self.app_metric_map.has_key(app_id):
+if self.app_metric_map.get(app_id).has_key(metric_id):
+  metrics = self.app_metric_map.get(app_id).get(metric_id)
+  return min(metrics.iterkeys())
+  pass
+
+  def format_app_id(self, app_id, instance_id = None):
+return app_id + "_" + instance_id if instance_id else app_id
+  pass
+
+  def get_app_id(self, app_id):
+return app_id.split("_")[0]
+  pass
+
+  def get_instance_id(self, app_id):
+parts = app_id.split("_")
+return parts[1] if len(parts) > 1 else ''
+  pass
+
+  def clear(self):
+with self.lock:
+  self.app_metric_map.clear()
+ 
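
A minimal usage sketch for ApplicationMetricMap as defined above; the import path assumes the host-monitoring 'core' package is on sys.path:

import time
from core.application_metric_map import ApplicationMetricMap

amm = ApplicationMetricMap("host1.example.com", "10.0.0.1")
ts = int(time.time() * 1000)
amm.put_metric("HOST", {"cpu_user": 12.5, "mem_free": 2048.0}, ts)
amm.put_metric("HOST", {"cpu_user": 13.1}, ts + 10000)

payload = amm.flatten()   # JSON string {"metrics": [...]}, or None when empty
print payload
amm.clear()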

[09/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
new file mode 100644
index 0000000..d760536
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
@@ -0,0 +1,789 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import 
org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
+import 
org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
+
+public class TimelineStoreTestUtils {
+
+  protected static final List EMPTY_EVENTS =
+  Collections.emptyList();
+  protected static final Map EMPTY_MAP =
+  Collections.emptyMap();
+  protected static final Map> EMPTY_PRIMARY_FILTERS =
+  Collections.emptyMap();
+  protected static final Map> EMPTY_REL_ENTITIES =
+  Collections.emptyMap();
+
+  protected TimelineStore store;
+  protected String entityId1;
+  protected String entityType1;
+  protected String entityId1b;
+  protected String entityId2;
+  protected String entityType2;
+  protected String entityId4;
+  protected String entityType4;
+  protected String entityId5;
+  protected String entityType5;
+  protected Map> primaryFilters;
+  protected Map secondaryFilters;
+  protected Map allFilters;
+  protected Map otherInfo;
+  protected Map> relEntityMap;
+  protected Map> relEntityMap2;
+  protected NameValuePair userFilter;
+  protected NameValuePair numericFilter1;
+  protected NameValuePair numericFilter2;
+  protected NameValuePair numericFilter3;
+  protected Collection goodTestingFilters;
+  protected Collection badTestingFilters;
+  protected TimelineEvent ev1;
+  protected TimelineEvent ev2;
+  protected TimelineEvent ev3;
+  protected TimelineEvent ev4;
+  protected Map eventInfo;
+  protected List events1;
+  protected List events2;
+  protected long beforeTs;
+
+  /**
+   * Load test data into the given store
+   */
+  protected void loadTestData() throws IOException {
+beforeTs = System.currentTimeMillis()-1;
+TimelineEntities entities = new TimelineEntities();
+Map> primaryFilters =
+new HashMap>();
+Set l1 = new HashSet();
+l1.add("username");
+Set l2 = new HashSet();
+l2.add((long)Integer.MAX_VALUE);
+Set l3 = new HashSet();
+l3.add("123abc");
+Set l4 = new HashSet();
+l4.add((long)Integer.MAX_VALUE + 1l);
+primaryFilters.put("user", l1);
+primaryFilters.put("appname", l2);
+primaryFilters.put("other", l3);
+primaryFilters.put("long", l4);
+Map secondaryFilters = new HashMap();
+secondaryFilters.put("startTime", 123456l);
+secondaryFilters.put("status", "RUNNING");
+Map otherInfo1 = new HashMap(

[05/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-env.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-env.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-env.xml
new file mode 100644
index 0000000..7a61c60
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/AMS/configuration/ams-hbase-env.xml
@@ -0,0 +1,137 @@
+
+
+
+
+
+  
+hbase_log_dir
+/var/log/ams-hbase/
+Log Directories for HBase.
+  
+  
+hbase_pid_dir
+/var/run/ams-hbase/
+Pid Directory for HBase.
+  
+  
+hbase_regionserver_heapsize
+1024m
+HBase RegionServer Heap Size.
+  
+  
+hbase_regionserver_xmn_max
+512m
+HBase RegionServer maximum value for minimum heap 
size.
+  
+  
+hbase_regionserver_xmn_ratio
+0.2
+HBase RegionServer minimum heap size is calculated as a 
percentage of max heap size.
+  
+  
+hbase_master_heapsize
+1024m
+HBase Master Heap Size
+  
+  
+hbase_user
+hbase
+USER
+HBase User Name.
+  
+
+  
+  
+content
+This is the jinja template for hbase-env.sh file
+
+  # Set environment variables here.
+
+  # The java implementation to use. Java 1.6 required.
+  export JAVA_HOME={{java64_home}}
+
+  # HBase Configuration directory
+  export HBASE_CONF_DIR=${HBASE_CONF_DIR:-{{hbase_conf_dir}}}
+
+  # Extra Java CLASSPATH elements. Optional.
+  export HBASE_CLASSPATH=${HBASE_CLASSPATH}
+
+  if [ -f 
"/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar" ]; then
+export 
HBASE_CLASSPATH=${HBASE_CLASSPATH}:/usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+  fi
+
+  # The maximum amount of heap to use, in MB. Default is 1000.
+  # export HBASE_HEAPSIZE=1000
+
+  # Extra Java runtime options.
+  # Below are what we set by default. May only work with SUN JVM.
+  # For more on why as well as other possible settings,
+  # see http://wiki.apache.org/hadoop/PerformanceTuning
+  export HBASE_OPTS="-XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hbase_log_dir}}/hs_err_pid%p.log"
+  export SERVER_GC_OPTS="-verbose:gc -XX:+PrintGCDetails 
-XX:+PrintGCDateStamps -Xloggc:{{log_dir}}/gc.log-`date +'%Y%m%d%H%M'`"
+  # Uncomment below to enable java garbage collection logging.
+  # export HBASE_OPTS="$HBASE_OPTS -verbose:gc -XX:+PrintGCDetails 
-XX:+PrintGCDateStamps -Xloggc:$HBASE_HOME/logs/gc-hbase.log"
+
+  # Uncomment and adjust to enable JMX exporting
+  # See jmxremote.password and jmxremote.access in 
$JRE_HOME/lib/management to configure remote password access.
+  # More details at: 
http://java.sun.com/javase/6/docs/technotes/guides/management/agent.html
+  #
+  # export HBASE_JMX_BASE="-Dcom.sun.management.jmxremote.ssl=false 
-Dcom.sun.management.jmxremote.authenticate=false"
+  export HBASE_MASTER_OPTS="-Xmx{{master_heapsize}}"
+  export HBASE_REGIONSERVER_OPTS="-Xmn{{regionserver_xmn_size}} 
-XX:CMSInitiatingOccupancyFraction=70  -Xms{{regionserver_heapsize}} 
-Xmx{{regionserver_heapsize}}"
+  # export HBASE_THRIFT_OPTS="$HBASE_JMX_BASE 
-Dcom.sun.management.jmxremote.port=10103"
+  # export HBASE_ZOOKEEPER_OPTS="$HBASE_JMX_BASE 
-Dcom.sun.management.jmxremote.port=10104"
+
+  # File naming hosts on which HRegionServers will run. 
$HBASE_HOME/conf/regionservers by default.
+  export HBASE_REGIONSERVERS=${HBASE_CONF_DIR}/regionservers
+
+  # Extra ssh options. Empty by default.
+  # export HBASE_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HBASE_CONF_DIR"
+
+  # Where log files are stored. $HBASE_HOME/logs by default.
+  export HBASE_LOG_DIR={{hbase_log_dir}}
+
+  # A string representing this instance of hbase. $USER by default.
+  # export HBASE_IDENT_STRING=$USER
+
+  # The scheduling priority for daemon processes. See 'man nice'.
+  # export HBASE_NICENESS=10
+
+  # The directory where pid files are stored. /tmp by default.
+  export HBASE_PID_DIR={{hbase_pid_dir}}
+
+  # Seconds to sleep between slave commands. Unset by default. This
+  # can be useful in large clusters, where, e.g., slave rsyncs can
+  # otherwise arrive faster than the master can service them.
+  # export HBASE_SLAVE_SLEEP=0.1
+
+  # Tell HBase whether it should manage its own instance of Zookeeper or not.
+  export HBASE_MANAGES_ZK=false
+
+  {% if security_enabled %}
+  export HBASE_OPTS="$HBASE_OPTS 
-Djava.security.auth.login.config={{client_jaas_config_file}}"
+  export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS 
-Djava.security.auth.login.config={{master_jaas_config_file}}"
+  export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS 
-Djava.security.auth.lo
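
The content property above is described as a Jinja template for hbase-env.sh, with the {{...}} placeholders presumably filled in from the corresponding ams-hbase-env properties at deploy time. A small sketch of that substitution with jinja2, using illustrative values (the JDK path is a placeholder):

from jinja2 import Template

template_text = """
export JAVA_HOME={{java64_home}}
export HBASE_LOG_DIR={{hbase_log_dir}}
export HBASE_PID_DIR={{hbase_pid_dir}}
export HBASE_MASTER_OPTS="-Xmx{{master_heapsize}}"
"""

print Template(template_text).render(
    java64_home="/usr/jdk64/jdk1.7.0_67",
    hbase_log_dir="/var/log/ams-hbase",
    hbase_pid_dir="/var/run/ams-hbase",
    master_heapsize="1024m")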

[12/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSController.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSController.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSController.java
new file mode 100644
index 0000000..4e00bc8
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSController.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import org.apache.hadoop.yarn.webapp.Controller;
+
+import com.google.inject.Inject;
+
+public class AHSController extends Controller {
+
+  @Inject
+  AHSController(RequestContext ctx) {
+super(ctx);
+  }
+
+  @Override
+  public void index() {
+setTitle("Application History");
+  }
+
+  public void app() {
+render(AppPage.class);
+  }
+
+  public void appattempt() {
+render(AppAttemptPage.class);
+  }
+
+  public void container() {
+render(ContainerPage.class);
+  }
+
+  /**
+   * Render the logs page.
+   */
+  public void logs() {
+render(AHSLogsPage.class);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSLogsPage.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSLogsPage.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSLogsPage.java
new file mode 100644
index 0000000..8821bc0
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSLogsPage.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
+
+import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.log.AggregatedLogsBlock;
+
+public class AHSLogsPage extends AHSView {
+  /*
+   * (non-Javadoc)
+   * 
+   * @see
+   * org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSView#
+   * preHead(org.apache.hadoop .yarn.webapp.hamlet.Hamlet.HTML)
+   */
+  @Override
+  protected void preHead(Page.HTML<_> html) {
+String logEntity = $(ENTITY_STRING);
+if (logEntity == null || logEntity.isEmpty()) {
+  logEntity = $(CONTAINER_ID);
+}
+if (logEntity == null || logEntity.isEmpty()) {
+  logEntity = "UNKNOWN";
+}
+commonPreHead(html);
+  }
+
+  /**
+   * The content of this page is the AggregatedLogsBlock
+   * 
+   * @return AggregatedLogsBlock.class
+   */
+  @Override
+  protected Class content() {
+return AggregatedL

[06/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProvider.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProvider.java
new file mode 100644
index 0000000..63533c6
--- /dev/null
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/metrics/ganglia/GangliaPropertyProvider.java
@@ -0,0 +1,618 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.server.controller.metrics.ganglia;
+
+import org.apache.ambari.server.controller.internal.AbstractPropertyProvider;
+import org.apache.ambari.server.configuration.ComponentSSLConfiguration;
+import org.apache.ambari.server.controller.internal.PropertyInfo;
+import org.apache.ambari.server.controller.metrics.MetricHostProvider;
+import org.apache.ambari.server.controller.metrics.MetricsPropertyProvider;
+import org.apache.ambari.server.controller.spi.*;
+import org.apache.ambari.server.controller.utilities.StreamProvider;
+import org.apache.http.client.utils.URIBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static 
org.apache.ambari.server.controller.metrics.MetricsPropertyProvider.MetricsService.*;
+
+/**
+ * Abstract property provider implementation for a Ganglia source.
+ */
+public abstract class GangliaPropertyProvider extends MetricsPropertyProvider {
+
+  /**
+   * Map of Ganglia cluster names keyed by component type.
+   */
+  static final Map> GANGLIA_CLUSTER_NAME_MAP = new 
HashMap>();
+
+  
+  static {
+GANGLIA_CLUSTER_NAME_MAP.put("NAMENODE",   
Collections.singletonList("HDPNameNode"));
+GANGLIA_CLUSTER_NAME_MAP.put("DATANODE",   
Arrays.asList("HDPDataNode", "HDPSlaves"));
+GANGLIA_CLUSTER_NAME_MAP.put("JOBTRACKER", 
Collections.singletonList("HDPJobTracker"));
+GANGLIA_CLUSTER_NAME_MAP.put("TASKTRACKER",
Arrays.asList("HDPTaskTracker", "HDPSlaves"));
+GANGLIA_CLUSTER_NAME_MAP.put("RESOURCEMANAGER",
Collections.singletonList("HDPResourceManager"));
+GANGLIA_CLUSTER_NAME_MAP.put("NODEMANAGER",
Arrays.asList("HDPNodeManager", "HDPSlaves"));
+GANGLIA_CLUSTER_NAME_MAP.put("HISTORYSERVER",  
Collections.singletonList("HDPHistoryServer"));
+GANGLIA_CLUSTER_NAME_MAP.put("HBASE_MASTER",   
Collections.singletonList("HDPHBaseMaster"));
+GANGLIA_CLUSTER_NAME_MAP.put("HBASE_REGIONSERVER", 
Arrays.asList("HDPHBaseRegionServer", "HDPSlaves"));
+GANGLIA_CLUSTER_NAME_MAP.put("FLUME_HANDLER",  
Arrays.asList("HDPFlumeServer", "HDPSlaves"));
+GANGLIA_CLUSTER_NAME_MAP.put("JOURNALNODE",
Arrays.asList("HDPJournalNode", "HDPSlaves"));
+GANGLIA_CLUSTER_NAME_MAP.put("NIMBUS", 
Collections.singletonList("HDPNimbus"));
+GANGLIA_CLUSTER_NAME_MAP.put("SUPERVISOR", 
Collections.singletonList("HDPSupervisor"));
+  }
+
+  protected final static Logger LOG =
+  LoggerFactory.getLogger(GangliaPropertyProvider.class);
+
+  // - Constructors --
+
+  public GangliaPropertyProvider(Map> 
componentPropertyInfoMap,
+ StreamProvider streamProvider,
+ ComponentSSLConfiguration configuration,
+ MetricHostProvider hostProvider,
+ String clusterNamePropertyId,
+ String hostNamePropertyId,
+ String componentNamePropertyId) {
+
+super(componentPropertyInfoMap, streamProvider,configuration,
+  hostProvider, clusterNamePropertyId, hostNamePropertyId,
+  componentNamePropertyId);
+  }
+
+
+  // - PropertyProvider ---

[24/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_bsd.c
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_bsd.c
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_bsd.c
new file mode 100644
index 0000000..5a9f9c0
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psutil_bsd.c
@@ -0,0 +1,2212 @@
+/*
+ * Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ *
+ * FreeBSD platform-specific module methods for _psutil_bsd
+ */
+
+
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+
+#include 
+#include // for struct xsocket
+#include 
+#include 
+// for xinpcb struct
+#include 
+#include 
+#include 
+#include 
+#include// for struct xtcpcb
+#include// for TCP connection states
+#include  // for inet_ntop()
+
+#if __FreeBSD_version < 900000
+#include  // system users
+#else
+#include 
+#endif
+#include   // get io counters
+#include   // needed for vmtotal struct
+#include   // process open files, shared libs (kinfo_getvmmap)
+#include 
+
+#include// net io counters
+#include 
+#include 
+
+#include// process open files/connections
+#include 
+
+#include "_psutil_bsd.h"
+#include "_psutil_common.h"
+#include "arch/bsd/process_info.h"
+
+
+// convert a timeval struct to a double
+#define TV2DOUBLE(t)    ((t).tv_sec + (t).tv_usec / 1000000.0)
+
+
+/*
+ * Utility function which fills a kinfo_proc struct based on process pid
+ */
+static int
+psutil_kinfo_proc(const pid_t pid, struct kinfo_proc *proc)
+{
+int mib[4];
+size_t size;
+mib[0] = CTL_KERN;
+mib[1] = KERN_PROC;
+mib[2] = KERN_PROC_PID;
+mib[3] = pid;
+
+size = sizeof(struct kinfo_proc);
+
+if (sysctl((int *)mib, 4, proc, &size, NULL, 0) == -1) {
+PyErr_SetFromErrno(PyExc_OSError);
+return -1;
+}
+
+// sysctl stores 0 in the size if we can't find the process information.
+if (size == 0) {
+NoSuchProcess();
+return -1;
+}
+return 0;
+}
+
+
+/*
+ * Return a Python list of all the PIDs running on the system.
+ */
+static PyObject *
+psutil_pids(PyObject *self, PyObject *args)
+{
+kinfo_proc *proclist = NULL;
+kinfo_proc *orig_address = NULL;
+size_t num_processes;
+size_t idx;
+PyObject *retlist = PyList_New(0);
+PyObject *pid = NULL;
+
+if (retlist == NULL) {
+return NULL;
+}
+if (psutil_get_proc_list(&proclist, &num_processes) != 0) {
+PyErr_SetString(PyExc_RuntimeError,
+"failed to retrieve process list.");
+goto error;
+}
+
+if (num_processes > 0) {
+orig_address = proclist; // save so we can free it after we're done
+for (idx = 0; idx < num_processes; idx++) {
+pid = Py_BuildValue("i", proclist->ki_pid);
+if (!pid)
+goto error;
+if (PyList_Append(retlist, pid))
+goto error;
+Py_DECREF(pid);
+proclist++;
+}
+free(orig_address);
+}
+
+return retlist;
+
+error:
+Py_XDECREF(pid);
+Py_DECREF(retlist);
+if (orig_address != NULL) {
+free(orig_address);
+}
+return NULL;
+}
+
+
+/*
+ * Return a Python float indicating the system boot time expressed in
+ * seconds since the epoch.
+ */
+static PyObject *
+psutil_boot_time(PyObject *self, PyObject *args)
+{
+// fetch sysctl "kern.boottime"
+static int request[2] = { CTL_KERN, KERN_BOOTTIME };
+struct timeval boottime;
+size_t len = sizeof(boottime);
+
+if (sysctl(request, 2, &boottime, &len, NULL, 0) == -1) {
+PyErr_SetFromErrno(PyExc_OSError);
+return NULL;
+}
+return Py_BuildValue("d", (double)boottime.tv_sec);
+}
+
+
+/*
+ * Return process name from kinfo_proc as a Python string.
+ */
+static PyObject *
+psutil_proc_name(PyObject *self, PyObject *args)
+{
+long pid;
+struct kinfo_proc kp;
+if (! PyArg_ParseTuple(args, "l", &pid)) {
+return NULL;
+}
+if (psutil_kinfo_proc(pid, &kp) == -1) {
+return NULL;
+}
+return Py_BuildValue("s", kp.ki_comm);
+}
+
+
+/*
+ * Return process pathname executable.
+ * Thanks to Robert N. M. Watson:
+ * http://fxr.googlebit.com/source/usr.bin/procstat/procstat_bin.c?v=8-CURRENT
+ */
+static PyObject *
+psutil_proc_exe(PyObject *self, PyObject *args)
+{
+long pid;
+char pathname[PATH_MAX];
+int error;
+int mib[4];
+size_t size;
+
+if (! PyArg_ParseTuple(args, "l", &pid)) {
+   

[18/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
new file mode 100644
index 000..e15198b
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.Server;
+import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
+import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
+import 
org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse;
+import 
org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
+import 
org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
+import 
org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
+import 
org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ContainerReport;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException;
+import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
+import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.ipc.YarnRPC;
+
+public class ApplicationHistoryClientService extends AbstractService {
+  private static final Log LOG = LogFactory
+.getLog(ApplicationHistoryClientService.class);
+  private ApplicationHistoryManager history;
+  private ApplicationHistoryProtocol protocolHandler;
+  private Server server;
+  private InetSocketAddress bindAddress;
+
+  public ApplicationHistoryClientService(ApplicationHistoryManager history) {
+super("ApplicationHistoryClientService");
+this.history = history;
+this.protocolHandler = new ApplicationHSClientProtocolHandler();
+  }
+
+  protected void serviceStart() throws Exception {
+Configuration conf = getConfig();
+YarnRPC rpc = YarnRPC.create(conf);
+InetSocketAddress address =
+   

[08/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaComponentPropertyProvider.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaComponentPropertyProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaComponentPropertyProvider.java
deleted file mode 100644
index 1e01aa0..000
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaComponentPropertyProvider.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.server.controller.ganglia;
-
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.ambari.server.configuration.ComponentSSLConfiguration;
-import org.apache.ambari.server.controller.internal.PropertyInfo;
-import org.apache.ambari.server.controller.spi.Resource;
-import org.apache.ambari.server.controller.utilities.StreamProvider;
-
-/**
- * Ganglia property provider implementation for component resources.
- */
-public class GangliaComponentPropertyProvider extends GangliaPropertyProvider {
-
-
-  // - Constructors --
-
-  public GangliaComponentPropertyProvider(Map<String, Map<String, PropertyInfo>> componentMetrics,
-  StreamProvider streamProvider,
-  ComponentSSLConfiguration 
configuration,
-  GangliaHostProvider hostProvider,
-  String clusterNamePropertyId,
-  String componentNamePropertyId) {
-
-super(componentMetrics, streamProvider, configuration, hostProvider,
-clusterNamePropertyId, null, componentNamePropertyId);
-  }
-
-
-  // - GangliaPropertyProvider ---
-
-  @Override
-  protected String getHostName(Resource resource) {
-return "__SummaryInfo__";
-  }
-
-  @Override
-  protected String getComponentName(Resource resource) {
-return (String) resource.getPropertyValue(getComponentNamePropertyId());
-  }
-
-  @Override
-  protected Set<String> getGangliaClusterNames(Resource resource, String clusterName) {
-String component = getComponentName(resource);
-
-    return new HashSet<String>(GANGLIA_CLUSTER_NAME_MAP.containsKey(component) ?
-        GANGLIA_CLUSTER_NAME_MAP.get(component) :
-        Collections.<String>emptyList());
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaHostComponentPropertyProvider.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaHostComponentPropertyProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaHostComponentPropertyProvider.java
deleted file mode 100644
index 9411cab..000
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/ganglia/GangliaHostComponentPropertyProvider.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.se

[25/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psosx.py
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psosx.py
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psosx.py
new file mode 100644
index 000..8953867
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/psutil/_psosx.py
@@ -0,0 +1,341 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""OSX platform implementation."""
+
+import errno
+import os
+import sys
+
+from psutil import _common
+from psutil import _psposix
+from psutil._common import conn_tmap, usage_percent, isfile_strict
+from psutil._compat import namedtuple, wraps
+import _psutil_osx as cext
+import _psutil_posix
+
+
+__extra__all__ = []
+
+# --- constants
+
+PAGESIZE = os.sysconf("SC_PAGE_SIZE")
+
+# http://students.mimuw.edu.pl/lxr/source/include/net/tcp_states.h
+TCP_STATUSES = {
+cext.TCPS_ESTABLISHED: _common.CONN_ESTABLISHED,
+cext.TCPS_SYN_SENT: _common.CONN_SYN_SENT,
+cext.TCPS_SYN_RECEIVED: _common.CONN_SYN_RECV,
+cext.TCPS_FIN_WAIT_1: _common.CONN_FIN_WAIT1,
+cext.TCPS_FIN_WAIT_2: _common.CONN_FIN_WAIT2,
+cext.TCPS_TIME_WAIT: _common.CONN_TIME_WAIT,
+cext.TCPS_CLOSED: _common.CONN_CLOSE,
+cext.TCPS_CLOSE_WAIT: _common.CONN_CLOSE_WAIT,
+cext.TCPS_LAST_ACK: _common.CONN_LAST_ACK,
+cext.TCPS_LISTEN: _common.CONN_LISTEN,
+cext.TCPS_CLOSING: _common.CONN_CLOSING,
+cext.PSUTIL_CONN_NONE: _common.CONN_NONE,
+}
+
+PROC_STATUSES = {
+cext.SIDL: _common.STATUS_IDLE,
+cext.SRUN: _common.STATUS_RUNNING,
+cext.SSLEEP: _common.STATUS_SLEEPING,
+cext.SSTOP: _common.STATUS_STOPPED,
+cext.SZOMB: _common.STATUS_ZOMBIE,
+}
+
+scputimes = namedtuple('scputimes', ['user', 'nice', 'system', 'idle'])
+
+svmem = namedtuple(
+'svmem', ['total', 'available', 'percent', 'used', 'free',
+  'active', 'inactive', 'wired'])
+
+pextmem = namedtuple('pextmem', ['rss', 'vms', 'pfaults', 'pageins'])
+
+pmmap_grouped = namedtuple(
+'pmmap_grouped',
+'path rss private swapped dirtied ref_count shadow_depth')
+
+pmmap_ext = namedtuple(
+'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields))
+
+# set later from __init__.py
+NoSuchProcess = None
+AccessDenied = None
+TimeoutExpired = None
+
+
+# --- functions
+
+def virtual_memory():
+"""System virtual memory as a namedtuple."""
+total, active, inactive, wired, free = cext.virtual_mem()
+avail = inactive + free
+used = active + inactive + wired
+percent = usage_percent((total - avail), total, _round=1)
+return svmem(total, avail, percent, used, free,
+ active, inactive, wired)
+
+
+def swap_memory():
+"""Swap system memory as a (total, used, free, sin, sout) tuple."""
+total, used, free, sin, sout = cext.swap_mem()
+percent = usage_percent(used, total, _round=1)
+return _common.sswap(total, used, free, percent, sin, sout)
+
+
+def cpu_times():
+"""Return system CPU times as a namedtuple."""
+user, nice, system, idle = cext.cpu_times()
+return scputimes(user, nice, system, idle)
+
+
+def per_cpu_times():
+"""Return system CPU times as a named tuple"""
+ret = []
+for cpu_t in cext.per_cpu_times():
+user, nice, system, idle = cpu_t
+item = scputimes(user, nice, system, idle)
+ret.append(item)
+return ret
+
+
+def cpu_count_logical():
+"""Return the number of logical CPUs in the system."""
+return cext.cpu_count_logical()
+
+
+def cpu_count_physical():
+"""Return the number of physical CPUs in the system."""
+return cext.cpu_count_phys()
+
+
+def boot_time():
+"""The system boot time expressed in seconds since the epoch."""
+return cext.boot_time()
+
+
+def disk_partitions(all=False):
+retlist = []
+partitions = cext.disk_partitions()
+for partition in partitions:
+device, mountpoint, fstype, opts = partition
+if device == 'none':
+device = ''
+if not all:
+if not os.path.isabs(device) or not os.path.exists(device):
+continue
+ntuple = _common.sdiskpart(device, mountpoint, fstype, opts)
+retlist.append(ntuple)
+return retlist
+
+
+def users():
+retlist = []
+rawlist = cext.users()
+for item in rawlist:
+user, tty, hostname, tstamp = item
+if tty == '~':
+continue  # reboot or shutdown
+if not tstamp:
+continue
+nt = _common.suser(user, tty or None, hostname or None, tstamp)
+retlist.append(nt)
+return retlist
+
+
+def net_connections(kind='inet'):

[15/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricClusterAggregator.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricClusterAggregator.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricClusterAggregator.java
new file mode 100644
index 000..654c188
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricClusterAggregator.java
@@ -0,0 +1,220 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import java.io.IOException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import static java.util.concurrent.TimeUnit.SECONDS;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.GET_METRIC_SQL;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_RECORD_TABLE_NAME;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.NATIVE_TIME_RANGE_DELTA;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_AGGREGATOR_MINUTE_CHECKPOINT_CUTOFF_MULTIPLIER;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_AGGREGATOR_MINUTE_DISABLED;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_AGGREGATOR_MINUTE_SLEEP_INTERVAL;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.CLUSTER_AGGREGATOR_TIMESLICE_INTERVAL;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.DEFAULT_CHECKPOINT_LOCATION;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.TIMELINE_METRICS_AGGREGATOR_CHECKPOINT_DIR;
+
+/**
+ * Aggregates a metric across all hosts in the cluster. Reads metrics from
+ * the precision table and saves into the aggregate.
+ */
+public class TimelineMetricClusterAggregator extends 
AbstractTimelineAggregator {
+  private static final Log LOG = 
LogFactory.getLog(TimelineMetricClusterAggregator.class);
+  private static final String CLUSTER_AGGREGATOR_CHECKPOINT_FILE =
+"timeline-metrics-cluster-aggregator-checkpoint";
+  private final String checkpointLocation;
+  private final Long sleepIntervalMillis;
+  public final int timeSliceIntervalMillis;
+  private final Integer checkpointCutOffMultiplier;
+
+  public TimelineMetricClusterAggregator(PhoenixHBaseAccessor hBaseAccessor,
+ Configuration metricsConf) {
+super(hBaseAccessor, metricsConf);
+
+String checkpointDir = metricsConf.get(
+  TIMELINE_METRICS_AGGREGATOR_CHECKPOINT_DIR, DEFAULT_CHECKPOINT_LOCATION);
+
+checkpointLocation = FilenameUtils.concat(checkpointDir,
+  CLUSTER_AGGREGATOR_CHECKPOINT_FILE);
+
+sleepIntervalMillis = SECONDS.toMillis(metricsConf.getLong
+  (CLUSTER_AGGREGATOR_MINUTE_SLEEP_INTERVAL, 120l));
+timeSliceIntervalMillis = (int)SECONDS.toMill
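
The hunk above only wires up the checkpoint location and the sleep and time-slice
intervals; the aggregation itself is not shown here. As a rough conceptual sketch,
an assumption rather than the commit's actual implementation, aggregating a metric
across all hosts into fixed-width time slices can be pictured like this:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class ClusterTimeSliceAggregationExample {

  /** One raw datapoint from a single host's precision-table record. */
  static class DataPoint {
    final String host;
    final long timestamp;
    final double value;
    DataPoint(String host, long timestamp, double value) {
      this.host = host;
      this.timestamp = timestamp;
      this.value = value;
    }
  }

  /** Sum a metric across hosts into fixed-width time slices. */
  static Map<Long, Double> aggregate(List<DataPoint> points, long sliceMillis) {
    Map<Long, Double> slices = new TreeMap<Long, Double>();
    for (DataPoint p : points) {
      long sliceStart = p.timestamp - (p.timestamp % sliceMillis);
      Double current = slices.get(sliceStart);
      slices.put(sliceStart, current == null ? p.value : current + p.value);
    }
    return slices;
  }

  public static void main(String[] args) {
    List<DataPoint> points = new ArrayList<DataPoint>();
    points.add(new DataPoint("host1", 1000L, 2.0));
    points.add(new DataPoint("host2", 2000L, 3.0));
    points.add(new DataPoint("host1", 16000L, 1.5));
    // With a 15 second slice width this prints {0=5.0, 15000=1.5}.
    System.out.println(aggregate(points, 15000L));
  }
}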

[27/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/examples/top.py
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/examples/top.py
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/examples/top.py
new file mode 100644
index 000..479c797
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/examples/top.py
@@ -0,0 +1,232 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A clone of top / htop.
+
+Author: Giampaolo Rodola' 
+
+$ python examples/top.py
+ CPU0  [|   ]   4.9%
+ CPU1  [||| ]   7.8%
+ CPU2  []   2.0%
+ CPU3  [|   ]  13.9%
+ Mem   [||| ]  49.8%  4920M/9888M
+ Swap  []   0.0% 0M/0M
+ Processes: 287 (running=1 sleeping=286)
+ Load average: 0.34 0.54 0.46  Uptime: 3 days, 10:16:37
+
+PIDUSER   NI  VIRT   RES   CPU% MEM% TIME+  NAME
+
+989giampaol0   66M   12M7.4  0.1   0:00.61  python
+2083   root0  506M  159M6.5  1.6   0:29.26  Xorg
+4503   giampaol0  599M   25M6.5  0.3   3:32.60  gnome-terminal
+3868   giampaol0  358M8M2.8  0.1  23:12.60  pulseaudio
+3936   giampaol01G  111M2.8  1.1  33:41.67  compiz
+4401   giampaol0  536M  141M2.8  1.4  35:42.73  skype
+4047   giampaol0  743M   76M1.8  0.8  42:03.33  unity-panel-service
+13155  giampaol01G  280M1.8  2.8  41:57.34  chrome
+10 root00B0B0.9  0.0   4:01.81  rcu_sched
+339giampaol01G  113M0.9  1.1   8:15.73  chrome
+...
+"""
+
+import os
+import sys
+if os.name != 'posix':
+sys.exit('platform not supported')
+import atexit
+import curses
+import time
+from datetime import datetime, timedelta
+
+import psutil
+
+
+# --- curses stuff
+def tear_down():
+win.keypad(0)
+curses.nocbreak()
+curses.echo()
+curses.endwin()
+
+win = curses.initscr()
+atexit.register(tear_down)
+curses.endwin()
+lineno = 0
+
+
+def print_line(line, highlight=False):
+"""A thin wrapper around curses's addstr()."""
+global lineno
+try:
+if highlight:
+line += " " * (win.getmaxyx()[1] - len(line))
+win.addstr(lineno, 0, line, curses.A_REVERSE)
+else:
+win.addstr(lineno, 0, line, 0)
+except curses.error:
+lineno = 0
+win.refresh()
+raise
+else:
+lineno += 1
+# --- /curses stuff
+
+
+def bytes2human(n):
+"""
+>>> bytes2human(10000)
+'9K'
+>>> bytes2human(100001221)
+'95M'
+"""
+symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
+prefix = {}
+for i, s in enumerate(symbols):
+prefix[s] = 1 << (i + 1) * 10
+for s in reversed(symbols):
+if n >= prefix[s]:
+value = int(float(n) / prefix[s])
+return '%s%s' % (value, s)
+return "%sB" % n
+
+
+def poll(interval):
+# sleep some time
+time.sleep(interval)
+procs = []
+procs_status = {}
+for p in psutil.process_iter():
+try:
+p.dict = p.as_dict(['username', 'nice', 'memory_info',
+'memory_percent', 'cpu_percent',
+'cpu_times', 'name', 'status'])
+try:
+procs_status[p.dict['status']] += 1
+except KeyError:
+procs_status[p.dict['status']] = 1
+except psutil.NoSuchProcess:
+pass
+else:
+procs.append(p)
+
+# return processes sorted by CPU percent usage
+processes = sorted(procs, key=lambda p: p.dict['cpu_percent'],
+   reverse=True)
+return (processes, procs_status)
+
+
+def print_header(procs_status, num_procs):
+"""Print system-related info, above the process list."""
+
+def get_dashes(perc):
+dashes = "|" * int((float(perc) / 10 * 4))
+empty_dashes = " " * (40 - len(dashes))
+return dashes, empty_dashes
+
+# cpu usage
+percs = psutil.cpu_percent(interval=0, percpu=True)
+for cpu_num, perc in enumerate(percs):
+dashes, empty_dashes = get_dashes(perc)
+print_line(" CPU%-2s [%s%s] %5s%%" % (cpu_num, dashes, empty_dashes,
+  perc))
+mem = psutil.virtual_memory()
+dashes, empty_dashes = get_dashes(mem.percent)
+used = mem.total - mem.available
+line = " Mem   [%s%s] %5s%% %6s/%s" % (
+d

[01/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. Rat check fix. (swagle)

2014-12-02 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk c6f574349 -> 93b8348fa


AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. 
Rat check fix. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/93b8348f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/93b8348f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/93b8348f

Branch: refs/heads/trunk
Commit: 93b8348fabb4a50e2ed95ed1b98fea7fff4b2ab1
Parents: a52f8a5
Author: Siddharth Wagle 
Authored: Tue Dec 2 09:26:32 2014 -0800
Committer: Siddharth Wagle 
Committed: Tue Dec 2 09:26:41 2014 -0800

--
 .../server/controller/internal/AbstractProviderModule.java   | 2 +-
 pom.xml  | 4 
 2 files changed, 5 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/93b8348f/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
index d1a2330..7b734cf 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractProviderModule.java
@@ -547,7 +547,7 @@ public abstract class AbstractProviderModule implements 
ProviderModule, Resource
 ComponentSSLConfiguration.instance(),
 this,
 PropertyHelper.getPropertyId("Clusters", "cluster_name")));
-providers.add(new AlertSummaryPropertyProvider(type,
+  providers.add(new AlertSummaryPropertyProvider(type,
 "Clusters/cluster_name", null));
   break;
 case Service:

http://git-wip-us.apache.org/repos/asf/ambari/blob/93b8348f/pom.xml
--
diff --git a/pom.xml b/pom.xml
index fa2ca31..87fb4b8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -255,10 +255,14 @@
 
 **/velocity.log*
 
+
 
 
ambari-metrics/ambari-metrics-host-monitoring/src/main/python/psutil/**
+
ambari-metrics/target/rpm/ambari-metrics/SPECS/ambari-metrics.spec
 
ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.pom
 
ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/maven-metadata-local.xml
+ambari-metrics/*/target/**
+
   
 
 



[03/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/JMXPropertyProviderTest.java
--
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/JMXPropertyProviderTest.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/JMXPropertyProviderTest.java
index 6bb7c69..a640ec2 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/JMXPropertyProviderTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/controller/metrics/JMXPropertyProviderTest.java
@@ -47,7 +47,7 @@ public class JMXPropertyProviderTest {
   public void testPopulateResources() throws Exception {
 TestStreamProvider  streamProvider = new TestStreamProvider();
 TestJMXHostProvider hostProvider = new TestJMXHostProvider(false);
-TestMetricsHostProvider metricsHostProvider = new 
TestMetricsHostProvider();
+TestMetricHostProvider metricsHostProvider = new TestMetricHostProvider();
 
 JMXPropertyProvider propertyProvider = new JMXPropertyProvider(
 PropertyHelper.getJMXPropertyIds(Resource.Type.HostComponent),
@@ -234,7 +234,7 @@ public class JMXPropertyProviderTest {
   public void testPopulateResources_singleProperty() throws Exception {
 TestStreamProvider  streamProvider = new TestStreamProvider();
 TestJMXHostProvider hostProvider = new TestJMXHostProvider(false);
-TestMetricsHostProvider metricsHostProvider = new 
TestMetricsHostProvider();
+TestMetricHostProvider metricsHostProvider = new TestMetricHostProvider();
 
 JMXPropertyProvider propertyProvider = new JMXPropertyProvider(
 PropertyHelper.getJMXPropertyIds(Resource.Type.HostComponent),
@@ -270,7 +270,7 @@ public class JMXPropertyProviderTest {
   public void testPopulateResources_category() throws Exception {
 TestStreamProvider  streamProvider = new TestStreamProvider();
 TestJMXHostProvider hostProvider = new TestJMXHostProvider(false);
-TestMetricsHostProvider metricsHostProvider = new 
TestMetricsHostProvider();
+TestMetricHostProvider metricsHostProvider = new TestMetricHostProvider();
 
 JMXPropertyProvider propertyProvider = new JMXPropertyProvider(
 PropertyHelper.getJMXPropertyIds(Resource.Type.HostComponent),
@@ -308,7 +308,7 @@ public class JMXPropertyProviderTest {
   public void testPopulateResourcesWithUnknownPort() throws Exception {
 TestStreamProvider  streamProvider = new TestStreamProvider();
 TestJMXHostProvider hostProvider = new TestJMXHostProvider(true);
-TestMetricsHostProvider metricsHostProvider = new 
TestMetricsHostProvider();
+TestMetricHostProvider metricsHostProvider = new TestMetricHostProvider();
 
 JMXPropertyProvider propertyProvider = new JMXPropertyProvider(
 PropertyHelper.getJMXPropertyIds(Resource.Type.HostComponent),
@@ -346,7 +346,7 @@ public class JMXPropertyProviderTest {
   public void testPopulateResourcesUnhealthyResource() throws Exception {
 TestStreamProvider  streamProvider = new TestStreamProvider();
 TestJMXHostProvider hostProvider = new TestJMXHostProvider(true);
-TestMetricsHostProvider metricsHostProvider = new 
TestMetricsHostProvider();
+TestMetricHostProvider metricsHostProvider = new TestMetricHostProvider();
 
 JMXPropertyProvider propertyProvider = new JMXPropertyProvider(
 PropertyHelper.getJMXPropertyIds(Resource.Type.HostComponent),
@@ -379,7 +379,7 @@ public class JMXPropertyProviderTest {
 // Set the provider to take 50 millis to return the JMX values
 TestStreamProvider  streamProvider = new TestStreamProvider(50L);
 TestJMXHostProvider hostProvider = new TestJMXHostProvider(true);
-TestMetricsHostProvider metricsHostProvider = new 
TestMetricsHostProvider();
+TestMetricHostProvider metricsHostProvider = new TestMetricHostProvider();
 Set<Resource> resources = new HashSet<Resource>();
 
 JMXPropertyProvider propertyProvider = new JMXPropertyProvider(
@@ -425,7 +425,7 @@ public class JMXPropertyProviderTest {
 // Set the provider to take 100 millis to return the JMX values
 TestStreamProvider  streamProvider = new TestStreamProvider(100L);
 TestJMXHostProvider hostProvider = new TestJMXHostProvider(true);
-TestMetricsHostProvider metricsHostProvider = new 
TestMetricsHostProvider();
+TestMetricHostProvider metricsHostProvider = new TestMetricHostProvider();
 Set<Resource> resources = new HashSet<Resource>();
 
 JMXPropertyProvider propertyProvider = new JMXPropertyProvider(
@@ -512,11 +512,31 @@ public class JMXPropertyProviderTest {
 
   }
 
-  public static class TestMetricsHostProvider implements MetricsHostProvider {
+  public static class TestMetricHostProvider implements MetricHostProvider {
+
+@Override
+public String getCollectorHostName(String clusterName, 
MetricsPropertyProvider.MetricsService service) 

[07/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProvider.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProvider.java
index 217e216..5896a88 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackDefinedPropertyProvider.java
@@ -17,25 +17,17 @@
  */
 package org.apache.ambari.server.controller.internal;
 
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
+import com.google.inject.Inject;
+import com.google.inject.Injector;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.ComponentSSLConfiguration;
-import 
org.apache.ambari.server.controller.ganglia.GangliaComponentPropertyProvider;
-import 
org.apache.ambari.server.controller.ganglia.GangliaHostComponentPropertyProvider;
-import org.apache.ambari.server.controller.ganglia.GangliaHostProvider;
-import org.apache.ambari.server.controller.ganglia.GangliaPropertyProvider;
 import org.apache.ambari.server.controller.jmx.JMXHostProvider;
 import org.apache.ambari.server.controller.jmx.JMXPropertyProvider;
-import org.apache.ambari.server.controller.metrics.MetricsHostProvider;
+import org.apache.ambari.server.controller.metrics.MetricHostProvider;
+import org.apache.ambari.server.controller.metrics.MetricsPropertyProvider;
+import 
org.apache.ambari.server.controller.metrics.ganglia.GangliaComponentPropertyProvider;
+import 
org.apache.ambari.server.controller.metrics.ganglia.GangliaHostComponentPropertyProvider;
+import 
org.apache.ambari.server.controller.metrics.ganglia.GangliaPropertyProvider;
 import org.apache.ambari.server.controller.spi.Predicate;
 import org.apache.ambari.server.controller.spi.PropertyProvider;
 import org.apache.ambari.server.controller.spi.Request;
@@ -50,8 +42,17 @@ import org.apache.ambari.server.state.stack.MetricDefinition;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.inject.Inject;
-import com.google.inject.Injector;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import static 
org.apache.ambari.server.controller.metrics.MetricsPropertyProvider.MetricsService;
 
 /**
  * This class analyzes a service's metrics to determine if additional
@@ -77,11 +78,11 @@ public class StackDefinedPropertyProvider implements 
PropertyProvider {
   private ComponentSSLConfiguration sslConfig = null;
   private StreamProvider streamProvider = null;
   private JMXHostProvider jmxHostProvider;
-  private GangliaHostProvider gangliaHostProvider;
   private PropertyProvider defaultJmx = null;
   private PropertyProvider defaultGanglia = null;
 
-  private final MetricsHostProvider metricsHostProvider;
+  private final MetricHostProvider metricHostProvider;
+  private MetricsService metricsService = MetricsService.GANGLIA;
 
   /**
* PropertyHelper/AbstractPropertyProvider expect map of maps,
@@ -97,19 +98,17 @@ public class StackDefinedPropertyProvider implements 
PropertyProvider {
   }
 
   public StackDefinedPropertyProvider(Resource.Type type,
-  JMXHostProvider jmxHostProvider,
-  GangliaHostProvider gangliaHostProvider,
-  MetricsHostProvider metricsHostProvider,
-  StreamProvider streamProvider,
-  String clusterPropertyId,
-  String hostPropertyId,
-  String componentPropertyId,
-  String resourceStatePropertyId,
-  PropertyProvider defaultJmxPropertyProvider,
-  PropertyProvider defaultGangliaPropertyProvider
-  ) {
-
-this.metricsHostProvider = metricsHostProvider;
+  JMXHostProvider jmxHostProvider,
+  MetricHostProvider metricHostProvider,
+  StreamProvider streamProvider,
+  String clusterPropertyId,
+  String hostPropertyId,
+  String componentPropertyId,
+  String resourceStatePropertyId,
+  PropertyProvider 
defaultJmxPropertyProvider,
+  PropertyProvider 
defaultGan

[17/30] ambari git commit: AMBARI-5707. Replace Ganglia with high performant and pluggable Metrics System. (swagle)

2014-12-02 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/MemoryApplicationHistoryStore.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/MemoryApplicationHistoryStore.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/MemoryApplicationHistoryStore.java
new file mode 100644
index 000..c226ad3
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/MemoryApplicationHistoryStore.java
@@ -0,0 +1,274 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
+
+/**
+ * In-memory implementation of {@link ApplicationHistoryStore}. This
+ * implementation is for test purpose only. If users improperly instantiate it,
+ * they may encounter reading and writing history data in different memory
+ * store.
+ * 
+ */
+@Private
+@Unstable
+public class MemoryApplicationHistoryStore extends AbstractService implements
+ApplicationHistoryStore {
+
+  private final ConcurrentMap<ApplicationId, ApplicationHistoryData> applicationData =
+      new ConcurrentHashMap<ApplicationId, ApplicationHistoryData>();
+  private final ConcurrentMap<ApplicationId, ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData>> applicationAttemptData =
+      new ConcurrentHashMap<ApplicationId, ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData>>();
+  private final ConcurrentMap<ApplicationAttemptId, ConcurrentMap<ContainerId, ContainerHistoryData>> containerData =
+      new ConcurrentHashMap<ApplicationAttemptId, ConcurrentMap<ContainerId, ContainerHistoryData>>();
+
+  public MemoryApplicationHistoryStore() {
+super(MemoryApplicationHistoryStore.class.getName());
+  }
+
+  @Override
+  public Map<ApplicationId, ApplicationHistoryData> getAllApplications() {
+    return new HashMap<ApplicationId, ApplicationHistoryData>(applicationData);
+  }
+
+  @Override
+  public ApplicationHistoryData getApplication(ApplicationId appId) {
+return applicationData.get(appId);
+  }
+
+  @Override
+  public Map<ApplicationAttemptId, ApplicationAttemptHistoryData>
+      getApplicationAttempts(ApplicationId appId) {
+    ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData> subMap =
+        applicationAttemptData.get(appId);
+    if (subMap == null) {
+      return Collections
+        .<ApplicationAttemptId, ApplicationAttemptHistoryData> emptyMap();
+    } else {
+      return new HashMap<ApplicationAttemptId, ApplicationAttemptHistoryData>(subMap);
+    }
+  }
+
+  @Override
+  public ApplicationAttemptHistoryData getApplicationAttempt(
+  ApplicationAttemptId appAttemptId) {
+    ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData> subMap =
+        applicationAttemptData.get(appAttemptId.getApplicationId());
+if (subMap == null) {
+  return null;
+} else {
+  return subMap.get(appAttemptId);
+}
+  }
+
+  @Override
+  public ContainerHistoryData getAMContainer(ApplicationAttemptId 
appAttemptId) {
+ApplicationAttemptHistoryData appAttempt =
+getApplicationAttempt(appAttemptId);
+if (
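
Only the read-side accessors of the store are visible in this hunk. A small usage
sketch, assuming the standard Hadoop service lifecycle (init, start, stop) and an
empty store; the class and the values are illustrative, not part of the commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;

public class MemoryStoreReadExample {
  public static void main(String[] args) {
    MemoryApplicationHistoryStore store = new MemoryApplicationHistoryStore();
    store.init(new Configuration());
    store.start();

    ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);

    // Nothing has been written yet, so the lookup returns null and the
    // map of all applications is empty.
    ApplicationHistoryData data = store.getApplication(appId);
    System.out.println("application data: " + data);
    System.out.println("known applications: " + store.getAllApplications().size());

    store.stop();
  }
}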

[13/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java
--
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java
 
b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java
deleted file mode 100644
index d8dabd2..000
--- 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/NameValuePair.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * A class holding a name and value pair, used for specifying filters in
- * {@link TimelineReader}.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class NameValuePair {
-  String name;
-  Object value;
-
-  public NameValuePair(String name, Object value) {
-this.name = name;
-this.value = value;
-  }
-
-  /**
-   * Get the name.
-   * @return The name.
-   */
-  public String getName() {
-
-return name;
-  }
-
-  /**
-   * Get the value.
-   * @return The value.
-   */
-  public Object getValue() {
-return value;
-  }
-
-  @Override
-  public String toString() {
-return "{ name: " + name + ", value: " + value + " }";
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java
--
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java
 
b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java
deleted file mode 100644
index 9ae9954..000
--- 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineReader.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.EnumSet;
-import java.util.Set;
-import java.util.SortedSet;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
-
-/**
- * This interface is for retrieving timeline information.
- */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public interface TimelineReader {
-
-  /**
-   * Possible fields to retrieve for {@link #getEntities} and {

[06/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixTransactSQL.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixTransactSQL.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixTransactSQL.java
new file mode 100644
index 000..0d53f5f
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixTransactSQL.java
@@ -0,0 +1,528 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Encapsulate all metrics related SQL queries.
+ */
+public class PhoenixTransactSQL {
+
+  static final Log LOG = LogFactory.getLog(PhoenixTransactSQL.class);
+  // TODO: Configurable TTL values
+  /**
+   * Create table to store individual metric records.
+   */
+  public static final String CREATE_METRICS_TABLE_SQL = "CREATE TABLE IF NOT " 
+
+"EXISTS METRIC_RECORD (METRIC_NAME VARCHAR, " +
+"HOSTNAME VARCHAR, " +
+"SERVER_TIME UNSIGNED_LONG NOT NULL, " +
+"APP_ID VARCHAR, " +
+"INSTANCE_ID VARCHAR, " +
+"START_TIME UNSIGNED_LONG, " +
+"UNITS CHAR(20), " +
+"METRIC_SUM DOUBLE, " +
+"METRIC_COUNT UNSIGNED_INT, " +
+"METRIC_MAX DOUBLE, " +
+"METRIC_MIN DOUBLE, " +
+"METRICS VARCHAR CONSTRAINT pk " +
+"PRIMARY KEY (METRIC_NAME, HOSTNAME, SERVER_TIME, APP_ID, " +
+"INSTANCE_ID)) DATA_BLOCK_ENCODING='%s', IMMUTABLE_ROWS=true, " +
+"TTL=%s, COMPRESSION='%s'";
+
+  public static final String CREATE_METRICS_AGGREGATE_HOURLY_TABLE_SQL =
+"CREATE TABLE IF NOT EXISTS METRIC_RECORD_HOURLY " +
+  "(METRIC_NAME VARCHAR, " +
+  "HOSTNAME VARCHAR, " +
+  "APP_ID VARCHAR, " +
+  "INSTANCE_ID VARCHAR, " +
+  "SERVER_TIME UNSIGNED_LONG NOT NULL, " +
+  "UNITS CHAR(20), " +
+  "METRIC_SUM DOUBLE," +
+  "METRIC_COUNT UNSIGNED_INT, " +
+  "METRIC_MAX DOUBLE," +
+  "METRIC_MIN DOUBLE CONSTRAINT pk " +
+  "PRIMARY KEY (METRIC_NAME, HOSTNAME, APP_ID, INSTANCE_ID, " +
+  "SERVER_TIME)) DATA_BLOCK_ENCODING='%s', IMMUTABLE_ROWS=true, " +
+  "TTL=%s, COMPRESSION='%s'";
+
+  public static final String CREATE_METRICS_AGGREGATE_MINUTE_TABLE_SQL =
+"CREATE TABLE IF NOT EXISTS METRIC_RECORD_MINUTE " +
+  "(METRIC_NAME VARCHAR, " +
+  "HOSTNAME VARCHAR, " +
+  "APP_ID VARCHAR, " +
+  "INSTANCE_ID VARCHAR, " +
+  "SERVER_TIME UNSIGNED_LONG NOT NULL, " +
+  "UNITS CHAR(20), " +
+  "METRIC_SUM DOUBLE," +
+  "METRIC_COUNT UNSIGNED_INT, " +
+  "METRIC_MAX DOUBLE," +
+  "METRIC_MIN DOUBLE CONSTRAINT pk " +
+  "PRIMARY KEY (METRIC_NAME, HOSTNAME, APP_ID, INSTANCE_ID, " +
+  "SERVER_TIME)) DATA_BLOCK_ENCODING='%s', IMMUTABLE_ROWS=true, TTL=%s," +
+  " COMPRESSION='%s'";
+
+  public static final String CREATE_METRICS_CLUSTER_AGGREGATE_TABLE_SQL =
+"CREATE TABLE IF NOT EXISTS METRIC_AGGREGATE " +
+  "(METRIC_NAME VARCHAR, " +
+  "APP_ID VARCHAR, " +
+  "INSTANCE_ID VARCHAR, " +
+  "SERVER_TIME UNSIGNED_LONG NOT NULL, " +
+  "UNITS CHAR(20), " +
+  "METRIC_SUM DOUBLE, " +
+  "HOSTS_COUNT UNSIGNED_INT, " +
+  "METRIC_MAX DOUBLE, " +
+  "METRIC_MIN DOUBLE " +
+  "CONSTRAINT pk PRIMARY KEY (METRIC_NAME, APP_ID, INSTANCE_ID, " +
+  "SERVER_TIME)) DATA_BLOCK_ENCODING='%s', IMMUTABLE_ROWS=true, " +
+  "TTL=%s, COMPRESSION='%s'";
+
+  public static final String CREATE_METRICS_CLUSTER_AGGREGATE_HOURLY_TABLE_SQL 
=
+"CREATE TABLE I

[05/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationStartData.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationStartData.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationStartData.java
new file mode 100644
index 000..6bc1323
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ApplicationStartData.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.records;
+
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.util.Records;
+
+/**
+ * The class contains the fields that can be determined when RMApp
+ * starts, and that need to be stored persistently.
+ */
+@Public
+@Unstable
+public abstract class ApplicationStartData {
+
+  @Public
+  @Unstable
+  public static ApplicationStartData newInstance(ApplicationId applicationId,
+  String applicationName, String applicationType, String queue,
+  String user, long submitTime, long startTime) {
+ApplicationStartData appSD = Records.newRecord(ApplicationStartData.class);
+appSD.setApplicationId(applicationId);
+appSD.setApplicationName(applicationName);
+appSD.setApplicationType(applicationType);
+appSD.setQueue(queue);
+appSD.setUser(user);
+appSD.setSubmitTime(submitTime);
+appSD.setStartTime(startTime);
+return appSD;
+  }
+
+  @Public
+  @Unstable
+  public abstract ApplicationId getApplicationId();
+
+  @Public
+  @Unstable
+  public abstract void setApplicationId(ApplicationId applicationId);
+
+  @Public
+  @Unstable
+  public abstract String getApplicationName();
+
+  @Public
+  @Unstable
+  public abstract void setApplicationName(String applicationName);
+
+  @Public
+  @Unstable
+  public abstract String getApplicationType();
+
+  @Public
+  @Unstable
+  public abstract void setApplicationType(String applicationType);
+
+  @Public
+  @Unstable
+  public abstract String getUser();
+
+  @Public
+  @Unstable
+  public abstract void setUser(String user);
+
+  @Public
+  @Unstable
+  public abstract String getQueue();
+
+  @Public
+  @Unstable
+  public abstract void setQueue(String queue);
+
+  @Public
+  @Unstable
+  public abstract long getSubmitTime();
+
+  @Public
+  @Unstable
+  public abstract void setSubmitTime(long submitTime);
+
+  @Public
+  @Unstable
+  public abstract long getStartTime();
+
+  @Public
+  @Unstable
+  public abstract void setStartTime(long startTime);
+
+}
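
For readers skimming the diff, a minimal standalone sketch (not part of this commit) of how the newInstance() factory added above might be called when an application start is recorded; the application name, type, queue, user and timestamps below are invented placeholders.

// Hedged illustration only: exercises the factory shown in the diff above.
// All literal values are made up; Records.newRecord() still needs the usual
// YARN record factory wiring to resolve the PB implementation at runtime.
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;

public class ApplicationStartDataExample {
  public static ApplicationStartData buildSampleStartData() {
    ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
    long submitTime = System.currentTimeMillis();
    long startTime = submitTime + 500; // pretend the app started half a second later
    return ApplicationStartData.newInstance(appId, "sample-app", "YARN",
        "default", "ambari-qa", submitTime, startTime);
  }
}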

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerFinishData.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerFinishData.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerFinishData.java
new file mode 100644
index 000..5eb9ddb
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/records/ContainerFinishData.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ *

[02/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/resources/scripts/ams_query.py
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/resources/scripts/ams_query.py
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/resources/scripts/ams_query.py
new file mode 100644
index 000..d51357a
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/resources/scripts/ams_query.py
@@ -0,0 +1,209 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import urllib2
+import signal
+import sys
+import optparse
+import time
+
+# http://162.216.148.45:8188/ws/v1/timeline/metrics?
+# metricNames=rpc.rpc.RpcAuthenticationSuccesses
+# &appId=nodemanager&hostname=local.0&startTime=1414152029&endTime=1414155629
+
+AMS_URL = "http://{0}:8188/ws/v1/timeline/metrics?metricNames={1}&appid={"; \
+  "2}&hostname={3}"
+
+# in fact it can be list automatically generated from ambari
+# UI queries
+host_metrics = {
+  'cpu': ['cpu_user', 'cpu_wio', 'cpu_nice', 'cpu_aidle', 'cpu_system', 'cpu_idle'],
+  'disk': ['disk_total', 'disk_free'],
+  'load': ['load_one', 'load_fifteen', 'load_five'],
+  'mem': ['swap_free', 'mem_shared', 'mem_free', 'mem_cached', 'mem_buffers'],
+  'network': ['bytes_in', 'bytes_out', 'pkts_in', 'pkts_out'],
+  'process': ['proc_total', 'proc_run']
+}
+
+# HDFS_SERVICE
+namenode_metrics = {
+  'dfs.Capacity': ['dfs.FSNamesystem.CapacityRemainingGB',
+   'dfs.FSNamesystem.CapacityUsedGB',
+   'dfs.FSNamesystem.CapacityTotalGB'],
+  'dfs.Replication': ['dfs.FSNamesystem.PendingReplicationBlocks',
+  'dfs.FSNamesystem.UnderReplicatedBlocks'],
+  'dfs.File': ['dfs.namenode.FileInfoOps', 'dfs.namenode.CreateFileOps'],
+  'jvm.gc': ['jvm.JvmMetrics.GcTimeMillis'],
+  'jvm.mem': ['jvm.JvmMetrics.MemNonHeapUsedM',
+  'jvm.JvmMetrics.MemNonHeapCommittedM',
+  'jvm.JvmMetrics.MemHeapUsedM',
+  'jvm.JvmMetrics.MemHeapCommittedM'],
+  'jvm.thread': ['jvm.JvmMetrics.ThreadsRunnable',
+ 'jvm.JvmMetrics.ThreadsBlocked',
+ 'jvm.JvmMetrics.ThreadsWaiting',
+ 'jvm.JvmMetrics.ThreadsTimedWaiting'],
+  'rpc': ['rpc.rpc.RpcQueueTimeAvgTime']
+}
+
+all_metrics = {
+  'HOST': host_metrics,
+  'namenode': namenode_metrics
+}
+
+all_metrics_times = {}
+
+
+# hostnames = ['EPPLKRAW0101.0']  # 'local.0'
+# metrics_test_host = '162.216.150.247' # metricstest-100
+# metrics_test_host = '162.216.148.45'# br-3
+# start_time = int(time.time())   # 1414425208
+
+
+def main(argv=None):
+  # Allow Ctrl-C
+  signal.signal(signal.SIGINT, signal_handler)
+
+  parser = optparse.OptionParser()
+
+  parser.add_option("-H", "--host", dest="host",
+help="AMS host")
+  parser.add_option("-t", "--starttime", dest="start_time_secs",
+default=int(time.time()),
+help="start time in seconds, default value is current 
time")
+  parser.add_option("-n", "--nodes", dest="node_names",
+help="nodes from cluster, used as a param to query for")
+  (options, args) = parser.parse_args()
+
+  if options.host is None:
+print "AMS host name is required (--host or -h)"
+exit(-1)
+
+  if options.node_names is None:
+print "cluster nodes are required (--nodes or -n)"
+exit(3)
+
+  global start_time_secs, metrics_test_host, hostnames
+
+  metrics_test_host = options.host
+  start_time_secs = int(options.start_time_secs)
+  hostnames = [options.node_names]
+
+  while True:
+run()
+time.sleep(15)
+start_time_secs += 15
+
+
+def signal_handler(signal, frame):
+  print('Exiting, Ctrl+C press detected!')
+  print_all_metrics(all_metrics_times)
+  sys.exit(0)
+
+
+def run():
+  hostname = ','.join(hostnames)
+  qs = QuerySender(metrics_test_host, True)
+  for metric_name in all_metrics:
+print
+print 'Querying for ' + metric_name + ' metrics'
+current_time_secs = start_time_secs
+qs.query_all_app_metrics(hostname, metric_name,
+ all_metrics[metric_name],
+
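
Alongside the Python script above, a hedged Java sketch of the same metrics GET request against the collector's /ws/v1/timeline/metrics endpoint; the host, metric names, appId and time window are placeholders, and the query parameters simply mirror the commented example URL rather than anything verified here.

// Hedged sketch only: issues one AMS metrics query with plain java.net classes.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class AmsQueryExample {
  public static String fetchMetrics(String amsHost, String metricNames,
      String appId, String hostname, long startSecs, long endSecs) throws Exception {
    // Build the same URL shape as the script's AMS_URL template.
    String spec = String.format(
        "http://%s:8188/ws/v1/timeline/metrics?metricNames=%s&appId=%s&hostname=%s&startTime=%d&endTime=%d",
        amsHost, metricNames, appId, hostname, startSecs, endSecs);
    HttpURLConnection conn = (HttpURLConnection) new URL(spec).openConnection();
    conn.setRequestMethod("GET");
    StringBuilder body = new StringBuilder();
    try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
      String line;
      while ((line = in.readLine()) != null) {
        body.append(line); // accumulate the JSON response
      }
    } finally {
      conn.disconnect();
    }
    return body.toString();
  }
}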

[16/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricHostAggregate.java
--
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricHostAggregate.java
 
b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricHostAggregate.java
deleted file mode 100644
index 02cc207..000
--- 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricHostAggregate.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
-
-/**
- * Represents a collection of minute based aggregation of values for
- * resolution greater than a minute.
- */
-public class MetricHostAggregate extends MetricAggregate {
-
-  private long numberOfSamples = 0;
-
-  @JsonCreator
-  public MetricHostAggregate() {
-super(0.0, 0.0, Double.MIN_VALUE, Double.MAX_VALUE);
-  }
-
-  public MetricHostAggregate(Double sum, int numberOfSamples,
- Double deviation,
- Double max, Double min) {
-super(sum, deviation, max, min);
-this.numberOfSamples = numberOfSamples;
-  }
-
-  @JsonProperty("numberOfSamples")
-  long getNumberOfSamples() {
-return numberOfSamples == 0 ? 1 : numberOfSamples;
-  }
-
-  void updateNumberOfSamples(long count) {
-this.numberOfSamples += count;
-  }
-
-  public void setNumberOfSamples(long numberOfSamples) {
-this.numberOfSamples = numberOfSamples;
-  }
-
-  public double getAvg() {
-return sum / numberOfSamples;
-  }
-
-  /**
-   * Find and update min, max and avg for a minute
-   */
-  void updateAggregates(MetricHostAggregate hostAggregate) {
-updateMax(hostAggregate.getMax());
-updateMin(hostAggregate.getMin());
-updateSum(hostAggregate.getSum());
-updateNumberOfSamples(hostAggregate.getNumberOfSamples());
-  }
-
-  @Override
-  public String toString() {
-return "MetricHostAggregate{" +
-  "sum=" + sum +
-  ", numberOfSamples=" + numberOfSamples +
-  ", deviation=" + deviation +
-  ", max=" + max +
-  ", min=" + min +
-  '}';
-  }
-}
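
A small, self-contained sketch (not the class being removed here) of the aggregation idea shown above: keep only sum, min, max and a sample count, merge rollups, and derive the average on read.

// Hedged sketch mirroring MetricHostAggregate's bookkeeping; it does not
// depend on the MetricAggregate base class and uses the same initial
// min/max values as the diff above.
public class HostAggregateSketch {
  private double sum = 0.0;
  private double min = Double.MAX_VALUE;
  private double max = Double.MIN_VALUE;
  private long numberOfSamples = 0;

  public void addSample(double value) {
    sum += value;
    min = Math.min(min, value);
    max = Math.max(max, value);
    numberOfSamples++;
  }

  // Merge another rollup, e.g. when folding minute aggregates into an hour.
  public void merge(HostAggregateSketch other) {
    sum += other.sum;
    min = Math.min(min, other.min);
    max = Math.max(max, other.max);
    numberOfSamples += other.numberOfSamples;
  }

  public double getAvg() {
    return numberOfSamples == 0 ? 0.0 : sum / numberOfSamples;
  }
}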

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsInitializationException.java
--
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsInitializationException.java
 
b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsInitializationException.java
deleted file mode 100644
index 88a427a..000
--- 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/MetricsInitializationException.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under

[21/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java
new file mode 100644
index 000..a0572a2
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data;
+
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util.Json;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.entry;
+import static org.junit.Assert.assertEquals;
+
+public class TestMetric {
+  private static final String SAMPLE_METRIC_IN_JSON = "{\n" +
+"  \"instanceid\" : \"\",\n" +
+"  \"hostname\" : \"localhost\",\n" +
+"  \"metrics\" : {\n" +
+"\"0\" : \"5.35\",\n" +
+"\"5000\" : \"5.35\",\n" +
+"\"1\" : \"5.35\",\n" +
+"\"15000\" : \"5.35\"\n" +
+"  },\n" +
+"  \"starttime\" : \"0\",\n" +
+"  \"appid\" : \"HOST\",\n" +
+"  \"metricname\" : \"disk_free\"\n" +
+"}";
+
+  @Test
+  public void testSerializeToJson() throws IOException {
+Metric diskOnHostMetric = new Metric(new ApplicationInstance("localhost", AppID.HOST, ""), "disk_free", 0);
+
+long timestamp = 0;
+double value = 5.35;
+
+diskOnHostMetric.putMetric(timestamp, Double.toString(value));
+diskOnHostMetric.putMetric(timestamp + 5000, Double.toString(value));
+diskOnHostMetric.putMetric(timestamp + 1, Double.toString(value));
+diskOnHostMetric.putMetric(timestamp + 15000, Double.toString(value));
+
+String expected = SAMPLE_METRIC_IN_JSON;
+String s = new Json(true).serialize(diskOnHostMetric);
+
+assertEquals("Json should match", expected, s);
+  }
+
+  @Test
+  public void testDeserializeObjectFromString() throws IOException {
+String source = SAMPLE_METRIC_IN_JSON;
+
+Metric m = new Json().deserialize(source, Metric.class);
+
+assertEquals("localhost", m.getHostname());
+assertEquals("HOST", m.getAppid());
+assertEquals("", m.getInstanceid());
+assertEquals("disk_free", m.getMetricname());
+assertEquals("0", m.getStarttime());
+
+assertThat(m.getMetrics()).isNotEmpty().hasSize(4).contains(
+  entry("0", "5.35"),
+  entry("5000", "5.35"),
+  entry("1", "5.35"),
+  entry("15000", "5.35"));
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
new file mode 100644
index 000..4411be5
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work fo

[18/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
--
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
 
b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
deleted file mode 100644
index 85a5e3a..000
--- 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
+++ /dev/null
@@ -1,250 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.applicationhistoryservice;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.service.AbstractService;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerReport;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
-import 
org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.MemoryTimelineStore;
-import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
-
-import com.google.common.annotations.VisibleForTesting;
-
-import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.DISABLE_APPLICATION_TIMELINE_STORE;
-
-public class ApplicationHistoryManagerImpl extends AbstractService implements
-ApplicationHistoryManager {
-  private static final Log LOG = LogFactory
-.getLog(ApplicationHistoryManagerImpl.class);
-  private static final String UNAVAILABLE = "N/A";
-
-  private ApplicationHistoryStore historyStore;
-  private String serverHttpAddress;
-
-  public ApplicationHistoryManagerImpl() {
-super(ApplicationHistoryManagerImpl.class.getName());
-  }
-
-  @Override
-  protected void serviceInit(Configuration conf) throws Exception {
-LOG.info("ApplicationHistory Init");
-historyStore = createApplicationHistoryStore(conf);
-historyStore.init(conf);
-serverHttpAddress = WebAppUtils.getHttpSchemePrefix(conf) +
-WebAppUtils.getAHSWebAppURLWithoutScheme(conf);
-super.serviceInit(conf);
-  }
-
-  @Override
-  protected void serviceStart() throws Exception {
-LOG.info("Starting ApplicationHistory");
-historyStore.start();
-super.serviceStart();
-  }
-
-  @Override
-  protected void serviceStop() throws Exception {
-LOG.info("Stopping ApplicationHistory");
-historyStore.stop();
-super.serviceStop();
-  }
-
-  protected ApplicationHistoryStore createApplicationHistoryStore(
-  Configuration conf) {
-if (conf.getBoolean(DISABLE_APPLICATION_TIMELINE_STORE, true)) {
-  LOG.info("Explicitly disabled application timeline store.");
-  return new NullApplicationHistoryStore();
-}
-return ReflectionUtils.newInstance(conf.getClass(
-  YarnConfiguration.APPLICATION_HISTORY_STORE,
-  NullApplicationHistoryStore.class,
-  ApplicationHistoryStore.class), conf);
-  }
-
-  @Override
-  public ContainerReport getAMContainer(ApplicationAttemptId 
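
The createApplicationHistoryStore() method above follows a common Hadoop pattern: a boolean flag short-circuits to a no-op store, otherwise the class named in the configuration is created reflectively. A hedged, generic sketch of that pattern with made-up property names and store types:

// Hedged sketch only: illustrates the flag-plus-reflection store selection.
// "example.history.store.disabled" and "example.history.store.class" are
// invented keys, not properties from this change.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;

public class StoreSelectionExample {
  public interface HistoryStore { }
  public static class NullStore implements HistoryStore { }
  public static class LeveldbStore implements HistoryStore { }

  public static HistoryStore createStore(Configuration conf) {
    if (conf.getBoolean("example.history.store.disabled", true)) {
      return new NullStore(); // explicitly disabled, nothing is persisted
    }
    // Instantiate whatever implementation the configuration names.
    return ReflectionUtils.newInstance(
        conf.getClass("example.history.store.class", NullStore.class, HistoryStore.class),
        conf);
  }
}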

[03/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSController.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSController.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSController.java
new file mode 100644
index 000..4e00bc8
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSController.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import org.apache.hadoop.yarn.webapp.Controller;
+
+import com.google.inject.Inject;
+
+public class AHSController extends Controller {
+
+  @Inject
+  AHSController(RequestContext ctx) {
+super(ctx);
+  }
+
+  @Override
+  public void index() {
+setTitle("Application History");
+  }
+
+  public void app() {
+render(AppPage.class);
+  }
+
+  public void appattempt() {
+render(AppAttemptPage.class);
+  }
+
+  public void container() {
+render(ContainerPage.class);
+  }
+
+  /**
+   * Render the logs page.
+   */
+  public void logs() {
+render(AHSLogsPage.class);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSLogsPage.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSLogsPage.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSLogsPage.java
new file mode 100644
index 000..8821bc0
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSLogsPage.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
+
+import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.log.AggregatedLogsBlock;
+
+public class AHSLogsPage extends AHSView {
+  /*
+   * (non-Javadoc)
+   * 
+   * @see
+   * org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSView#
+   * preHead(org.apache.hadoop .yarn.webapp.hamlet.Hamlet.HTML)
+   */
+  @Override
+  protected void preHead(Page.HTML<_> html) {
+String logEntity = $(ENTITY_STRING);
+if (logEntity == null || logEntity.isEmpty()) {
+  logEntity = $(CONTAINER_ID);
+}
+if (logEntity == null || logEntity.isEmpty()) {
+  logEntity = "UNKNOWN";
+}
+commonPreHead(html);
+  }
+
+  /**
+   * The content of this page is the AggregatedLogsBlock
+   * 
+   * @return AggregatedLogsBlock.class
+   */
+  @Override
+  protected Class content() {
+return AggregatedL

[17/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/LoadRunner.java
--
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/LoadRunner.java
 
b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/LoadRunner.java
deleted file mode 100644
index 7974a5f..000
--- 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/LoadRunner.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator;
-
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.data.AppID;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.data.ApplicationInstance;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.data.HostMetricsGenerator;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.data.MetricsGeneratorConfigurer;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.net.MetricsSender;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.net.RestMetricsSender;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.net.StdOutMetricsSender;
-import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.util.TimeStampProvider;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.List;
-import java.util.concurrent.*;
-
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.data.AppID.MASTER_APPS;
-import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
-  .loadsimulator.data.AppID.SLAVE_APPS;
-
-/**
- *
- */
-public class LoadRunner {
-  private final static Logger LOG = LoggerFactory.getLogger(LoadRunner.class);
-
-  private final ScheduledExecutorService timer;
-  private final ExecutorService workersPool;
-  private final Collection> workers;
-  private final long startTime = new Date().getTime();
-  private final int collectIntervalMillis;
-  private final int sendIntervalMillis;
-
-  public LoadRunner(String hostName,
-int threadCount,
-String metricsHostName,
-int collectIntervalMillis,
-int sendIntervalMillis,
-boolean createMaster) {
-this.collectIntervalMillis = collectIntervalMillis;
-this.workersPool = Executors.newFixedThreadPool(threadCount);
-this.timer = Executors.newScheduledThreadPool(1);
-this.sendIntervalMillis = sendIntervalMillis;
-
-workers = prepareWorkers(hostName, threadCount, metricsHostName, createMaster);
-  }
-
-  private Collection> prepareWorkers(String hostName,
-  int threadCount,
-  String metricsHost,
-  Boolean createMaster) {
-Collection> senderWorkers =
-  new ArrayList>(threadCount);
-
-int startIndex = 0;
-if (createMaster) {
-  String simHost = hostName + ".0";
-  addMetricsWorkers(senderWorkers, simHost, metricsHost, MASTER_APPS);
-  startIndex++;
-}
-
-for (int i = startIndex; i < threadCount; i++) {
-  String simHost = hostName + "." + i;
-  addMetricsWorkers(senderWorkers, simHost, metricsHost, SLAVE_APPS);
-}
-
-return senderWorkers;
-  }
-
-  private void addMetricsWorkers(Collection> senderWorkers,
- Stri
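
A simplified, hypothetical sketch of the scheduling pattern in LoadRunner above: a timer periodically submits the sender tasks to a fixed worker pool and waits for the batch; the pool size and 15-second period below are invented.

// Hedged sketch only: periodic fan-out of sender tasks to a worker pool.
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class LoadRunnerSketch {
  private final ExecutorService workersPool = Executors.newFixedThreadPool(4);
  private final ScheduledExecutorService timer = Executors.newScheduledThreadPool(1);

  public void start(final List<Callable<String>> workers) {
    timer.scheduleAtFixedRate(new Runnable() {
      @Override
      public void run() {
        try {
          workersPool.invokeAll(workers); // one send round per simulated host
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        }
      }
    }, 0, 15, TimeUnit.SECONDS);
  }

  public void stop() {
    timer.shutdownNow();
    workersPool.shutdownNow();
  }
}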

[07/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
new file mode 100644
index 000..5130ae3
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/RestMetricsSender.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.net;
+
+import com.google.common.base.Stopwatch;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.ProtocolException;
+
+/**
+ * Implements MetricsSender and provides a way of pushing metrics to application metrics history service using REST
+ * endpoint.
+ */
+public class RestMetricsSender implements MetricsSender {
+  private final static Logger LOG = LoggerFactory.getLogger(RestMetricsSender.class);
+
+  private final static String COLLECTOR_URL = "http://%s:8188/ws/v1/timeline/metrics";
+  private final String collectorServiceAddress;
+
+  /**
+   * Creates unconnected RestMetricsSender with endpoint configured as
+   * http://${metricsHost}:8188/ws/v1/timeline/metrics,
+   * where ${metricsHost} is specified by metricHost param.
+   *
+   * @param metricsHost the hostname that will be used to access application metrics history service.
+   */
+  public RestMetricsSender(String metricsHost) {
+collectorServiceAddress = String.format(COLLECTOR_URL, metricsHost);
+  }
+
+  /**
+   * Push metrics to the REST endpoint. Connection is always open and closed on every call.
+   *
+   * @param payload the payload with metrics to be sent to metrics service
+   * @return response message either acknowledgement or error, empty on exception
+   */
+  @Override
+  public String pushMetrics(String payload) {
+String responseString = "";
+UrlService svc = null;
+Stopwatch timer = new Stopwatch().start();
+
+try {
+  LOG.info("server: {}", collectorServiceAddress);
+
+  svc = getConnectedUrlService();
+  responseString = svc.send(payload);
+
+  timer.stop();
+  LOG.info("http response time: " + timer.elapsedMillis() + " ms");
+
+  if (responseString.length() > 0) {
+LOG.debug("POST response from server: " + responseString);
+  }
+} catch (MalformedURLException e) {
+  LOG.error("", e);
+} catch (ProtocolException e) {
+  LOG.error("", e);
+} catch (IOException e) {
+  LOG.error("", e);
+} finally {
+  if (svc != null) {
+svc.disconnect();
+  }
+}
+
+return responseString;
+  }
+
+  /**
+   * Relaxed to protected for testing.
+   */
+  protected UrlService getConnectedUrlService() throws IOException {
+return UrlService.newConnection(collectorServiceAddress);
+  }
+
+}
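
A hedged usage sketch for the sender added above; the collector host is a placeholder, and the JSON payload is hand-written to resemble the TimelineMetrics shape seen elsewhere in this change rather than an exact wire format.

// Hedged sketch only: pushes one made-up metric through RestMetricsSender.
// Assumes RestMetricsSender (added above) is on the classpath.
public class RestMetricsSenderExample {
  public static void main(String[] args) {
    String payload = "{\"metrics\":[{\"metricname\":\"disk_free\","
        + "\"appid\":\"HOST\",\"hostname\":\"localhost\",\"starttime\":0,"
        + "\"metrics\":{\"0\":\"5.35\"}}]}";
    RestMetricsSender sender = new RestMetricsSender("ams-collector.example.com");
    String response = sender.pushMetrics(payload);
    System.out.println("Collector response: " + response);
  }
}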

http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/StdOutMetricsSender.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/StdOutMetricsSender.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/StdOutMetricsSender.java
new file mode 100644
index 000..aeb4ca8
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apach

[01/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/branch-metrics-dev 11b9c2392 -> c20904e41


http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
new file mode 100644
index 000..2b93190
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
@@ -0,0 +1,391 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import static org.junit.Assert.assertEquals;
+
+import javax.ws.rs.core.MediaType;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TestTimelineMetricStore;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TestMemoryTimelineStore;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
+import org.junit.Test;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+
+public class TestTimelineWebServices extends JerseyTest {
+
+  private static TimelineStore store;
+  private static TimelineMetricStore metricStore;
+  private long beforeTime;
+
+  private Injector injector = Guice.createInjector(new ServletModule() {
+
+@Override
+protected void configureServlets() {
+  bind(YarnJacksonJaxbJsonProvider.class);
+  bind(TimelineWebServices.class);
+  bind(GenericExceptionHandler.class);
+  try{
+store = mockTimelineStore();
+metricStore = new TestTimelineMetricStore();
+  } catch (Exception e) {
+Assert.fail();
+  }
+  bind(TimelineStore.class).toInstance(store);
+  bind(TimelineMetricStore.class).toInstance(metricStore);
+  serve("/*").with(GuiceContainer.class);
+}
+
+  });
+
+  public class GuiceServletConfig extends GuiceServletContextListener {
+
+@Override
+protected Injector getInjector() {
+  return injector;
+}
+  }
+
+  private TimelineStore mockTimelineStore()
+  throws Exception {
+beforeTime = System.currentTimeMillis() - 1;
+TestMemoryTimelineStore store = new TestMemoryTimelineStore();
+store.setup();
+return store.getTimelineStore();
+  }
+
+  public TestTimelineWebServices() {
+super(new WebAppDescriptor.Builder(
+"org.apache.hadoop.yarn.server.applicationhistoryservice.webapp")
+.contextListenerClass(GuiceServletConfig.class)
+.filterClass(com.google.inject.servlet.GuiceFilter.class)
+.contextPath("jersey-guice-filter")
+.servletPath

[15/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
--
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
 
b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
deleted file mode 100644
index 60833d0..000
--- 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TimelineMetricConfiguration.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * Configuration class that reads properties from ams-site.xml. All values
- * for time or intervals are given in seconds.
- */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-public interface TimelineMetricConfiguration {
-  public static final String HBASE_SITE_CONFIGURATION_FILE = "hbase-site.xml";
-  public static final String METRICS_SITE_CONFIGURATION_FILE = "ams-site.xml";
-
-  public static final String TIMELINE_METRICS_AGGREGATOR_CHECKPOINT_DIR =
-"timeline.metrics.aggregator.checkpoint.dir";
-
-  public static final String DEFAULT_CHECKPOINT_LOCATION =
-System.getProperty("java.io.tmpdir");
-
-  public static final String HBASE_ENCODING_SCHEME =
-"timeline.metrics.hbase.data.block.encoding";
-
-  public static final String HBASE_COMPRESSION_SCHEME =
-"timeline.metrics.hbase.compression.scheme";
-
-  public static final String PRECISION_TABLE_TTL =
-"timeline.metrics.host.aggregator.ttl";
-  public static final String HOST_MINUTE_TABLE_TTL =
-"timeline.metrics.host.aggregator.minute.ttl";
-  public static final String HOST_HOUR_TABLE_TTL =
-"timeline.metrics.host.aggregator.hourly.ttl";
-  public static final String CLUSTER_MINUTE_TABLE_TTL =
-"timeline.metrics.cluster.aggregator.minute.ttl";
-  public static final String CLUSTER_HOUR_TABLE_TTL =
-"timeline.metrics.cluster.aggregator.hourly.ttl";
-
-  public static final String CLUSTER_AGGREGATOR_TIMESLICE_INTERVAL =
-"timeline.metrics.cluster.aggregator.minute.timeslice.interval";
-
-  public static final String AGGREGATOR_CHECKPOINT_DELAY =
-"timeline.metrics.service.checkpointDelay";
-
-  public static final String RESULTSET_FETCH_SIZE =
-"timeline.metrics.service.resultset.fetchSize";
-
-  public static final String HOST_AGGREGATOR_MINUTE_SLEEP_INTERVAL =
-"timeline.metrics.host.aggregator.minute.interval";
-
-  public static final String HOST_AGGREGATOR_HOUR_SLEEP_INTERVAL =
-"timeline.metrics.host.aggregator.hourly.interval";
-
-  public static final String CLUSTER_AGGREGATOR_MINUTE_SLEEP_INTERVAL =
-"timeline.metrics.cluster.aggregator.minute.interval";
-
-  public static final String CLUSTER_AGGREGATOR_HOUR_SLEEP_INTERVAL =
-"timeline.metrics.cluster.aggregator.hourly.interval";
-
-  public static final String HOST_AGGREGATOR_MINUTE_CHECKPOINT_CUTOFF_MULTIPLIER =
-"timeline.metrics.host.aggregator.minute.checkpointCutOffMultiplier";
-
-  public static final String HOST_AGGREGATOR_HOUR_CHECKPOINT_CUTOFF_MULTIPLIER =
-"timeline.metrics.host.aggregator.hourly.checkpointCutOffMultiplier";
-
-  public static final String CLUSTER_AGGREGATOR_MINUTE_CHECKPOINT_CUTOFF_MULTIPLIER =
-"timeline.metrics.cluster.aggregator.minute.checkpointCutOffMultiplier";
-
-  public static final String CLUSTER_AGGREGATOR_HOUR_CHECKPOINT_CUTOFF_MULTIPLIER =
-"timeline.metrics.cluster.aggregator.hourly.checkpointCutOffMultiplier";
-
-  public static final String CLUSTER_AGGREGATOR_HOUR_CHECKPOINT_CUTOFF_INTERVAL =
-"timeline.metrics.cluster.a

[08/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java
new file mode 100644
index 000..4c8d745
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/FileSystemApplicationHistoryStore.java
@@ -0,0 +1,784 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.file.tfile.TFile;
+import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import 
org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto;
+import 
org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto;
+import 
org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto;
+import 
org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto;
+import 
org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto;
+import 
org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptFinishDataPBImpl;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptStartDataPBImpl;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationFinishDataPBImpl;
+import 
org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationStartDataPBI

[10/22] ambari git commit: AMBARI-5707. Renaming a module. (swagle)

2014-12-01 Thread swagle
http://git-wip-us.apache.org/repos/asf/ambari/blob/ba3d6926/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
--
diff --git 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
 
b/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
deleted file mode 100644
index 9b27309..000
--- 
a/ambari-metrics/ambari-metrics-hadoop-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileContext;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
-import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.iq80.leveldb.DBIterator;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.writeReverseOrderedLong;
-import static org.junit.Assert.assertEquals;
-
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
-public class TestLeveldbTimelineStore extends TimelineStoreTestUtils {
-  private FileContext fsContext;
-  private File fsPath;
-
-  @Before
-  public void setup() throws Exception {
-fsContext = FileContext.getLocalFSFileContext();
-Configuration conf = new Configuration();
-fsPath = new File("target", this.getClass().getSimpleName() +
-"-tmpDir").getAbsoluteFile();
-fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
-conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH,
-fsPath.getAbsolutePath());
-conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, false);
-store = new LeveldbTimelineStore();
-store.init(conf);
-store.start();
-loadTestData();
-loadVerificationData();
-  }
-
-  @After
-  public void tearDown() throws Exception {
-store.stop();
-fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
-  }
-
-  @Test
-  public void testGetSingleEntity() throws IOException {
-super.testGetSingleEntity();
-((LeveldbTimelineStore)store).clearStartTimeCache();
-super.testGetSingleEntity();
-loadTestData();
-  }
-
-  @Test
-  public void testGetEntities() throws IOException {
-super.testGetEntities();
-  }
-
-  @Test
-  public void testGetEntitiesWithFromId() throws IOException {
-super.testGetEntitiesWithFromId();
-  }
-
-  @Test
-  public void testGetEntitiesWithFromTs() throws IOException {
-super.testGetEntitiesWithFromTs();
-  }
-
-  @Test
-  public void testGetEntitiesWithPrimaryFilters() throws IOException {
-super.testGetEntitiesWithPrimaryFilters();
-  }
-
-  @Test
-  public void testGetEntitiesWithSecondaryFilters() throws IOException {
-super.testGetEntitiesWithSecondaryFilters();
-  }
-
-  @Test
-  public void testGetEvents() throws IOException {
-super.testGetEvents();
-  }
-
-  @Test
-  public void testCacheSizes() {
-Configuration conf = new Configuration();
-assertEquals(1, LeveldbTimelineStore.getStartTimeReadCacheSize(conf));
-assertEquals(1, LeveldbTimelineStore.getStartTimeWriteCacheSize(conf));
-conf.setInt(
-YarnConfiguration.TIMELINE_SERVICE_L
