[2/3] ambari git commit: AMBARI-8876: Common Services: Refactor HDPWIN 2.1 stack to use common services (Jayush Luniya)

2014-12-30 Thread jluniya
http://git-wip-us.apache.org/repos/asf/ambari/blob/af6f6e87/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/configuration/hdfs-site.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/configuration/hdfs-site.xml
 
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/configuration/hdfs-site.xml
index f267e51..63e4c95 100644
--- 
a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/configuration/hdfs-site.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HDFS/configuration/hdfs-site.xml
@@ -1,8 +1,26 @@
 
 
+
+
+
 
 
-  
+
   
 dfs.namenode.name.dir
 file:///c:/hdpdata/hdfs/nn
@@ -13,24 +31,6 @@
 true
   
   
-dfs.support.append
-true
-to enable dfs append
-true
-  
-  
-dfs.webhdfs.enabled
-true
-to enable webhdfs
-true
-  
-  
-dfs.datanode.failed.volumes.tolerated
-0
-#of failed disks dn would tolerate
-true
-  
-  
 dfs.datanode.data.dir
 file:///c:/hdpdata/hdfs/dn
 Determines where on the local filesystem an DFS data node
@@ -42,231 +42,41 @@
 true
   
   
-dfs.checksum.type
-CRC32
-The checksum method to be used by default. To maintain
-compatibility, it is being set to CRC32. Once all migration steps
-are complete, we can change it to CRC32C and take advantage of the
-additional performance benefit.
-  
-  
-dfs.replication.max
-50
-Maximal block replication.
-  
-  
-  
-dfs.heartbeat.interval
-3
-Determines datanode heartbeat interval in 
seconds.
-  
-  
-dfs.namenode.safemode.threshold-pct
-1.0f
-
-Specifies the percentage of blocks that should satisfy
-the minimal replication requirement defined by dfs.replication.min.
-Values less than or equal to 0 mean not to start in safe mode.
-Values greater than 1 will make safe mode permanent.
-
-  
-  
-dfs.datanode.balance.bandwidthPerSec
-625
-
-Specifies the maximum amount of bandwidth that each datanode
-can utilize for the balancing purpose in term of
-the number of bytes per second.
-  
-  
-  
-dfs.datanode.address
-0.0.0.0:50010
-  
-  
-dfs.datanode.http.address
-0.0.0.0:50075
-  
-  
-dfs.datanode.https.address
-0.0.0.0:50076
-  
-  
-dfs.blocksize
-134217728
-The default block size for new files, in bytes.
-  You can use the following suffix (case insensitive): k(kilo),
-  m(mega), g(giga), t(tera), p(peta), e(exa) to specify the
-  size (such as 128k, 512m, 1g, etc.), Or provide complete size
-  in bytes (such as 134217728 for 128 MB).
-  
-  
-dfs.namenode.http-address
-localhost:50070
-The address and the base port where the dfs namenode
-  web ui will listen on. If the port is 0 then the server will
-  start on a free port.
-true
-  
-  
-dfs.https.port
-50070
-true
-  
-  
-dfs.datanode.du.reserved
-1073741824
-Reserved space in bytes per volume. Always leave this much 
space free for non dfs use.
-
-  
-  
-dfs.datanode.ipc.address
-0.0.0.0:8010
-The datanode ipc server address and port.
-  If the port is 0 then the server will start on a free port.
-
-  
-  
-dfs.blockreport.initialDelay
-120
-Delay for first block report in seconds.
-  
-  
-dfs.datanode.du.pct
-0.85f
-When calculating remaining space, only use this percentage of 
the real available space
-
+dfs.hosts.exclude
+c:\hdp\hadoop\etc\hadoop\dfs.exclude
+Names a file that contains a list of hosts that are
+  not permitted to connect to the namenode.  The full pathname of the
+  file must be specified.  If the value is empty, no hosts are
+  excluded.
   
   
-dfs.namenode.handler.count
-40
-The number of server threads for the namenode.
+dfs.hosts
+c:\hdp\hadoop\etc\hadoop\dfs.include
+Names a file that contains a list of hosts that are
+  permitted to connect to the namenode. The full pathname of the file
+  must be specified.  If the value is empty, all hosts are
+  permitted.
   
   
 dfs.namenode.checkpoint.dir
-file:///c:/hdpdata/hdfs/snn
+file:///c:/hadoop/hdfs/snn
 Determines where on the local filesystem the DFS secondary
-name node should store the temporary images to merge.
-If this is a comma-delimited list of directories then the image is
-replicated in all of the directories for redundancy.
+  name node should store the temporary images to merge.
+  If this is a comma-delimited list of directories then the image is
+  replicated in all of the directories for redundancy.
 
   
+
   
-dfs.namenode.checkpoint.edits.dir
-file:///c:/hadoop/hdfs/namesecondary
-Determines where on the local filesystem the DFS secondary

[3/3] ambari git commit: AMBARI-8876: Common Services: Refactor HDPWIN 2.1 stack to use common services (Jayush Luniya)

2014-12-30 Thread jluniya
AMBARI-8876: Common Services: Refactor HDPWIN 2.1 stack to use common services 
(Jayush Luniya)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/af6f6e87
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/af6f6e87
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/af6f6e87

Branch: refs/heads/trunk
Commit: af6f6e877209d5ab5d25fe2858259c234137eae5
Parents: 8567905
Author: Jayush Luniya 
Authored: Tue Dec 30 21:44:28 2014 -0800
Committer: Jayush Luniya 
Committed: Tue Dec 30 21:44:28 2014 -0800

--
 .../FALCON/configuration/falcon-env.xml |  68 +-
 .../configuration/falcon-runtime.properties.xml |  47 --
 .../configuration/falcon-startup.properties.xml | 207 ---
 .../FALCON/configuration/oozie-site.xml | 167 -
 .../HDPWIN/2.1/services/FALCON/metainfo.xml |  70 +--
 .../services/HBASE/configuration/hbase-env.xml  |  29 +-
 .../HBASE/configuration/hbase-policy.xml|  53 --
 .../services/HBASE/configuration/hbase-site.xml | 292 +
 .../HDPWIN/2.1/services/HBASE/metainfo.xml  |  93 +--
 .../services/HDFS/configuration/core-site.xml   | 213 +--
 .../services/HDFS/configuration/hadoop-env.xml  | 119 
 .../HDFS/configuration/hadoop-policy.xml| 219 ---
 .../services/HDFS/configuration/hdfs-site.xml   | 274 ++---
 .../HDPWIN/2.1/services/HDFS/metainfo.xml   | 128 +---
 .../services/HIVE/configuration/hcat-env.xml|  31 +
 .../services/HIVE/configuration/hive-env.xml|  42 +-
 .../services/HIVE/configuration/hive-site.xml   | 282 -
 .../services/HIVE/configuration/webhcat-env.xml |  31 +
 .../HIVE/configuration/webhcat-site.xml |  51 +-
 .../HDPWIN/2.1/services/HIVE/metainfo.xml   | 165 +
 .../services/OOZIE/configuration/oozie-env.xml  |  21 +-
 .../OOZIE/configuration/oozie-log4j.xml |  96 ---
 .../services/OOZIE/configuration/oozie-site.xml | 540 +++-
 .../HDPWIN/2.1/services/OOZIE/metainfo.xml  |  91 +--
 .../services/PIG/configuration/pig-log4j.xml|  61 --
 .../PIG/configuration/pig-properties.xml| 262 
 .../stacks/HDPWIN/2.1/services/PIG/metainfo.xml |  39 +-
 .../services/SQOOP/configuration/sqoop-env.xml  |  10 +-
 .../HDPWIN/2.1/services/SQOOP/metainfo.xml  |  45 +-
 .../services/STORM/configuration/storm-env.xml  |  47 ++
 .../services/STORM/configuration/storm-site.xml | 616 +--
 .../HDPWIN/2.1/services/STORM/metainfo.xml  |  66 +-
 .../2.1/services/TEZ/configuration/tez-env.xml  |  36 ++
 .../2.1/services/TEZ/configuration/tez-site.xml | 193 +-
 .../stacks/HDPWIN/2.1/services/TEZ/metainfo.xml |  28 +-
 .../HDPWIN/2.1/services/YARN/metainfo.xml   |   4 +-
 .../ZOOKEEPER/configuration/zoo.cfg.xml |  22 +-
 .../ZOOKEEPER/configuration/zookeeper-env.xml   |  50 +-
 .../ZOOKEEPER/configuration/zookeeper-log4j.xml | 100 ---
 .../HDPWIN/2.1/services/ZOOKEEPER/metainfo.xml  |  42 +-
 ambari-web/app/utils/validator.js   |   8 +-
 41 files changed, 641 insertions(+), 4317 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/af6f6e87/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/FALCON/configuration/falcon-env.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/FALCON/configuration/falcon-env.xml
 
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/FALCON/configuration/falcon-env.xml
index 0a12051..6ececc6 100644
--- 
a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/FALCON/configuration/falcon-env.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/FALCON/configuration/falcon-env.xml
@@ -21,89 +21,35 @@
 
   
 falcon_user
-falcon
-Falcon user.
-  
-  
-falcon_port
-15000
-Port the Falcon Server listens on.
+true
   
   
 falcon_log_dir
-/var/log/falcon
+c:\hadoop\logs\falcon
 Falcon log directory.
   
   
 falcon_pid_dir
-/var/run/falcon
+c:\hadoop\run\falcon
 Falcon pid-file directory.
   
   
 falcon_local_dir
-/hadoop/falcon
+c:\hadoop\falcon
 Directory where Falcon data, such as activemq data, is 
stored.
   
   
   
 falcon.embeddedmq.data
-/hadoop/falcon/embeddedmq/data
+c:\hadoop\falcon\embeddedmq\data
 Directory in which embeddedmq data is stored.
   
-  
-falcon.embeddedmq
-true
-Whether embeddedmq is enabled or not.
-  
-  
-falcon.emeddedmq.port
-61616
-Port that embeddedmq will listen on.
-  
 
-  
+  
   
 content
-falcon-env.sh content
+This is the jinja template for falcon-env.cmd 
file
 
-# The java implementation to use. If JAVA_HOME is not found we expect java and 
jar to be in path
-expor

[1/3] ambari git commit: AMBARI-8876: Common Services: Refactor HDPWIN 2.1 stack to use common services (Jayush Luniya)

2014-12-30 Thread jluniya
Repository: ambari
Updated Branches:
  refs/heads/trunk 856790517 -> af6f6e877


http://git-wip-us.apache.org/repos/asf/ambari/blob/af6f6e87/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/PIG/configuration/pig-log4j.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/PIG/configuration/pig-log4j.xml
 
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/PIG/configuration/pig-log4j.xml
deleted file mode 100644
index cbdd452..0000000
--- 
a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/PIG/configuration/pig-log4j.xml
+++ /dev/null
@@ -1,61 +0,0 @@
-
-
-
-
-
-
-  
-content
-
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-#
-
-# * Set root logger level to DEBUG and its only appender to A.
-log4j.logger.org.apache.pig=info, A
-
-# * A is set to be a ConsoleAppender.
-log4j.appender.A=org.apache.log4j.ConsoleAppender
-# * A uses PatternLayout.
-log4j.appender.A.layout=org.apache.log4j.PatternLayout
-log4j.appender.A.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
-
-  
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/af6f6e87/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/PIG/configuration/pig-properties.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/PIG/configuration/pig-properties.xml
 
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/PIG/configuration/pig-properties.xml
deleted file mode 100644
index 88e2fea..0000000
--- 
a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/PIG/configuration/pig-properties.xml
+++ /dev/null
@@ -1,262 +0,0 @@
-
-
-
-
-
-
-  
-pig-content
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#  http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Pig configuration file. All values can be overwritten by command line 
arguments.
-
-# Use the "-h properties" command to see description of the properties
-
-# log4jconf log4j configuration file
-# log4jconf=./conf/log4j.properties
-
-# a file that contains pig script
-#file=
-
-# load jarfile, colon separated
-#jar=
-
-#verbose print all log messages to screen (default to print only INFO and 
above to screen)
-#verbose=true
-
-#exectype local|mapreduce, mapreduce is default
-#exectype=local
-
-#the default timezone: if it is not set, the default timezone for this host is 
used.
-#the correct timezone format is the UTC offset: e.g., +08:00.
-#pig.datetime.default.tz=
-
-#pig.logfile=
-
-#Do not spill temp files smaller than this size (bytes)
-#pig.spill.size.threshold=500
-
-#EXPERIMENT: Activate garbage collection when spilling a file bigger than this 
size (bytes)
-#This should help reduce the number of files being spilled.
-#pig.spill.gc.activation.size=4000
-
-#the following two parameters are to help estimate the reducer number
-#pig.exec.reducers.bytes.per.reducer=10
-#pig.exec.reducers.max=999
-
-#Logging properties
-#verbose=false
-#brief=false
-#debug=INFO
-#aggregate.warning=true
-
-#Performance tuning properties
-#pig.cachedbag.memusage=0.2
-#pig.skewedjoin.reduce.memusage=0.3
-#pig.exec.nocombiner=false
-#opt.multiquery=true
-
-#Following parameters are for configuring intermediate storage format
-#Supported storage types are seqfile and tfile
-#Supported codec types: tfile supports gz(gzip) and lzo, seqfile support 
gz(gzip), lzo, snappy, bzip2
-#pig.tmpfilecompre

ambari git commit: AMBARI-8841 - Validate Ambari on Win with hdp-2.2.msi

2014-12-30 Thread abaranchuk
Repository: ambari
Updated Branches:
  refs/heads/trunk b01fdc5bd -> 856790517


AMBARI-8841 - Validate Ambari on Win with hdp-2.2.msi


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/85679051
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/85679051
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/85679051

Branch: refs/heads/trunk
Commit: 85679051769b83d863868e3e2f8e57ffb925382c
Parents: b01fdc5
Author: Artem Baranchuk 
Authored: Fri Dec 19 23:53:57 2014 +0200
Committer: Artem Baranchuk 
Committed: Wed Dec 31 01:23:04 2014 +0200

--
 .../libraries/functions/install_hdp_msi.py  | 21 +---
 .../libraries/script/script.py  |  3 ++-
 .../HIVE/package/scripts/hive_client.py |  3 ++-
 3 files changed, 22 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/85679051/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
--
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
index a7c2fe2..c016fbc 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/install_hdp_msi.py
@@ -28,6 +28,7 @@ from resource_management.core.logger import Logger
 from resource_management.core.exceptions import Fail
 from resource_management.libraries.functions.reload_windows_env import 
reload_windows_env
 from resource_management.libraries.functions.windows_service_utils import 
check_windows_service_exists
+from resource_management.libraries.functions.version import 
format_hdp_stack_version, compare_versions
 import socket
 import os
 import glob
@@ -40,12 +41,20 @@ hdp_log_dir = "c:\\hadoop\\logs"
 hdp_data_dir = "c:\\hadoopDefaultData"
 local_host = socket.getfqdn()
 db_flavor = "DERBY"
+hdp_22 = """#Namenode Data directory
+HDFS_NAMENODE_DATA_DIR={hdp_data_dir}\\hdpdatann
+
+#Datanode Data directory
+HDFS_DATANODE_DATA_DIR={hdp_data_dir}\\hdpdatadn
+"""
 cluster_properties = """#Log directory
 HDP_LOG_DIR={hdp_log_dir}
 
 #Data directory
 HDP_DATA_DIR={hdp_data_dir}
 
+{hdp_22_specific_props}
+
 #hosts
 NAMENODE_HOST={local_host}
 SECONDARY_NAMENODE_HOST={local_host}
@@ -82,7 +91,7 @@ OOZIE_DB_PASSWORD=oozie
 
 INSTALL_MSI_CMD = 'cmd /C start /wait msiexec /qn /i  {hdp_msi_path} /lv 
{hdp_log_path} MSIUSEREALADMINDETECTION=1 ' \
   'HDP_LAYOUT={hdp_layout_path} DESTROY_DATA=yes 
HDP_USER_PASSWORD={hadoop_password_arg} HDP=yes ' \
-  'KNOX=yes KNOX_MASTER_SECRET="AmbariHDP2Windows" FALCON=yes 
STORM=yes HBase=yes STORM=yes FLUME=yes'
+  'KNOX=yes KNOX_MASTER_SECRET="AmbariHDP2Windows" FALCON=yes 
STORM=yes HBase=yes STORM=yes FLUME=yes RANGER=no'
 CREATE_SERVICE_SCRIPT = os.path.abspath("sbin\createservice.ps1")
 CREATE_SERVICE_CMD = 'cmd /C powershell -File "{script}" -username hadoop 
-password "{password}" -servicename ' \
  '{servicename} -hdpresourcesdir "{resourcedir}" 
-servicecmdpath "{servicecmd}"'
@@ -140,7 +149,7 @@ def _write_marker():
 open(os.path.join(_working_dir, INSTALL_MARKER_FAILED), "w").close()
 
 
-def install_windows_msi(msi_url, save_dir, save_file, hadoop_password):
+def install_windows_msi(msi_url, save_dir, save_file, hadoop_password, 
stack_version):
   global _working_dir
   _working_dir = save_dir
   save_dir = os.path.abspath(save_dir)
@@ -157,12 +166,18 @@ def install_windows_msi(msi_url, save_dir, save_file, 
hadoop_password):
   Logger.info("hdp.msi already installed")
   return
 
+hdp_stack_version = format_hdp_stack_version(stack_version)
+hdp_22_specific_props = ''
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') 
>= 0:
+  hdp_22_specific_props = hdp_22
+
 # install msi
 download_file(msi_url, os.path.join(msi_save_dir, save_file))
 File(os.path.join(msi_save_dir, "properties.txt"), 
content=cluster_properties.format(hdp_log_dir=hdp_log_dir,

  hdp_data_dir=hdp_data_dir,

  local_host=local_host,
-   
  db_flavor=db_flavor))
+   
  db_flavor=db_flavor,
+   
  hdp_22_specific_props=hd

ambari git commit: AMBARI-8962. Fix unit tests in resource_management.TestSecurityCommons.TestSecurityCommons. (robert levas via jaimin)

2014-12-30 Thread jaimin
Repository: ambari
Updated Branches:
  refs/heads/trunk 4e31964a7 -> b01fdc5bd


AMBARI-8962. Fix unit tests in 
resource_management.TestSecurityCommons.TestSecurityCommons. (robert levas via 
jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b01fdc5b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b01fdc5b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b01fdc5b

Branch: refs/heads/trunk
Commit: b01fdc5bd9db7d8a8f5b4cd6b1b5964259418dae
Parents: 4e31964
Author: Jaimin Jetly 
Authored: Tue Dec 30 15:07:29 2014 -0800
Committer: Jaimin Jetly 
Committed: Tue Dec 30 15:07:29 2014 -0800

--
 .../python/resource_management/TestSecurityCommons.py | 10 ++
 1 file changed, 6 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/b01fdc5b/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
--
diff --git 
a/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py 
b/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
index c1a7d69..196ae34 100644
--- a/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
+++ b/ambari-agent/src/test/python/resource_management/TestSecurityCommons.py
@@ -123,8 +123,10 @@ class TestSecurityCommons(TestCase):
   def test_get_params_from_filesystem(self, et_parser_mock):
 
 conf_dir = gettempdir()
-config_file = []
-config_file.append("config.xml")
+config_file = {
+  "config.xml": FILE_TYPE_XML
+}
+
 
 prop1_name_mock = MagicMock()
 prop1_name_mock.text.return_value = 'property1'
@@ -173,7 +175,7 @@ class TestSecurityCommons(TestCase):
 self.assertEquals(not result['config'].items(), True)
 
 #Testing that returns an empty dictionary if is called with empty 
config_files
-empty_config_file = []
+empty_config_file = {}
 
 result = get_params_from_filesystem(conf_dir, empty_config_file)
 
@@ -218,7 +220,7 @@ class TestSecurityCommons(TestCase):
 
 json_load_mock.return_value = output
 
-self.assertEquals(cached_kinit_executor(kinit_path, user, keytab_file, 
principal, hostname, temp_dir, expiration_time), True)
+cached_kinit_executor(kinit_path, user, keytab_file, principal, hostname, 
temp_dir, expiration_time)
 os_path_exists_mock.assert_called_with(file_path)
 os_isfile_mock.assert_called_with(file_path + os.sep + filename)
 open_file_mock.assert_called_with(file_path + os.sep + filename, 'r')



ambari git commit: AMBARI-8964. RU failed on Core Masters/ Restarting NAMENODE (ncole)

2014-12-30 Thread ncole
Repository: ambari
Updated Branches:
  refs/heads/trunk b768b9e95 -> 4e31964a7


AMBARI-8964. RU failed on Core Masters/ Restarting NAMENODE (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4e31964a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4e31964a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4e31964a

Branch: refs/heads/trunk
Commit: 4e31964a744a35187511458627eb99e8afda0b25
Parents: b768b9e
Author: Nate Cole 
Authored: Tue Dec 30 16:59:15 2014 -0500
Committer: Nate Cole 
Committed: Tue Dec 30 17:01:30 2014 -0500

--
 .../HDFS/2.1.0.2.0/package/scripts/utils.py  | 19 ++-
 1 file changed, 14 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/4e31964a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
index 659233e..f185ea0 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py
@@ -73,11 +73,20 @@ def failover_namenode():
 
 # Wait until it transitions to standby
 check_standby_cmd = format("hdfs haadmin -getServiceState {namenode_id} | 
grep standby")
-Execute(check_standby_cmd,
-user=params.hdfs_user,
-tries=30,
-try_sleep=6,
-logoutput=True)
+
+# process may already be down.  try one time, then proceed
+code, out = call(check_standby_cmd, user=params.hdfs_user, logoutput=True)
+Logger.info(format("Rolling Upgrade - check for standby returned {code}"))
+
+if code == 255 and out:
+  Logger.info("Rolling Upgrade - namenode is already down")
+else:
+  Execute(check_standby_cmd,
+  user=params.hdfs_user,
+  tries=30,
+  try_sleep=6,
+  logoutput=True)
+
   else:
 Logger.info("Rolling Upgrade - Host %s is the standby namenode." % 
str(params.hostname))
 



ambari git commit: AMBARI-8963 Metric Monitor start fails on CentOS after WIN changes

2014-12-30 Thread fbarca
Repository: ambari
Updated Branches:
  refs/heads/trunk 321cdbf36 -> b768b9e95


AMBARI-8963 Metric Monitor start fails on CentOS after WIN changes

Fixed the psutils path


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b768b9e9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b768b9e9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b768b9e9

Branch: refs/heads/trunk
Commit: b768b9e9562aa81ad7bfc80516f6e342083652de
Parents: 321cdbf
Author: Florian Barca 
Authored: Tue Dec 30 13:23:57 2014 -0800
Committer: Florian Barca 
Committed: Tue Dec 30 13:25:22 2014 -0800

--
 .../ambari-metrics-assembly/src/main/assembly/monitor-windows.xml  | 2 +-
 .../src/main/python/core/__init__.py   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/b768b9e9/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor-windows.xml
--
diff --git 
a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor-windows.xml 
b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor-windows.xml
index 65a936b..3b877a7 100644
--- 
a/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor-windows.xml
+++ 
b/ambari-metrics/ambari-metrics-assembly/src/main/assembly/monitor-windows.xml
@@ -52,7 +52,7 @@
 
 
   ${monitor.dir}/target/psutil_build
-  /sbin/core/psutil/build
+  /sbin/psutil/build
   
 *.egg
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/b768b9e9/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/__init__.py
--
diff --git 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/__init__.py
 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/__init__.py
index 15ad117..eb87cb5 100644
--- 
a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/__init__.py
+++ 
b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/__init__.py
@@ -21,7 +21,7 @@ limitations under the License.
 import os, sys
 
 path = os.path.abspath(__file__)
-path = os.path.normpath(os.path.join(os.path.dirname(path), "psutil", "build"))
+path = os.path.join(os.path.dirname(os.path.dirname(path)), "psutil", "build")
 
 for dir in os.walk(path).next()[1]:
   if 'lib' in dir:



ambari git commit: AMBARI-8955. Upgrading to Ambari 1.6.1 with external postgres DB breaks Hive metastore startup.(vbrodetskyi)

2014-12-30 Thread vbrodetskyi
Repository: ambari
Updated Branches:
  refs/heads/trunk e3c9aa7a4 -> 321cdbf36


AMBARI-8955. Upgrading to Ambari 1.6.1 with external postgres DB breaks Hive 
metastore startup.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/321cdbf3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/321cdbf3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/321cdbf3

Branch: refs/heads/trunk
Commit: 321cdbf3673430c6578011cd5c4541938cca0e24
Parents: e3c9aa7
Author: Vitaly Brodetskyi 
Authored: Tue Dec 30 21:07:56 2014 +0200
Committer: Vitaly Brodetskyi 
Committed: Tue Dec 30 21:07:56 2014 +0200

--
 .../server/upgrade/UpgradeCatalog200.java   | 54 ++---
 .../server/upgrade/UpgradeCatalog200Test.java   | 64 ++--
 2 files changed, 80 insertions(+), 38 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/321cdbf3/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
index e18331f..e86ca1a 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog200.java
@@ -18,12 +18,10 @@
 
 package org.apache.ambari.server.upgrade;
 
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
+import com.google.inject.Inject;
+import com.google.inject.Injector;
 import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.ClusterDAO;
@@ -40,13 +38,19 @@ import 
org.apache.ambari.server.orm.entities.HostComponentStateEntity;
 import 
org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
 import 
org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.ServiceDesiredStateEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.SecurityState;
 import org.apache.ambari.server.state.UpgradeState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.inject.Inject;
-import com.google.inject.Injector;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 
 /**
@@ -255,6 +259,42 @@ public class UpgradeCatalog200 extends 
AbstractUpgradeCatalog {
   protected void executeDMLUpdates() throws AmbariException, SQLException {
 // remove NAGIOS to make way for the new embedded alert framework
 removeNagiosService();
+updateHiveDatabaseType();
+  }
+
+  protected void updateHiveDatabaseType() throws AmbariException {
+final String PROPERTY_NAME = "hive_database_type";
+final String PROPERTY_VALUE_OLD = "postgresql";
+final String PROPERTY_VALUE_NEW = "postgres";
+final String PROPERTY_CONFIG_NAME = "hive-env";
+
+AmbariManagementController ambariManagementController = 
injector.getInstance(
+AmbariManagementController.class);
+Clusters clusters = ambariManagementController.getClusters();
+
+if (clusters != null) {
+  Map clusterMap = clusters.getClusters();
+  Map prop = new HashMap();
+  String hive_database_type = null;
+
+  if (clusterMap != null && !clusterMap.isEmpty()) {
+for (final Cluster cluster : clusterMap.values()) {
+  hive_database_type = null;
+
+  if (cluster.getDesiredConfigByType(PROPERTY_CONFIG_NAME) != null) {
+hive_database_type = cluster.getDesiredConfigByType(
+PROPERTY_CONFIG_NAME).getProperties().get(PROPERTY_NAME);
+  }
+
+  if (hive_database_type != null && !hive_database_type.isEmpty() &&
+  hive_database_type.equals(PROPERTY_VALUE_OLD)) {
+prop.put(PROPERTY_NAME, PROPERTY_VALUE_NEW);
+updateConfigurationPropertiesForCluster(cluster, 
PROPERTY_CONFIG_NAME, prop, true, false);
+  }
+}
+  }
+
+}
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/321cdbf3/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog200Test.java
--
diff --git 
a/ambari-server/src/test/java/org/apache/am

ambari git commit: AMBARI-8478. Falcon service components should indicate security state. (robert levas via jaimin)

2014-12-30 Thread jaimin
Repository: ambari
Updated Branches:
  refs/heads/trunk 6c21b0942 -> e3c9aa7a4


AMBARI-8478. Falcon service components should indicate security state. (robert 
levas via jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e3c9aa7a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e3c9aa7a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e3c9aa7a

Branch: refs/heads/trunk
Commit: e3c9aa7a4336a43be61342c80395b45288979d5f
Parents: 6c21b09
Author: Jaimin Jetly 
Authored: Tue Dec 30 11:01:50 2014 -0800
Committer: Jaimin Jetly 
Committed: Tue Dec 30 11:01:50 2014 -0800

--
 .../libraries/functions/security_commons.py |  52 +
 .../0.5.0.2.1/package/scripts/falcon_client.py  |   9 ++
 .../0.5.0.2.1/package/scripts/falcon_server.py  |  62 +++
 .../0.5.0.2.1/package/scripts/status_params.py  |  11 ++
 .../HDFS/2.1.0.2.0/package/scripts/datanode.py  |  15 ++-
 .../2.1.0.2.0/package/scripts/hdfs_client.py|  15 +--
 .../2.1.0.2.0/package/scripts/journalnode.py|  16 +--
 .../HDFS/2.1.0.2.0/package/scripts/namenode.py  |  16 +--
 .../HDFS/2.1.0.2.0/package/scripts/snamenode.py |  16 +--
 .../2.1.0.2.0/package/scripts/zkfc_slave.py |  16 +--
 .../stacks/2.1/FALCON/test_falcon_client.py |  25 +
 .../stacks/2.1/FALCON/test_falcon_server.py | 106 +++
 .../src/test/python/stacks/utils/RMFTestCase.py |   8 +-
 13 files changed, 300 insertions(+), 67 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/e3c9aa7a/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
--
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
index d3cd1a2..535a53b 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/security_commons.py
@@ -23,6 +23,8 @@ from tempfile import mkstemp
 import os
 import json
 
+FILE_TYPE_XML = 'XML'
+FILE_TYPE_PROPERTIES = 'PROPERTIES'
 
 def validate_security_config_properties(params, configuration_rules):
   """
@@ -103,29 +105,47 @@ def build_expectations(config_file, value_checks, 
empty_checks, read_checks):
 def get_params_from_filesystem(conf_dir, config_files):
   """
   Used to retrieve properties from xml config files and build a dict
+
+  The dictionary of configuration files to file types should contain one of 
the following values:
+'XML'
+'PROPERTIES'
+
   :param conf_dir:  directory where the configuration files sit
-  :param config_files: list of configuration file names
-  :return:
+  :param config_files: dictionary of configuration file names to (supported) 
file types
+  :return: a dictionary of config-type to a dictionary of key/value pairs for each configuration file
   """
   result = {}
   from xml.etree import ElementTree as ET
-
-  for config_file in config_files:
-configuration = ET.parse(conf_dir + os.sep + config_file)
-props = configuration.getroot().getchildren()
-config_file_id = config_file[:-4] if len(config_file) > 4 else config_file
-result[config_file_id] = {}
-for prop in props:
-  result[config_file_id].update({prop[0].text: prop[1].text})
+  import ConfigParser, StringIO
+  for config_file, file_type in config_files.iteritems():
+file_name, file_ext = os.path.splitext(config_file)
+
+if file_type == FILE_TYPE_XML:
+  configuration = ET.parse(conf_dir + os.sep + config_file)
+  props = configuration.getroot().getchildren()
+  config_file_id = file_name if file_name else config_file
+  result[config_file_id] = {}
+  for prop in props:
+result[config_file_id].update({prop[0].text: prop[1].text})
+
+elif file_type == FILE_TYPE_PROPERTIES:
+  with open(conf_dir + os.sep + config_file, 'r') as f:
+config_string = '[root]\n' + f.read()
+  ini_fp = StringIO.StringIO(config_string)
+  config = ConfigParser.RawConfigParser()
+  config.readfp(ini_fp)
+  props = config.items('root')
+  result[file_name] = {}
+  for key, value in props:
+result[file_name].update({key : value})
   return result
 
 
 def cached_kinit_executor(kinit_path, exec_user, keytab_file, principal, 
hostname, temp_dir,
-  expiration_time):
+  expiration_time=5):
   """
   Main cached kinit executor - Uses a temporary file on the FS to cache 
executions. Each command
   will have its own file and only one entry (last successful execution) will 
be stored
-  :return:
   """
   key = str(hash("%s|%s" % (principal, key

[1/2] ambari git commit: Windows build for 2 Ambari Metrics service: Host Monitoring and Timeline Service (Collector).

2014-12-30 Thread fbarca
Repository: ambari
Updated Branches:
  refs/heads/trunk 9884cbdd5 -> 6c21b0942


http://git-wip-us.apache.org/repos/asf/ambari/blob/6c21b094/ambari-metrics/ambari-metrics-timelineservice/src/main/python/ambari_metrics_collector/serviceConfiguration.py
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/python/ambari_metrics_collector/serviceConfiguration.py
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/python/ambari_metrics_collector/serviceConfiguration.py
new file mode 100644
index 000..5e73e24
--- /dev/null
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/python/ambari_metrics_collector/serviceConfiguration.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import os
+
+from ambari_commons.exceptions import FatalException
+from ambari_commons.logging_utils import print_info_msg
+from ambari_commons.os_utils import search_file
+from ambari_metrics_collector.properties import Properties
+
+
+AMS_CONF_VAR = "AMS_CONF"
+DEFAULT_CONF_DIR = "conf"
+AMS_PROPERTIES_FILE = "ams.properties"
+
+JAVA_HOME = "JAVA_HOME"
+
+DEBUG_MODE_KEY = "ams.server.debug"
+SUSPEND_START_MODE_KEY = "ams.server.debug.suspend.start"
+
+SERVER_OUT_FILE_KEY = "ams.output.file.path"
+
+DEFAULT_LIBS_DIR = "lib"
+
+EMBEDDED_HBASE_MASTER_SERVICE = "ams_hbase_master"
+
+EMBEDDED_HBASE_SUBDIR = "hbase"
+
+JAVA_EXE_SUBPATH = "bin\\java.exe"
+
+JAVA_HEAP_MAX_DEFAULT = "-Xmx1g"
+
+HADOOP_HEAPSIZE = "HADOOP_HEAPSIZE"
+HADOOP_HEAPSIZE_DEFAULT = "1024"
+
+DEBUG_MODE = False
+SUSPEND_START_MODE = False
+
+OUT_DIR = "\\var\\log\\ambari-metrics-collector"
+SERVER_OUT_FILE = OUT_DIR + "\\ambari-metrics-collector.out"
+SERVER_LOG_FILE = OUT_DIR + "\\ambari-metrics-collector.log"
+
+PID_DIR = "\\var\\run\\ambari-metrics-collector"
+PID_OUT_FILE = PID_DIR + "\\ambari-metrics-collector.pid"
+EXITCODE_OUT_FILE = PID_DIR + "\\ambari-metrics-collector.exitcode"
+
+SERVICE_USERNAME_KEY = "TMP_AMC_USERNAME"
+SERVICE_PASSWORD_KEY = "TMP_AMC_PASSWORD"
+
+SETUP_ACTION = "setup"
+START_ACTION = "start"
+STOP_ACTION = "stop"
+RESTART_ACTION = "restart"
+STATUS_ACTION = "status"
+
+def get_conf_dir():
+  try:
+conf_dir = os.environ[AMS_CONF_VAR]
+  except KeyError:
+conf_dir = DEFAULT_CONF_DIR
+  return conf_dir
+
+def find_properties_file():
+  conf_file = search_file(AMS_PROPERTIES_FILE, get_conf_dir())
+  if conf_file is None:
+err = 'File %s not found in search path $%s: %s' % (AMS_PROPERTIES_FILE,
+AMS_CONF_VAR, 
get_conf_dir())
+print err
+raise FatalException(1, err)
+  else:
+print_info_msg('Loading properties from ' + conf_file)
+  return conf_file
+
+# Load AMC properties and return dict with values
+def get_properties():
+  conf_file = find_properties_file()
+
+  properties = None
+  try:
+properties = Properties()
+properties.load(open(conf_file))
+  except (Exception), e:
+print 'Could not read "%s": %s' % (conf_file, e)
+return -1
+  return properties
+
+def get_value_from_properties(properties, key, default=""):
+  try:
+value = properties.get_property(key)
+if not value:
+  value = default
+  except:
+return default
+  return value
+
+def get_java_cp():
+  conf_dir = get_conf_dir()
+  conf_dir = os.path.abspath(conf_dir) + os.pathsep + 
os.path.join(os.path.abspath(DEFAULT_LIBS_DIR), "*")
+  if conf_dir.find(' ') != -1:
+conf_dir = '"' + conf_dir + '"'
+  return conf_dir
+
+def find_jdk():
+  try:
+java_home = os.environ[JAVA_HOME]
+  except Exception:
+# No JAVA_HOME set
+err = "ERROR: JAVA_HOME is not set and could not be found."
+raise FatalException(1, err)
+
+  if not os.path.isdir(java_home):
+err = "ERROR: JAVA_HOME {0} does not exist.".format(java_home)
+raise FatalException(1, err)
+
+  java_exe = os.path.join(java_home, JAVA_EXE_SUBPATH)
+  if not os.path.isfile(java_exe):
+err = "ERROR: {0} is not executable.".format(java_exe)
+raise FatalException(1, err)
+
+  return java_home
+
+def get_java_exe_path():
+  jdk_path = find_jdk()
+  java_exe = os.path.join(jdk_path, JAVA_EXE_SUBPATH)
+  return java_exe
+
+def buil

[2/2] ambari git commit: Windows build for 2 Ambari Metrics service: Host Monitoring and Timeline Service (Collector).

2014-12-30 Thread fbarca
Windows build for 2 Ambari Metrics service: Host Monitoring and Timeline 
Service (Collector).

+ Added Windows profiles to the Maven project files
+ Added the necessary Windows assemblies
+ Created Windows service skeletons
+ Host Monitoring: added OS-independent process termination handler
+ Collector: added debugging support for the Java process
+ Fixed services shutdown, especially when joining spawned threads
+ Fixed unit tests
+ Added support for unit testing on MacOS and Windows

Windows-specific:
+ Moved the assembly descriptors to ambari-metrics-assembly
+ Fixed comments in the configuration files
+ Added soft dependencies on the embedded HBase service
+ Added support for the embedded HBase service setup


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6c21b094
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6c21b094
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6c21b094

Branch: refs/heads/trunk
Commit: 6c21b0942f40791ab4a461048d55e975593eab85
Parents: 9884cbd
Author: Florian Barca 
Authored: Tue Dec 30 10:35:45 2014 -0800
Committer: Florian Barca 
Committed: Tue Dec 30 10:35:45 2014 -0800

--
 ambari-agent/conf/windows/service_wrapper.py|   2 +-
 .../src/main/python/ambari_commons/os_utils.py  |  17 +-
 .../main/python/ambari_commons/os_windows.py|  21 +-
 .../src/main/python/ambari_commons/xml_utils.py |  33 +++
 ambari-metrics/ambari-metrics-assembly/pom.xml  |  77 ++-
 .../src/main/assembly/collector-windows.xml | 101 
 .../src/main/assembly/monitor-windows.xml   |  84 +++
 .../src/main/assembly/monitor.xml   |   2 +-
 .../src/main/assembly/sink-windows.xml  |  60 +
 .../conf/windows/ambari-metrics-monitor.cmd |  17 ++
 .../conf/windows/metric_groups.conf |  19 ++
 .../conf/windows/metric_monitor.ini |  30 +++
 .../ambari-metrics-host-monitoring/pom.xml  | 146 
 .../src/main/python/amhm_service.py | 231 +++
 .../src/main/python/core/__init__.py|   3 +-
 .../src/main/python/core/config_reader.py   |  66 +-
 .../src/main/python/core/controller.py  |  27 ++-
 .../src/main/python/core/emitter.py |  17 +-
 .../src/main/python/core/stop_handler.py| 138 +++
 .../src/main/python/main.py |  58 -
 .../src/test/python/core/TestEmitter.py |  41 ++--
 .../conf/windows/ambari-metrics-collector.cmd   |  17 ++
 .../conf/windows/ams-env.cmd|  16 ++
 .../conf/windows/ams-site.xml   |  25 ++
 .../conf/windows/ams.properties |  17 ++
 .../conf/windows/log4j.properties   |  29 +++
 .../ambari-metrics-timelineservice/pom.xml  |  31 +++
 .../python/ambari_metrics_collector/__init__.py |  21 ++
 .../ambari_metrics_collector/properties.py  | 223 ++
 .../serviceConfiguration.py | 152 
 .../src/main/python/amc_service.py  | 174 ++
 .../src/main/python/embedded_hbase_service.py   | 201 
 .../src/main/python/main.py | 214 +
 ambari-metrics/pom.xml  |  27 +++
 .../main/python/ambari-server-state/Entities.py |  17 +-
 pom.xml |   2 +
 36 files changed, 2238 insertions(+), 118 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/6c21b094/ambari-agent/conf/windows/service_wrapper.py
--
diff --git a/ambari-agent/conf/windows/service_wrapper.py 
b/ambari-agent/conf/windows/service_wrapper.py
index 40be1d0..5eb06c4 100644
--- a/ambari-agent/conf/windows/service_wrapper.py
+++ b/ambari-agent/conf/windows/service_wrapper.py
@@ -92,7 +92,7 @@ class AmbariAgentService(AmbariService):
 # Soft dependency on the Windows Time service
 ensure_time_service_is_started()
 
-self.heartbeat_stop_handler = HeartbeatStopHandlers(self._heventSvcStop)
+self.heartbeat_stop_handler = 
HeartbeatStopHandlers(AmbariAgentService._heventSvcStop)
 
 self.ReportServiceStatus(win32service.SERVICE_RUNNING)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c21b094/ambari-common/src/main/python/ambari_commons/os_utils.py
--
diff --git a/ambari-common/src/main/python/ambari_commons/os_utils.py 
b/ambari-common/src/main/python/ambari_commons/os_utils.py
index 3f4819d..942a920 100644
--- a/ambari-common/src/main/python/ambari_commons/os_utils.py
+++ b/ambari-common/src/main/python/ambari_commons/os_utils.py
@@ -48,14 +48,17 @@ def quote_path(filepath):
 filepath_ret = filepath
   return file

ambari git commit: AMBARI-8961. Add alert description for AMS. (swagle)

2014-12-30 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 8cf0e9155 -> 9884cbdd5


AMBARI-8961. Add alert description for AMS. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9884cbdd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9884cbdd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9884cbdd

Branch: refs/heads/trunk
Commit: 9884cbdd51f6d465044d4573a265f124424ec821
Parents: 8cf0e91
Author: Siddharth Wagle 
Authored: Tue Dec 30 10:16:33 2014 -0800
Committer: Siddharth Wagle 
Committed: Tue Dec 30 10:16:45 2014 -0800

--
 .../common-services/AMS/0.1.0/alerts.json | 18 --
 1 file changed, 12 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/9884cbdd/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
--
diff --git 
a/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json 
b/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
index 93b224a..ad22f59 100644
--- a/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
+++ b/ambari-server/src/main/resources/common-services/AMS/0.1.0/alerts.json
@@ -3,7 +3,8 @@
 "service": [
   {
 "name": "ams_metric_monitor_process_percent",
-"label": "Percent AMS Metric Monitors Available",
+"label": "Percent Metric Monitors Available",
+"description": "This alert is triggered if a percentage of Metric 
Monitor processes are not up and listening on the network for the configured 
warning and critical thresholds.",
 "interval": 1,
 "scope": "SERVICE",
 "enabled": true,
@@ -29,7 +30,8 @@
 "METRIC_COLLECTOR": [
   {
 "name": "ams_metric_collector_process",
-"label": "AMS Metric Collector Process",
+"label": "Metric Collector Process",
+"description": "This alert is triggered if the Metric Collector cannot 
be confirmed to be up and listening on the configured port for number of 
seconds equal to threshold.",
 "interval": 1,
 "scope": "ANY",
 "enabled": true,
@@ -54,7 +56,8 @@
   },
   {
 "name": "ams_metric_collector_hbase_master_process",
-"label": "AMS Metric Collector HBase Master Process",
+"label": "Metric Collector - HBase Master Process",
+"description": "This alert is triggered if the Metric Collector's 
HBase master processes cannot be confirmed to be up and listening on the 
network for the configured critical threshold, given in seconds.",
 "interval": 1,
 "scope": "ANY",
 "source": {
@@ -78,7 +81,8 @@
   },
   {
 "name": "ams_metric_collector_hbase_master_cpu",
-"label": "AMS Metric Collector HBase Maser CPU Utilization",
+"label": "Metric Collector HBase Master CPU Utilization",
+"description": "This host-level alert is triggered if CPU utilization 
of the Metric Collector's HBase Master exceeds certain warning and critical 
thresholds. It checks the HBase Master JMX Servlet for the SystemCPULoad 
property. The threshold values are in percent.",
 "interval": 5,
 "scope": "ANY",
 "enabled": true,
@@ -116,7 +120,8 @@
   },
   {
 "name": "ams_metric_collector_zookeeper_server_process",
-"label": "AMS Metric Collector ZooKeeper Server Process",
+"label": "Metric Collector - ZooKeeper Server Process",
+"description": "This host-level alert is triggered if the Metric 
Collector's ZooKeeper server process cannot be determined to be up and 
listening on the network.",
 "interval": 1,
 "scope": "ANY",
 "source": {
@@ -142,7 +147,8 @@
 "METRIC_MONITOR": [
   {
 "name": "ams_metric_monitor_process",
-"label": "AMS Metric Monitor Status",
+"label": "Metric Monitor Status",
+"description": "This alert indicates the status of the Metric Monitor 
process as determined by the monitor status script.",
 "interval": 1,
 "scope": "ANY",
 "source": {



[2/3] ambari git commit: Revert "AMBARI-8932. Creating hdfs directories on deploy takes too long, Part 2, reduces deploy time by ~6min (aonishuk)"

2014-12-30 Thread aonishuk
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
index 846bab7..c02bf74 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -20,15 +20,37 @@ Ambari Agent
 """
 import sys
 import os.path
+import glob
 
 from resource_management import *
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation 
import copy_tarballs_to_hdfs
 
 
-def webhcat(env):
+def webhcat():
   import params
 
+  if params.hdp_stack_version != "" and 
compare_versions(params.hdp_stack_version, "2.2.0.0") < 0:
+params.HdfsDirectory(params.webhcat_apps_dir,
+ action="create_delayed",
+ owner=params.webhcat_user,
+ mode=0755
+)
+  
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+params.HdfsDirectory(params.hcat_hdfs_user_dir,
+ action="create_delayed",
+ owner=params.hcat_user,
+ mode=params.hcat_hdfs_user_mode
+)
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+   action="create_delayed",
+   owner=params.webhcat_user,
+   mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(None, action="create")
+
   Directory(params.templeton_pid_dir,
 owner=params.webhcat_user,
 mode=0755,
@@ -57,6 +79,55 @@ def webhcat(env):
 path='/bin'
 )
 
+  # TODO, these checks that are specific to HDP 2.2 and greater should really 
be in a script specific to that stack.
+  if params.hdp_stack_version != "" and 
compare_versions(params.hdp_stack_version, "2.2.0.0") >= 0:
+copy_tarballs_to_hdfs('hive', params.webhcat_user, params.hdfs_user, 
params.user_group)
+copy_tarballs_to_hdfs('pig', params.webhcat_user, params.hdfs_user, 
params.user_group)
+copy_tarballs_to_hdfs('hadoop-streaming', params.webhcat_user, 
params.hdfs_user, params.user_group)
+copy_tarballs_to_hdfs('sqoop', params.webhcat_user, params.hdfs_user, 
params.user_group)
+  else:
+CopyFromLocal(params.hadoop_streeming_jars,
+  owner=params.webhcat_user,
+  mode=0755,
+  dest_dir=params.webhcat_apps_dir,
+  kinnit_if_needed=kinit_if_needed,
+  hdfs_user=params.hdfs_user,
+  hadoop_bin_dir=params.hadoop_bin_dir,
+  hadoop_conf_dir=params.hadoop_conf_dir
+)
+
+if (os.path.isfile(params.pig_tar_file)):
+  CopyFromLocal(params.pig_tar_file,
+owner=params.webhcat_user,
+mode=0755,
+dest_dir=params.webhcat_apps_dir,
+kinnit_if_needed=kinit_if_needed,
+hdfs_user=params.hdfs_user,
+hadoop_bin_dir=params.hadoop_bin_dir,
+hadoop_conf_dir=params.hadoop_conf_dir
+  )
+
+CopyFromLocal(params.hive_tar_file,
+  owner=params.webhcat_user,
+  mode=0755,
+  dest_dir=params.webhcat_apps_dir,
+  kinnit_if_needed=kinit_if_needed,
+  hdfs_user=params.hdfs_user,
+  hadoop_bin_dir=params.hadoop_bin_dir,
+  hadoop_conf_dir=params.hadoop_conf_dir
+)
+
+if (len(glob.glob(params.sqoop_tar_file)) > 0):
+  CopyFromLocal(params.sqoop_tar_file,
+owner=params.webhcat_user,
+mode=0755,
+dest_dir=params.webhcat_apps_dir,
+kinnit_if_needed=kinit_if_needed,
+hdfs_user=params.hdfs_user,
+hadoop_bin_dir=params.hadoop_bin_dir,
+hadoop_conf_dir=params.hadoop_conf_dir
+  )
+
   XmlConfig("webhcat-site.xml",
 conf_dir=params.config_dir,
 configurations=params.config['configurations']['webhcat-site'],

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12

[1/3] ambari git commit: Revert "AMBARI-8932. Creating hdfs directories on deploy takes too long, Part 2, reduces deploy time by ~6min (aonishuk)"

2014-12-30 Thread aonishuk
Repository: ambari
Updated Branches:
  refs/heads/trunk 72881097d -> 8cf0e9155


http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
--
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index ccd477b..da316b7 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -21,7 +21,6 @@ import socket
 import subprocess
 
 from mock.mock import MagicMock, patch
-from resource_management.libraries.functions import version
 from resource_management.core import shell
 from stacks.utils.RMFTestCase import *
 
@@ -171,154 +170,89 @@ class TestHiveServer(RMFTestCase):
 self.assertFalse(socket_mock.called)
 
   def assert_configure_default(self):
-
-self.assertResourceCalled('HdfsResource', '/apps/tez/',
-security_enabled = False,
-hadoop_conf_dir = '/etc/hadoop/conf',
-keytab = UnknownConfigurationMock(),
-hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-kinit_path_local = '/usr/bin/kinit',
-user = 'hdfs',
-owner = 'tez',
-hadoop_bin_dir = '/usr/bin',
-type = 'directory',
-action = ['create_delayed'],
-mode = 0755,
-)
-self.assertResourceCalled('HdfsResource', '/apps/tez/lib/',
-security_enabled = False,
-hadoop_conf_dir = '/etc/hadoop/conf',
-keytab = UnknownConfigurationMock(),
-hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-kinit_path_local = '/usr/bin/kinit',
-user = 'hdfs',
-owner = 'tez',
-hadoop_bin_dir = '/usr/bin',
-type = 'directory',
-action = ['create_delayed'],
-mode = 0755,
-)
-self.assertResourceCalled('HdfsResource', None,
-security_enabled = False,
-hadoop_bin_dir = '/usr/bin',
-keytab = UnknownConfigurationMock(),
-hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-kinit_path_local = '/usr/bin/kinit',
-user = 'hdfs',
-action = ['execute'],
-hadoop_conf_dir = '/etc/hadoop/conf',
-)
-self.assertResourceCalled('HdfsResource', '/apps/tez/',
-security_enabled = False,
-hadoop_conf_dir = '/etc/hadoop/conf',
-keytab = UnknownConfigurationMock(),
-hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-kinit_path_local = '/usr/bin/kinit',
-user = 'hdfs',
-owner = 'tez',
-hadoop_bin_dir = '/usr/bin',
-type = 'directory',
-action = ['create_delayed'],
-mode = 0755,
-)
-self.assertResourceCalled('HdfsResource', '/apps/tez/lib/',
-security_enabled = False,
-hadoop_conf_dir = '/etc/hadoop/conf',
-keytab = UnknownConfigurationMock(),
-hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-kinit_path_local = '/usr/bin/kinit',
-user = 'hdfs',
-owner = 'tez',
-hadoop_bin_dir = '/usr/bin',
-type = 'directory',
-action = ['create_delayed'],
-mode = 0755,
-)
-self.assertResourceCalled('HdfsResource', None,
-security_enabled = False,
-hadoop_bin_dir = '/usr/bin',
-keytab = UnknownConfigurationMock(),
-hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-kinit_path_local = '/usr/bin/kinit',
-user = 'hdfs',
-action = ['execute'],
-hadoop_conf_dir = '/etc/hadoop/conf',
-)
-self.assertResourceCalled('HdfsResource', '/apps/webhcat',
-security_enabled = False,
-hadoop_conf_dir = '/etc/hadoop/conf',
-keytab = UnknownConfigurationMock(),
-hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-kinit_path_local = '/usr/bin/kinit',
-user = 'hdfs',
-owner = 'hcat',
-hadoop_bin_dir = '/usr/bin',
-type = 'directory',
-action = ['create_delayed'],
-mode = 0755,
+self.assertResourceCalled('HdfsDirectory', '/apps/tez/',
+  action = ['create_delayed'],
+  mode = 0755,
+  owner = 'tez',
+  security_enabled = False,
+  keytab = UnknownConfigurationMock(),
+  conf_dir = '/etc/hadoop/conf',
+  hdfs_user = 'hdfs',
+  bin_dir = '/usr/bin',
+  kinit_path_local = "/usr/bin/kinit"
 )
-self.assertResourceCalled('HdfsResource', '/user/hcat',
-security_enabled = False,
-hadoop_conf_dir = '/etc/hadoop/conf',
-keytab = UnknownConfigurationMock(),
-hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-kinit_

[3/3] ambari git commit: Revert "AMBARI-8932. Creating hdfs directories on deploy takes too long, Part 2, reduces deploy time by ~6min (aonishuk)"

2014-12-30 Thread aonishuk
Revert "AMBARI-8932. Creating hdfs directories on deploy takes too long, Part 
2, reduces deploy time by ~6min (aonishuk)"


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8cf0e915
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8cf0e915
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8cf0e915

Branch: refs/heads/trunk
Commit: 8cf0e91550619ec4101427d721bf859cdbeafb9d
Parents: 7288109
Author: Andrew Onishuk 
Authored: Tue Dec 30 19:51:33 2014 +0200
Committer: Andrew Onishuk 
Committed: Tue Dec 30 19:52:21 2014 +0200

--
 ambari-agent/pom.xml|   7 -
 .../resource_management/TestContentSources.py   |   6 +-
 .../resource_management/TestCopyFromLocal.py|  68 +++
 .../TestPropertiesFileResource.py   |  10 +-
 .../TestRepositoryResource.py   |   4 +-
 .../TestXmlConfigResource.py|  10 +-
 .../python/resource_management/core/source.py   |   2 +-
 .../libraries/functions/__init__.py |   1 -
 .../libraries/functions/get_namenode_states.py  |  72 
 .../libraries/functions/version.py  |  29 +-
 .../libraries/providers/__init__.py |   3 +-
 .../libraries/providers/copy_from_local.py  |  89 
 .../libraries/providers/hdfs_directory.py   | 112 +
 .../libraries/providers/hdfs_resource.py| 109 -
 .../libraries/resources/__init__.py |   3 +-
 .../libraries/resources/copy_from_local.py  |  40 ++
 .../libraries/resources/hdfs_directory.py   |  45 ++
 .../libraries/resources/hdfs_resource.py|  77 
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |   8 +-
 .../FALCON/0.5.0.2.1/package/scripts/params.py  |  16 +-
 .../HBASE/0.96.0.2.0/package/scripts/hbase.py   |  10 +-
 .../HBASE/0.96.0.2.0/package/scripts/params.py  |  16 +-
 .../0.96.0.2.0/package/scripts/service_check.py |   3 +-
 .../package/files/fast-hdfs-resource.jar| Bin 19284916 -> 0 bytes
 .../HDFS/2.1.0.2.0/package/scripts/hdfs.py  |   5 -
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py  |  11 +-
 .../2.1.0.2.0/package/scripts/hdfs_snamenode.py |   2 +
 .../HDFS/2.1.0.2.0/package/scripts/params.py|  21 +-
 .../2.1.0.2.0/package/scripts/service_check.py  |  54 ++-
 .../0.12.0.2.0/package/files/templetonSmoke.sh  |  30 +-
 .../HIVE/0.12.0.2.0/package/scripts/hive.py |  86 +---
 .../0.12.0.2.0/package/scripts/hive_server.py   |   9 +-
 .../0.12.0.2.0/package/scripts/install_jars.py  |  55 ++-
 .../HIVE/0.12.0.2.0/package/scripts/params.py   |  66 +--
 .../HIVE/0.12.0.2.0/package/scripts/webhcat.py  |  73 +++-
 .../package/scripts/webhcat_server.py   |   2 +-
 .../4.0.0.2.0/package/files/oozieSmoke2.sh  |  33 +-
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py|   6 +-
 .../OOZIE/4.0.0.2.0/package/scripts/params.py   |  15 +-
 .../PIG/0.12.0.2.0/package/scripts/params.py|  20 +-
 .../0.12.0.2.0/package/scripts/service_check.py |  75 ++--
 .../TEZ/0.4.0.2.1/package/scripts/params.py |  16 +-
 .../YARN/package/scripts/historyserver.py   |  24 +-
 .../services/YARN/package/scripts/params.py |  28 +-
 .../2.0.6/services/YARN/package/scripts/yarn.py |  27 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py | 195 -
 .../2.0.6/HBASE/test_hbase_regionserver.py  |  90 
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   6 -
 .../stacks/2.0.6/HDFS/test_journalnode.py   |   6 -
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 414 +--
 .../stacks/2.0.6/HDFS/test_service_check.py |  70 ++--
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  |   6 -
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  |  12 -
 .../stacks/2.0.6/HIVE/test_hive_server.py   | 272 
 .../stacks/2.0.6/HIVE/test_webhcat_server.py| 134 ++
 .../stacks/2.0.6/OOZIE/test_oozie_server.py |  66 +--
 .../stacks/2.0.6/PIG/test_pig_service_check.py  | 112 +
 .../stacks/2.0.6/YARN/test_historyserver.py | 288 ++---
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   2 -
 .../stacks/2.0.6/YARN/test_nodemanager.py   | 132 ++
 .../stacks/2.0.6/YARN/test_yarn_client.py   |   2 -
 .../python/stacks/2.0.6/configs/default.json|   4 +-
 .../stacks/2.1/FALCON/test_falcon_server.py |  43 +-
 ambari-server/src/test/python/unitTests.py  |   1 -
 ambari-web/app/config.js|   2 +-
 .../dependency-reduced-pom.xml  |  58 ---
 .../ambari/fast_hdfs_resource/Resource.java |   8 +-
 67 files changed, 1611 insertions(+), 1710 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-agent/pom.xml
--
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml

ambari git commit: AMBARI-8872. Support point in time queries.

2014-12-30 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk f186c4bad -> 72881097d


AMBARI-8872. Support point in time queries.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/72881097
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/72881097
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/72881097

Branch: refs/heads/trunk
Commit: 72881097dc0f4c9432c62427f2ce5ae6c54c966d
Parents: f186c4b
Author: Siddharth Wagle 
Authored: Tue Dec 30 09:45:14 2014 -0800
Committer: Siddharth Wagle 
Committed: Tue Dec 30 09:45:14 2014 -0800

--
 .../metrics/timeline/PhoenixHBaseAccessor.java  | 475 +++---
 .../metrics/timeline/PhoenixTransactSQL.java| 491 ++-
 .../timeline/TimelineMetricAggregator.java  |   3 +-
 .../TimelineMetricClusterAggregator.java|   3 +-
 .../TimelineMetricClusterAggregatorHourly.java  |   3 +-
 .../metrics/timeline/ITClusterAggregator.java   |   5 +-
 .../metrics/timeline/ITMetricAggregator.java|   7 +-
 .../timeline/TestPhoenixTransactSQL.java|  21 +-
 .../metrics/timeline/AMSPropertyProvider.java   |  76 +--
 .../timeline/AMSPropertyProviderTest.java   |  92 +++-
 10 files changed, 835 insertions(+), 341 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/72881097/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
--
diff --git 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
index b5226ee..c3a418a 100644
--- 
a/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
+++ 
b/ambari-metrics/ambari-metrics-timelineservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/PhoenixHBaseAccessor.java
@@ -17,7 +17,6 @@
  */
 package 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
 
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -35,6 +34,7 @@ import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -48,6 +48,7 @@ import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.ti
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_CLUSTER_AGGREGATE_TABLE_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.CREATE_METRICS_TABLE_SQL;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
+import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.SplitByMetricNamesCondition;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_ENCODING;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DEFAULT_TABLE_COMPRESSION;
 import static 
org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.METRICS_AGGREGATE_HOURLY_TABLE_NAME;
@@ -134,7 +135,6 @@ public class PhoenixHBaseAccessor {
 }
   }
 
-
   /**
* Get JDBC connection to HBase store. Assumption is that the hbase
* configuration is present on the classpath and loaded by the caller into
@@ -148,13 +148,28 @@ public class PhoenixHBaseAccessor {
 return dataSource.getConnection();
   }
 
-  public static Map readMetricFromJSON(String json) throws IOException {
-return mapper.readValue(json, metricValuesTypeRef);
+  private static TimelineMetric getLastTimelineMetricFromResultSet(ResultSet 
rs)
+throws SQLException, IOException {
+TimelineMetric metric = getTimelineMetricCommonsFromResultSet(rs);
+
metric.setMetricValues(readLastMetricValueFromJSON(rs.getString("METRICS")));
+
+return metric;
   }
 
-  @SuppressWarnings("unchecked")
   static TimelineMetric getTimelineMetricFromResultSet(ResultSet rs)
 throws SQLException, IOException {
+TimelineMetric metric = getTimelineMetricCommonsFromResultSet(rs);
+Map sortedByTimeMetrics =
+  new TreeMap(rea

ambari git commit: AMBARI-8960. Separate AlertConfig logic from AlertConfigsController (onechiporenko)

2014-12-30 Thread onechiporenko
Repository: ambari
Updated Branches:
  refs/heads/trunk f1e86c999 -> f186c4bad


AMBARI-8960. Separate AlertConfig logic from AlertConfigsController 
(onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f186c4ba
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f186c4ba
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f186c4ba

Branch: refs/heads/trunk
Commit: f186c4bad15aac50244fb24f65c9dfa5e621a582
Parents: f1e86c9
Author: Oleg Nechiporenko 
Authored: Tue Dec 30 18:05:21 2014 +0200
Committer: Oleg Nechiporenko 
Committed: Tue Dec 30 18:33:15 2014 +0200

--
 .../alerts/definition_configs_controller.js |  11 +-
 ambari-web/app/models/alert_config.js   | 128 +--
 ambari-web/test/models/alert_config_test.js |  12 +-
 3 files changed, 92 insertions(+), 59 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/f186c4ba/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
--
diff --git 
a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js 
b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
index 9f3be2e..264f6d1 100644
--- a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
+++ b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
@@ -103,11 +103,7 @@ App.MainAlertDefinitionConfigsController = 
Em.Controller.extend({
*/
   getThresholdsProperty: function (type, property) {
 var warning = this.get('content.reporting').findProperty('type', type);
-if (warning && warning.get(property)) {
-  return warning.get(property);
-} else {
-  return null;
-}
+return warning && warning.get(property) ? warning.get(property) : null;
   },
 
   /**
@@ -138,7 +134,6 @@ App.MainAlertDefinitionConfigsController = 
Em.Controller.extend({
 }
 
 configs.setEach('isDisabled', !this.get('canEdit'));
-configs.setEach('configsController', this);
 
 this.set('configs', configs);
   },
@@ -315,12 +310,12 @@ App.MainAlertDefinitionConfigsController = 
Em.Controller.extend({
 text: isWizard ? '' : this.getThresholdsProperty('ok', 'text'),
 value: isWizard ? '' : this.getThresholdsProperty('ok', 'value')
   }),
-  App.AlertConfigProperties.Thresholds.WarningThreshold.create({
+  
App.AlertConfigProperties.Thresholds.WarningThreshold.create(App.AlertConfigProperties.Thresholds.PercentageMixin,
 {
 text: isWizard ? '' : this.getThresholdsProperty('warning', 'text'),
 value: isWizard ? '' : this.getThresholdsProperty('warning', 'value'),
 valueMetric: '%'
   }),
-  App.AlertConfigProperties.Thresholds.CriticalThreshold.create({
+  
App.AlertConfigProperties.Thresholds.CriticalThreshold.create(App.AlertConfigProperties.Thresholds.PercentageMixin,
 {
 text: isWizard ? '' : this.getThresholdsProperty('critical', 'text'),
 value: isWizard ? '' : this.getThresholdsProperty('critical', 'value'),
 valueMetric: '%'

http://git-wip-us.apache.org/repos/asf/ambari/blob/f186c4ba/ambari-web/app/models/alert_config.js
--
diff --git a/ambari-web/app/models/alert_config.js 
b/ambari-web/app/models/alert_config.js
index 4b198db..990963f 100644
--- a/ambari-web/app/models/alert_config.js
+++ b/ambari-web/app/models/alert_config.js
@@ -149,15 +149,7 @@ App.AlertConfigProperty = Ember.Object.extend({
*/
   isValid: function () {
 return true;
-  }.property(),
-
-  /**
-   * Configs controller
-   * Should be set in controller in rendering configs function
-   * Used to get access to other configs properties of one group or definition 
properties
-   * @type {App.MainAlertDefinitionConfigsController}
-   */
-  configsController: null
+  }.property()
 
 });
 
@@ -170,24 +162,28 @@ App.AlertConfigProperties = {
 classNames: 'alert-text-input',
 apiProperty: 'name'
   }),
+
   AlertNameSelected: App.AlertConfigProperty.extend({
 name: 'alert_name',
 label: 'Alert Name',
 displayType: 'select',
 apiProperty: 'name'
   }),
+
   ServiceAlertType: App.AlertConfigProperty.extend({
 name: 'alert_type_service',
 label: 'Service Alert Definition',
 displayType: 'radioButton',
 group: 'alert_type'
   }),
+
   HostAlertType: App.AlertConfigProperty.extend({
 name: 'alert_type_host',
 label: 'Host Alert Definition',
 displayType: 'radioButton',
 group: 'alert_type'
   }),
+
   Service: App.AlertConfigProperty.extend({
 name: 'service',
 label: 'Service',
@@ -197,6 +193,7 @@ App.AlertConfigProperties = {
   return App.StackService.find().findProperty('display

[2/3] ambari git commit: AMBARI-8959. UI uses old sub-resource names for API requests to /stacks/. (akovalenko)

2014-12-30 Thread akovalenko
http://git-wip-us.apache.org/repos/asf/ambari/blob/f1e86c99/ambari-web/app/assets/data/wizard/stack/stacks.json
--
diff --git a/ambari-web/app/assets/data/wizard/stack/stacks.json 
b/ambari-web/app/assets/data/wizard/stack/stacks.json
index 7cb9d9e..7e93847 100644
--- a/ambari-web/app/assets/data/wizard/stack/stacks.json
+++ b/ambari-web/app/assets/data/wizard/stack/stacks.json
@@ -1,8 +1,8 @@
 {
-  "href" : 
"http://192.168.56.101:8080/api/v1/stacks2/HDP/versions?fields=stackServices/StackServices,Versions";,
+  "href" : 
"http://192.168.56.101:8080/api/v1/stacks/HDP/versions?fields=services/StackServices,Versions";,
   "items" : [
 {
-  "href" : "http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0";,
+  "href" : "http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.0";,
   "Versions" : {
 "active" : true,
 "min_upgrade_version" : null,
@@ -18,9 +18,9 @@
   }
 }
   },
-  "stackServices" : [
+  "services" : [
 {
-  "href" : 
"http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0/stackServices/FLUME";,
+  "href" : 
"http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.0/services/FLUME";,
   "StackServices" : {
 "comments" : "Data management and processing platform",
 "custom_commands" : [ ],
@@ -57,7 +57,7 @@
   }
 },
 {
-  "href" : 
"http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0/stackServices/GANGLIA";,
+  "href" : 
"http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.0/services/GANGLIA";,
   "StackServices" : {
 "comments" : "Ganglia Metrics Collection system (http://oss.oetiker.ch/rrdtool/\"; target=\"_blank\">RRDTool will be 
installed too)",
 "custom_commands" : [ ],
@@ -80,7 +80,7 @@
   }
 },
 {
-  "href" : 
"http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0/stackServices/HBASE";,
+  "href" : 
"http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.0/services/HBASE";,
   "StackServices" : {
 "comments" : "Non-relational distributed database and centralized 
service for configuration management &\nsynchronization\n  ",
 "custom_commands" : [ ],
@@ -124,7 +124,7 @@
   }
 },
 {
-  "href" : 
"http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0/stackServices/HDFS";,
+  "href" : 
"http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.0/services/HDFS";,
   "StackServices" : {
 "comments" : "Apache Hadoop Distributed File System",
 "custom_commands" : [ ],
@@ -173,7 +173,7 @@
   }
 },
 {
-  "href" : 
"http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0/stackServices/HIVE";,
+  "href" : 
"http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.0/services/HIVE";,
   "StackServices" : {
 "comments" : "Data warehouse system for ad-hoc queries & analysis 
of large datasets and table & storage management service",
 "custom_commands" : [ ],
@@ -236,7 +236,7 @@
   }
 },
 {
-  "href" : 
"http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0/stackServices/MAPREDUCE2";,
+  "href" : 
"http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.0/services/MAPREDUCE2";,
   "StackServices" : {
 "comments" : "Apache Hadoop NextGen MapReduce (YARN)",
 "custom_commands" : [ ],
@@ -267,7 +267,7 @@
   }
 },
 {
-  "href" : 
"http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0/stackServices/NAGIOS";,
+  "href" : 
"http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.0/services/NAGIOS";,
   "StackServices" : {
 "comments" : "Nagios Monitoring and Alerting system",
 "custom_commands" : [ ],
@@ -290,7 +290,7 @@
   }
 },
 {
-  "href" : 
"http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0/stackServices/OOZIE";,
+  "href" : 
"http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.0/services/OOZIE";,
   "StackServices" : {
 "comments" : "System for workflow coordination and execution of 
Apache Hadoop jobs.  This also includes the installation of the optional Oozie 
Web Console which relies on and will install the http://www.sencha.com/legal/open-source-faq/\";>ExtJS Library.\n  
",
 "custom_commands" : [ ],
@@ -327,7 +327,7 @@
   }
 },
 {
-  "href" : 
"http://192.168.56.101:8080/api/v1/stacks2/HDP/versions/2.0/stackServices/PIG";,
+  "href" : 
"http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.0/services/PIG";,
   "StackServices" : {
 "comments" : "Scripting platform for analyzing large datasets",

[3/3] ambari git commit: AMBARI-8959. UI uses old sub-resource names for API requests to /stacks/. (akovalenko)

2014-12-30 Thread akovalenko
AMBARI-8959. UI uses old sub-resource names for API requests to /stacks/. 
(akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f1e86c99
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f1e86c99
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f1e86c99

Branch: refs/heads/trunk
Commit: f1e86c999f3bf2075feb2b7207d29b84e4c4c11d
Parents: cef3f2c
Author: Aleksandr Kovalenko 
Authored: Tue Dec 30 17:39:39 2014 +0200
Committer: Aleksandr Kovalenko 
Committed: Tue Dec 30 17:39:39 2014 +0200

--
 .../stackVersions/StackVersionsCreateCtrl.js|   4 +-
 .../stackVersions/StackVersionsEditCtrl.js  |   4 +-
 .../data/stacks/HDP-2.1/service_components.json |  34 +--
 .../assets/data/wizard/stack/HDP_versions.json  | 264 +--
 .../app/assets/data/wizard/stack/stacks.json| 198 +++---
 .../repo_version_management_controller.js   |   6 +-
 .../app/mappers/repository_version_mapper.js|   2 +-
 ambari-web/app/mappers/stack_mapper.js  |   4 +-
 ambari-web/app/mappers/stack_service_mapper.js  |   4 +-
 ambari-web/app/models/stack_service.js  |   2 +-
 ambari-web/app/utils/ajax/ajax.js   |   6 +-
 ambari-web/test/mappers/stack_mapper_test.js|   6 +-
 ambari-web/test/service_components.js   | 194 +++---
 ambari-web/test/stack.js|   6 +-
 14 files changed, 372 insertions(+), 362 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/f1e86c99/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
--
diff --git 
a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
 
b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
index 0d67f35..310ab4a 100644
--- 
a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
+++ 
b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsCreateCtrl.js
@@ -69,7 +69,9 @@ angular.module('ambariAdminConsole')
   $scope.afterStackVersionChange = function () {
 Stack.getSupportedOSList($scope.upgradeStack.selected.stack_name, 
$scope.upgradeStack.selected.stack_version)
 .then(function (data) {
-  var repositories = data.operatingSystems.map(function (os) {
+  //TODO map data.operating_systems after API is fixed
+  var operatingSystems = data.operating_systems || data.operatingSystems;
+  var repositories = operatingSystems.map(function (os) {
 return {
   os: os.OperatingSystems.os_type,
   packages: [

http://git-wip-us.apache.org/repos/asf/ambari/blob/f1e86c99/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
--
diff --git 
a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
 
b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
index 32401b9..e951c97 100644
--- 
a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
+++ 
b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/controllers/stackVersions/StackVersionsEditCtrl.js
@@ -43,7 +43,9 @@ angular.module('ambariAdminConsole')
   angular.forEach($scope.osList, function (os) {
 existingOSHash[os.OperatingSystems.os_type] = os;
   });
-  var osList = data.operatingSystems.map(function (os) {
+  //TODO map data.operating_systems after API is fixed
+  var operatingSystems = data.operating_systems || data.operatingSystems;
+  var osList = operatingSystems.map(function (os) {
   return existingOSHash[os.OperatingSystems.os_type] || {
 OperatingSystems: {
   os_type : os.OperatingSystems.os_type

http://git-wip-us.apache.org/repos/asf/ambari/blob/f1e86c99/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
--
diff --git a/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json 
b/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
index 64a4e1d..c7acf36 100644
--- a/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
+++ b/ambari-web/app/assets/data/stacks/HDP-2.1/service_components.json
@@ -1,5 +1,5 @@
 {
-  "href" : 
"http://192.168.56.101:8080/api/v1/stacks/HDP/versions/2.1/services?fields=StackServices/*,serviceComponents/*&_=1409656091161";,
+  

[1/3] ambari git commit: AMBARI-8959. UI uses old sub-resource names for API requests to /stacks/. (akovalenko)

2014-12-30 Thread akovalenko
Repository: ambari
Updated Branches:
  refs/heads/trunk cef3f2ccb -> f1e86c999


http://git-wip-us.apache.org/repos/asf/ambari/blob/f1e86c99/ambari-web/test/service_components.js
--
diff --git a/ambari-web/test/service_components.js 
b/ambari-web/test/service_components.js
index efa4f66..6a0ea07 100644
--- a/ambari-web/test/service_components.js
+++ b/ambari-web/test/service_components.js
@@ -19,7 +19,7 @@
 module.exports = {
   "items" : [
 {
-  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/FALCON";,
+  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.1/services/FALCON";,
   "StackServices" : {
 "comments" : "Data management and processing platform",
 "custom_commands" : [ ],
@@ -56,9 +56,9 @@ module.exports = {
   "OOZIE"
 ]
   },
-  "serviceComponents" : [
+  "components" : [
 {
-  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/FALCON/serviceComponents/FALCON_CLIENT";,
+  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.1/services/FALCON/components/FALCON_CLIENT";,
   "StackServiceComponents" : {
 "cardinality" : "1+",
 "component_category" : "CLIENT",
@@ -74,7 +74,7 @@ module.exports = {
   "dependencies" : [ ]
 },
 {
-  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/FALCON/serviceComponents/FALCON_SERVER";,
+  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.1/services/FALCON/components/FALCON_SERVER";,
   "StackServiceComponents" : {
 "cardinality" : "1",
 "component_category" : "MASTER",
@@ -89,7 +89,7 @@ module.exports = {
   },
   "dependencies" : [
 {
-  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/FALCON/serviceComponents/FALCON_SERVER/dependencies/OOZIE_CLIENT";,
+  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.1/services/FALCON/components/FALCON_SERVER/dependencies/OOZIE_CLIENT";,
   "Dependencies" : {
 "component_name" : "OOZIE_CLIENT",
 "dependent_component_name" : "FALCON_SERVER",
@@ -99,7 +99,7 @@ module.exports = {
   }
 },
 {
-  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/FALCON/serviceComponents/FALCON_SERVER/dependencies/OOZIE_SERVER";,
+  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.1/services/FALCON/components/FALCON_SERVER/dependencies/OOZIE_SERVER";,
   "Dependencies" : {
 "component_name" : "OOZIE_SERVER",
 "dependent_component_name" : "FALCON_SERVER",
@@ -113,7 +113,7 @@ module.exports = {
   ]
 },
 {
-  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/FLUME";,
+  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.1/services/FLUME";,
   "StackServices" : {
 "comments" : "Data management and processing platform",
 "custom_commands" : [ ],
@@ -145,9 +145,9 @@ module.exports = {
   "HDFS"
 ]
   },
-  "serviceComponents" : [
+  "components" : [
 {
-  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/FLUME/serviceComponents/FLUME_HANDLER";,
+  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.1/services/FLUME/components/FLUME_HANDLER";,
   "StackServiceComponents" : {
 "cardinality" : "0+",
 "component_category" : "SLAVE",
@@ -165,7 +165,7 @@ module.exports = {
   ]
 },
 {
-  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/GANGLIA";,
+  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.1/services/GANGLIA";,
   "StackServices" : {
 "comments" : "Ganglia Metrics Collection system (http://oss.oetiker.ch/rrdtool/\"; target=\"_blank\">RRDTool will be 
installed too)",
 "custom_commands" : [ ],
@@ -185,9 +185,9 @@ module.exports = {
   }
 }
   },
-  "serviceComponents" : [
+  "components" : [
 {
-  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks2/HDP/versions/2.1/stackServices/GANGLIA/serviceComponents/GANGLIA_MONITOR";,
+  "href" : 
"http://c6401.ambari.apache.org:8080/api/v1/stacks/HDP/versions/2.1/services/GANGLIA/components/GANGLIA_MONITOR";,
   "StackServiceComponents" : {
 "cardinality" : "ALL",
 "componen

ambari git commit: AMBARI-8958. Alerts UI: add confirm password field in Edit Notification popup. (akovalenko)

2014-12-30 Thread akovalenko
Repository: ambari
Updated Branches:
  refs/heads/trunk b4c48c262 -> cef3f2ccb


AMBARI-8958. Alerts UI: add confirm password field in Edit Notification popup. 
(akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cef3f2cc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cef3f2cc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cef3f2cc

Branch: refs/heads/trunk
Commit: cef3f2ccbca0532b16bf6bae7061ef1127f1b14b
Parents: b4c48c2
Author: Aleksandr Kovalenko 
Authored: Tue Dec 30 16:54:44 2014 +0200
Committer: Aleksandr Kovalenko 
Committed: Tue Dec 30 17:08:15 2014 +0200

--
 .../manage_alert_notifications_controller.js| 23 +--
 ambari-web/app/messages.js  |  2 +
 .../main/alerts/create_alert_notification.hbs   | 18 +++--
 ...anage_alert_notifications_controller_test.js | 40 +++-
 4 files changed, 76 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/cef3f2cc/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
--
diff --git 
a/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
 
b/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
index 22584c5..2dd442e 100644
--- 
a/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
+++ 
b/ambari-web/app/controllers/main/alerts/manage_alert_notifications_controller.js
@@ -89,9 +89,7 @@ App.ManageAlertNotificationsController = 
Em.Controller.extend({
   label: 
Em.I18n.t('alerts.actions.manage_alert_notifications_popup.SMTPUseAuthentication'),
   value: false,
   defaultValue: false,
-  inversedValue: function () {
-return !this.get('value');
-  }.property('value')
+  invertedValue: Em.computed.not('value')
 }),
 SMTPUsername: {
   label: 
Em.I18n.t('alerts.actions.manage_alert_notifications_popup.SMTPUsername'),
@@ -103,6 +101,11 @@ App.ManageAlertNotificationsController = 
Em.Controller.extend({
   value: '',
   defaultValue: ''
 },
+retypeSMTPPassword: {
+  label: 
Em.I18n.t('alerts.actions.manage_alert_notifications_popup.retypeSMTPPassword'),
+  value: '',
+  defaultValue: ''
+},
 SMTPSTARTTLS: {
   label: 
Em.I18n.t('alerts.actions.manage_alert_notifications_popup.SMTPSTARTTLS'),
   value: false,
@@ -290,6 +293,7 @@ App.ManageAlertNotificationsController = 
Em.Controller.extend({
 inputFields.set('SMTPUseAuthentication.value', 
selectedAlertNotification.get('properties')['mail.smtp.auth']);
 inputFields.set('SMTPUsername.value', 
selectedAlertNotification.get('properties')['ambari.dispatch.credential.username']);
 inputFields.set('SMTPPassword.value', 
selectedAlertNotification.get('properties')['ambari.dispatch.credential.password']);
+inputFields.set('retypeSMTPPassword.value', 
selectedAlertNotification.get('properties')['ambari.dispatch.credential.password']);
 inputFields.set('SMTPSTARTTLS.value', 
selectedAlertNotification.get('properties')['mail.smtp.starttls.enable']);
 inputFields.set('emailFrom.value', 
selectedAlertNotification.get('properties')['mail.smtp.from']);
 inputFields.set('version.value', 
selectedAlertNotification.get('properties')['ambari.dispatch.snmp.version']);
@@ -338,6 +342,7 @@ App.ManageAlertNotificationsController = 
Em.Controller.extend({
   this.emailFromValidation();
   this.smtpPortValidation();
   this.portValidation();
+  this.retypePasswordValidation();
 },
 
 isEmailMethodSelected: function () {
@@ -392,6 +397,18 @@ App.ManageAlertNotificationsController = 
Em.Controller.extend({
   }
 }.observes('controller.inputFields.port.value'),
 
+retypePasswordValidation: function () {
+  var passwordValue = 
this.get('controller.inputFields.SMTPPassword.value');
+  var retypePasswordValue = 
this.get('controller.inputFields.retypeSMTPPassword.value');
+  if (passwordValue !== retypePasswordValue) {
+this.set('parentView.hasErrors', true);
+this.set('controller.inputFields.retypeSMTPPassword.errorMsg', 
Em.I18n.t('alerts.notifications.error.retypePassword'));
+  } else {
+this.set('parentView.hasErrors', false);
+this.set('controller.inputFields.retypeSMTPPassword.errorMsg', 
null);
+  }
+}.observes('controller.inputFields.retypeSMTPPassword.value', 
'controller.inputFields.SMTPPassword.value'),
+
 
 groupsSelectView: Em.Select.extend({
   attributeBindings: ['disabled'],

http://git-wip-us.apache.org/repos/asf/ambari/blob/cef3f2cc/ambari-web/app/messa

ambari git commit: AMBARI-8957. Non-root install: Ganglia Monitor Install issue (aonishuk)

2014-12-30 Thread aonishuk
Repository: ambari
Updated Branches:
  refs/heads/trunk e8e1e3402 -> b4c48c262


AMBARI-8957. Non-root install: Ganglia Monitor Install issue (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b4c48c26
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b4c48c26
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b4c48c26

Branch: refs/heads/trunk
Commit: b4c48c262fe0b0cfadcae5be8d2860eecff9d1cf
Parents: e8e1e34
Author: Andrew Onishuk 
Authored: Tue Dec 30 17:00:39 2014 +0200
Committer: Andrew Onishuk 
Committed: Tue Dec 30 17:00:39 2014 +0200

--
 .../TestMonitorWebserverResource.py | 20 
 .../libraries/providers/monitor_webserver.py| 14 ++
 .../GANGLIA/3.5.0/package/files/setupGanglia.sh | 18 +++---
 .../GANGLIA/3.5.0/package/scripts/functions.py  | 19 +--
 .../package/scripts/ganglia_monitor_service.py  |  5 ++---
 .../3.5.0/package/scripts/ganglia_server.py |  4 +++-
 .../package/scripts/ganglia_server_service.py   |  4 ++--
 .../3.5.0/package/templates/gangliaLib.sh.j2|  3 ++-
 .../2.0.6/GANGLIA/test_ganglia_monitor.py   | 14 --
 .../stacks/2.0.6/GANGLIA/test_ganglia_server.py | 16 +---
 10 files changed, 68 insertions(+), 49 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/b4c48c26/ambari-agent/src/test/python/resource_management/TestMonitorWebserverResource.py
--
diff --git 
a/ambari-agent/src/test/python/resource_management/TestMonitorWebserverResource.py
 
b/ambari-agent/src/test/python/resource_management/TestMonitorWebserverResource.py
index 533ecaa..65ada42 100644
--- 
a/ambari-agent/src/test/python/resource_management/TestMonitorWebserverResource.py
+++ 
b/ambari-agent/src/test/python/resource_management/TestMonitorWebserverResource.py
@@ -31,11 +31,9 @@ class TestMonitorWebserverResource(TestCase):
 with Environment(test_mode=True) as env:
   MonitorWebserverProvider(MonitorWebserver("start")).action_start()
 defined_resources = env.resource_list
-expected_resources = "[MonitorWebserver['start'], " \
- "Execute['grep -E 'KeepAlive (On|Off)' 
/etc/httpd/conf/httpd.conf" \
- " && sed -i 's/KeepAlive Off/KeepAlive On/' 
/etc/httpd/conf/httpd.conf" \
- " || echo 'KeepAlive On' >> 
/etc/httpd/conf/httpd.conf']," \
- " Execute['/etc/init.d/httpd start']]"
+expected_resources = "[MonitorWebserver['start'], Execute['grep -E 
'KeepAlive (On|Off)' /etc/httpd/conf/httpd.conf && " \
+"/usr/bin/sudo [RMF_ENV_PLACEHOLDER] -H -E sed -i 's/KeepAlive 
Off/KeepAlive On/' /etc/httpd/conf/httpd.conf || " \
+"echo 'KeepAlive On' | /usr/bin/sudo [RMF_ENV_PLACEHOLDER] -H -E tee 
--append /etc/httpd/conf/httpd.conf > /dev/null'], 
Execute['('/etc/init.d/httpd', 'start')']]"
 self.assertEqual(str(defined_resources), expected_resources)
 
   @patch.object(System, "os_family", new='suse')
@@ -43,11 +41,9 @@ class TestMonitorWebserverResource(TestCase):
 with Environment(test_mode=True) as env:
   MonitorWebserverProvider(MonitorWebserver("start")).action_start()
 defined_resources = env.resource_list
-expected_resources = "[MonitorWebserver['start'], " \
- "Execute['grep -E 'KeepAlive (On|Off)' 
/etc/apache2/httpd.conf " \
- "&& sed -i 's/KeepAlive Off/KeepAlive On/' 
/etc/apache2/httpd.conf " \
- "|| echo 'KeepAlive On' >> 
/etc/apache2/httpd.conf']," \
- " Execute['/etc/init.d/apache2 start']]"
+expected_resources = "[MonitorWebserver['start'], Execute['grep -E 
'KeepAlive (On|Off)' /etc/apache2/httpd.conf && /usr/bin/sudo 
[RMF_ENV_PLACEHOLDER] " \
+"-H -E sed -i 's/KeepAlive Off/KeepAlive On/' /etc/apache2/httpd.conf || 
echo 'KeepAlive On' | " \
+"/usr/bin/sudo [RMF_ENV_PLACEHOLDER] -H -E tee --append 
/etc/apache2/httpd.conf > /dev/null'], Execute['('/etc/init.d/apache2', 
'start')']]"
 self.assertEqual(str(defined_resources), expected_resources)
 
   @patch.object(System, "os_family", new='redhat')
@@ -56,7 +52,7 @@ class TestMonitorWebserverResource(TestCase):
   MonitorWebserverProvider(MonitorWebserver("stop")).action_stop()
 defined_resources = env.resource_list
 expected_resources = "[MonitorWebserver['stop'], " \
- "Execute['/etc/init.d/httpd stop']]"
+ "Execute['('/etc/init.d/httpd', 'stop')']]"
 self.assertEqual(str(defined_resources), expected_resources)
 
   @patch.object(System, "os_family", new='suse')
@@ -65,5 +61,5 @@ class TestMonitorWebserverResource(TestCase):

ambari git commit: AMBARI-8956. Alerts UI: Wrong values of percent thresholds is shown. (akovalenko)

2014-12-30 Thread akovalenko
Repository: ambari
Updated Branches:
  refs/heads/trunk d40a2e5f1 -> e8e1e3402


AMBARI-8956. Alerts UI: Wrong values of percent thresholds is shown. 
(akovalenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e8e1e340
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e8e1e340
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e8e1e340

Branch: refs/heads/trunk
Commit: e8e1e340216689a8dd6056f49bbb35fb9fd00f69
Parents: d40a2e5
Author: Aleksandr Kovalenko 
Authored: Tue Dec 30 15:25:10 2014 +0200
Committer: Aleksandr Kovalenko 
Committed: Tue Dec 30 16:49:21 2014 +0200

--
 .../alerts/definition_configs_controller.js |  4 ++--
 ambari-web/app/models/alert_config.js   | 24 ++--
 ambari-web/test/models/alert_config_test.js | 15 ++--
 3 files changed, 27 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/e8e1e340/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
--
diff --git 
a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js 
b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
index 4fa0e5b..9f3be2e 100644
--- a/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
+++ b/ambari-web/app/controllers/main/alerts/definition_configs_controller.js
@@ -138,7 +138,7 @@ App.MainAlertDefinitionConfigsController = 
Em.Controller.extend({
 }
 
 configs.setEach('isDisabled', !this.get('canEdit'));
-configs.setEach('allConfigs', configs);
+configs.setEach('configsController', this);
 
 this.set('configs', configs);
   },
@@ -497,7 +497,7 @@ App.MainAlertDefinitionConfigsController = 
Em.Controller.extend({
   var largeValue = Em.get(this.get('configs').findProperty('name', 
'critical_threshold'), 'value');
   var largeValid = Em.get(this.get('configs').findProperty('name', 
'critical_threshold'), 'isValid');
 }
-return smallValid && largeValid ? !(smallValue <= largeValue) : false;
+return smallValid && largeValid ? Number(smallValue) > Number(largeValue) 
: false;
   }.property('configs.@each.value'),
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/e8e1e340/ambari-web/app/models/alert_config.js
--
diff --git a/ambari-web/app/models/alert_config.js 
b/ambari-web/app/models/alert_config.js
index d6e10c7..4b198db 100644
--- a/ambari-web/app/models/alert_config.js
+++ b/ambari-web/app/models/alert_config.js
@@ -152,12 +152,12 @@ App.AlertConfigProperty = Ember.Object.extend({
   }.property(),
 
   /**
-   * Array of a group of configs, that current config property relates to
+   * Configs controller
* Should be set in controller in rendering configs function
-   * Used to get access to other configs properties of one group
-   * @type {App.AlertConfigProperty[]}
+   * Used to get access to other configs properties of one group or definition 
properties
+   * @type {App.MainAlertDefinitionConfigsController}
*/
-  allConfigs: []
+  configsController: null
 
 });
 
@@ -348,26 +348,26 @@ App.AlertConfigProperties = {
   var valueMetric = this.get('valueMetric');
   var displayValue = this.get('displayValue');
   var newDisplayValue = value;
-  if (value && '%' == valueMetric && !isNaN(value)) {
+  if (value && '%' == valueMetric && !isNaN(value) && 
this.get('configsController.content.type') == 'AGGREGATE') {
 newDisplayValue = (Number(value) * 100) + '';
   }
   if (newDisplayValue != displayValue) {
 this.set('displayValue', newDisplayValue);
   }
-}.observes('value', 'valueMetric'),
+}.observes('value', 'valueMetric', 'configsController.content.type'),
 
 displayValueWasChanged: function () {
   var value = this.get('value');
   var valueMetric = this.get('valueMetric');
   var displayValue = this.get('displayValue');
   var newValue = displayValue;
-  if (displayValue && '%' == valueMetric && !isNaN(displayValue)) {
+  if (displayValue && '%' == valueMetric && !isNaN(displayValue) && 
this.get('configsController.content.type') == 'AGGREGATE') {
 newValue = (Number(displayValue) / 100) + '';
   }
   if (newValue != value) {
 this.set('value', newValue);
   }
-}.observes('displayValue', 'valueMetric')
+}.observes('displayValue', 'valueMetric', 'configsController.content.type')
 
   }),
 
@@ -466,14 +466,14 @@ App.AlertConfigProperties.Thresholds = {
   var value = this.get('value');
   if (!value) return false;
   value = ('' + value).trim();
-  if (this.get('showInputForValue') && this.get('valueMetric') == '%') {
+ 

ambari git commit: AMBARI-8950. Views: Pig, add autocomplete for path inputs. (alexantonenko)

2014-12-30 Thread alexantonenko
Repository: ambari
Updated Branches:
  refs/heads/trunk 10a9c63e8 -> d40a2e5f1


AMBARI-8950. Views: Pig, add autocomplete for path inputs. (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d40a2e5f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d40a2e5f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d40a2e5f

Branch: refs/heads/trunk
Commit: d40a2e5f1b8256aa2c510c3535cb5f95bbf90df9
Parents: 10a9c63
Author: Alex Antonenko 
Authored: Tue Dec 30 12:52:47 2014 +0200
Committer: Alex Antonenko 
Committed: Tue Dec 30 13:50:54 2014 +0200

--
 .../view/pig/resources/files/FileService.java   |  19 +-
 .../view/pig/templeton/client/Request.java  | 261 --
 .../pig/templeton/client/RequestWrapper.java| 271 +++
 .../pig/templeton/client/TempletonRequest.java  |   2 +-
 .../ui/pig-web/app/components/pathInput.js  |  50 
 .../main/resources/ui/pig-web/app/initialize.js |   1 +
 .../app/templates/modal/createScript.hbs|   4 +-
 .../pig-web/app/templates/modal/createUdf.hbs   |   8 +-
 .../src/main/resources/ui/pig-web/bower.json|   3 +-
 .../apache/ambari/view/pig/test/FileTest.java   |  16 +-
 10 files changed, 356 insertions(+), 279 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/d40a2e5f/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
--
diff --git 
a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
 
b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
index aaf877c..6a2628d 100644
--- 
a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
+++ 
b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/resources/files/FileService.java
@@ -25,6 +25,7 @@ import org.apache.ambari.view.pig.services.BaseService;
 import org.apache.ambari.view.pig.utils.*;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
+import org.apache.hadoop.fs.FileStatus;
 import org.json.simple.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -34,6 +35,8 @@ import javax.ws.rs.*;
 import javax.ws.rs.core.*;
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
 
 /**
  * File access resource
@@ -60,9 +63,21 @@ public class FileService extends BaseService {
   @GET
   @Path("{filePath:.*}")
   @Produces(MediaType.APPLICATION_JSON)
-  public Response getFile(@PathParam("filePath") String filePath, 
@QueryParam("page") Long page) throws IOException, InterruptedException {
-LOG.debug("Reading file " + filePath);
+  public Response getFile(@PathParam("filePath") String filePath,
+  @QueryParam("page") Long page,
+  @QueryParam("action") String action) throws 
IOException, InterruptedException {
 try {
+  if (action != null && action.equals("ls")) {
+LOG.debug("List directory " + filePath);
+List<String> ls = new LinkedList<String>();
+for (FileStatus fs : getHdfsApi().listdir(filePath)) {
+  ls.add(fs.getPath().toString());
+}
+JSONObject object = new JSONObject();
+object.put("ls", ls);
+return Response.ok(object).status(200).build();
+  }
+  LOG.debug("Reading file " + filePath);
   FilePaginator paginator = new FilePaginator(filePath, context);
 
   if (page == null)

http://git-wip-us.apache.org/repos/asf/ambari/blob/d40a2e5f/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/Request.java
--
diff --git 
a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/Request.java
 
b/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/Request.java
deleted file mode 100644
index 521bfad..000
--- 
a/contrib/views/pig/src/main/java/org/apache/ambari/view/pig/templeton/client/Request.java
+++ /dev/null
@@ -1,261 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the Lic

ambari git commit: AMBARI-8954. HiveServer2 can not be started (aonishuk)

2014-12-30 Thread aonishuk
Repository: ambari
Updated Branches:
  refs/heads/trunk d9db1db3d -> 10a9c63e8


AMBARI-8954. HiveServer2 can not be started (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/10a9c63e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/10a9c63e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/10a9c63e

Branch: refs/heads/trunk
Commit: 10a9c63e8a8ddc9dd9ff8cf2fc22960874b3d320
Parents: d9db1db
Author: Andrew Onishuk 
Authored: Tue Dec 30 13:31:44 2014 +0200
Committer: Andrew Onishuk 
Committed: Tue Dec 30 13:31:44 2014 +0200

--
 .../common-services/HIVE/0.12.0.2.0/package/scripts/params.py  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/10a9c63e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
index 14439bb..0caae1b 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
@@ -77,7 +77,7 @@ if hdp_stack_version_major != "" and 
compare_versions(hdp_stack_version_major, '
   hive_tar_destination = 
config['configurations']['cluster-env']['hive_tar_destination_folder']  + "/" + 
os.path.basename(hive_tar_source)
   pig_tar_destination = 
config['configurations']['cluster-env']['pig_tar_destination_folder'] + "/" + 
os.path.basename(pig_tar_source)
   hadoop_streaming_tar_destination_dir = 
config['configurations']['cluster-env']['hadoop-streaming_tar_destination_folder']
-  sqoop_tar_destination = 
config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + "/" + 
os.path.basename(sqoop_tar_source)
+  sqoop_tar_destination_dir = 
config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + "/" + 
os.path.basename(sqoop_tar_source)
   mapreduce_tar_destination = 
config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + 
"/" + os.path.basename(mapreduce_tar_source)
   tez_tar_destination = 
config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + 
os.path.basename(tez_tar_source)
 



ambari git commit: AMBARI-8951 Distribute repositories/install packages UI hangs on latest trunk. (ababiichuk)

2014-12-30 Thread ababiichuk
Repository: ambari
Updated Branches:
  refs/heads/trunk 6c687711c -> d9db1db3d


AMBARI-8951 Distribute repositories/install packages UI hangs on latest trunk. 
(ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d9db1db3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d9db1db3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d9db1db3

Branch: refs/heads/trunk
Commit: d9db1db3d6e7188550a69bdcb63123f3627e7ace
Parents: 6c68771
Author: aBabiichuk 
Authored: Tue Dec 30 12:22:05 2014 +0200
Committer: aBabiichuk 
Committed: Tue Dec 30 12:23:09 2014 +0200

--
 .../admin/stack_versions/stack_version_details_controller.js| 5 +++--
 ambari-web/app/utils/ajax/ajax.js   | 4 
 .../stack_version/stack_version_details_controller_test.js  | 3 ++-
 3 files changed, 5 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/d9db1db3/ambari-web/app/controllers/main/admin/stack_versions/stack_version_details_controller.js
--
diff --git 
a/ambari-web/app/controllers/main/admin/stack_versions/stack_version_details_controller.js
 
b/ambari-web/app/controllers/main/admin/stack_versions/stack_version_details_controller.js
index d467d20..c4c4060 100644
--- 
a/ambari-web/app/controllers/main/admin/stack_versions/stack_version_details_controller.js
+++ 
b/ambari-web/app/controllers/main/admin/stack_versions/stack_version_details_controller.js
@@ -112,7 +112,7 @@ App.MainStackVersionsDetailsController = 
Em.Controller.extend({
   } else {
 clearTimeout(self.get('timeoutRef'));
   }
-}, App.componentsUpdateInterval));
+}, 3000));
   },
 
   /**
@@ -123,7 +123,7 @@ App.MainStackVersionsDetailsController = 
Em.Controller.extend({
*/
   updateProgress: function() {
 return App.ajax.send({
-  'name': 'admin.stack_versions.progress.request',
+  'name': 'admin.high_availability.polling',
   'sender': this,
   'data': {
 requestId: App.db.get('repoVersion', 'id')
@@ -140,6 +140,7 @@ App.MainStackVersionsDetailsController = 
Em.Controller.extend({
   updateProgressSuccess: function(data) {
 if (Em.get(data, 'Requests.progress_percent')) {
   this.set('progress', parseInt(Em.get(data, 
'Requests.progress_percent')));
+  this.set('logs', data.tasks);
 }
   },
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d9db1db3/ambari-web/app/utils/ajax/ajax.js
--
diff --git a/ambari-web/app/utils/ajax/ajax.js 
b/ambari-web/app/utils/ajax/ajax.js
index ec80075..9a46781 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -1372,10 +1372,6 @@ var urls = {
 },
 'mock': ''
   },
-  'admin.stack_versions.progress.request': {
-'real': 
'/clusters/{clusterName}/requests/{requestId}?fields=Requests/progress_percent',
-'mock': '/data/background_operations/host_upgrade_tasks.json'
-  },
   'admin.rolling_upgrade.pre_upgrade_check': {
 'real': '/clusters/{clusterName}/rolling_upgrades_check?fields=*',
 'mock': '/data/stack_versions/pre_upgrade_check.json'

http://git-wip-us.apache.org/repos/asf/ambari/blob/d9db1db3/ambari-web/test/controllers/main/admin/stack_version/stack_version_details_controller_test.js
--
diff --git 
a/ambari-web/test/controllers/main/admin/stack_version/stack_version_details_controller_test.js
 
b/ambari-web/test/controllers/main/admin/stack_version/stack_version_details_controller_test.js
index 235e845..4e288bf 100644
--- 
a/ambari-web/test/controllers/main/admin/stack_version/stack_version_details_controller_test.js
+++ 
b/ambari-web/test/controllers/main/admin/stack_version/stack_version_details_controller_test.js
@@ -90,8 +90,9 @@ describe('App.MainStackVersionsDetailsController', function 
() {
 
   describe('#updateProgressSuccess', function () {
 it("saves progress state to the controller", function () {
-  controller.updateProgressSuccess({Requests: {progress_percent: 10} });
+  controller.updateProgressSuccess({Requests: {progress_percent: 10} , 
tasks: [{task: 1}]});
   expect(controller.get('progress')).to.equal(10);
+  expect(controller.get('logs')).to.eql([{task: 1}])
 });
   });
 



ambari git commit: AMBARI-8924. Add OUT_OF_SYNC set to cluster stack version API (dlysnichenko)

2014-12-30 Thread dmitriusan
Repository: ambari
Updated Branches:
  refs/heads/trunk ab8c0e350 -> 6c687711c


AMBARI-8924. Add OUT_OF_SYNC set to cluster stack version API (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6c687711
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6c687711
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6c687711

Branch: refs/heads/trunk
Commit: 6c687711c1f2358f41e58eea2a052df5884b3bcc
Parents: ab8c0e3
Author: Lisnichenko Dmitro 
Authored: Tue Dec 30 11:54:06 2014 +0200
Committer: Lisnichenko Dmitro 
Committed: Tue Dec 30 11:54:06 2014 +0200

--
 .../ClusterStackVersionResourceProvider.java|   1 +
 .../internal/HostResourceProvider.java  |  12 +
 .../HostStackVersionResourceProvider.java   |   1 +
 .../events/ServiceComponentInstalledEvent.java  |   8 +
 .../upgrade/HostVersionOutOfSyncListener.java   | 126 +
 .../org/apache/ambari/server/state/Cluster.java |  13 +-
 .../server/state/RepositoryVersionState.java|  39 ++-
 .../server/state/cluster/ClusterImpl.java   |  65 -
 .../svccomphost/ServiceComponentHostImpl.java   |  10 +
 .../AmbariManagementControllerTest.java |   2 +
 .../apache/ambari/server/orm/OrmTestHelper.java |  20 ++
 .../server/state/cluster/ClusterTest.java   | 258 ++-
 12 files changed, 527 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/6c687711/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
index 4f5e03d..a33930d 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ClusterStackVersionResourceProvider.java
@@ -387,6 +387,7 @@ public class ClusterStackVersionResourceProvider extends 
AbstractControllerResou
 cluster.transitionClusterVersion(stackId, desiredRepoVersion, 
RepositoryVersionState.INSTALLING);
   }
   cluster.inferHostVersions(existingCSVer);
+  cluster.recalculateClusterVersionState(desiredRepoVersion);
 
   req.persist();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6c687711/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
index d868320..b5d2d6d 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostResourceProvider.java
@@ -437,6 +437,7 @@ public class HostResourceProvider extends 
BaseBlueprintProcessor {
 
 Map<String, Set<String>> hostClustersMap = new HashMap<String, Set<String>>();
 Map<String, Map<String, String>> hostAttributes = new HashMap<String, Map<String, String>>();
+Set<String> allClusterSet = new HashSet<String>();
 
 for (HostRequest hostRequest : hostRequests) {
   if (hostRequest.getHostname() != null &&
@@ -446,6 +447,7 @@ public class HostResourceProvider extends 
BaseBlueprintProcessor {
 
 Set<String> clusterSet = new HashSet<String>();
 clusterSet.add(hostRequest.getClusterName());
+allClusterSet.add(hostRequest.getClusterName());
 hostClustersMap.put(hostRequest.getHostname(), clusterSet);
 if (hostRequest.getHostAttributes() != null) {
   hostAttributes.put(hostRequest.getHostname(), 
hostRequest.getHostAttributes());
@@ -453,6 +455,10 @@ public class HostResourceProvider extends 
BaseBlueprintProcessor {
   }
 }
 clusters.updateHostWithClusterAndAttributes(hostClustersMap, 
hostAttributes);
+
+for (String clusterName : allClusterSet) {
+  clusters.getCluster(clusterName).recalculateAllClusterVersionStates();
+}
   }
 
   private void createHostResource(Clusters clusters, Set<String> duplicates,
@@ -793,6 +799,11 @@ public class HostResourceProvider extends 
BaseBlueprintProcessor {
   }
 }
   }
+
+  if (null != request.getClusterName() && 
!request.getClusterName().isEmpty()) {
+
clusters.getCluster(request.getClusterName()).recalculateAllClusterVersionStates();
+  }
+
   //todo: if attempt was made to update a property other than those
   //todo: that are allowed above, shou