ambari git commit: AMBARI-10022. RU - DB Schema differs between Postgres internal and external (alejandro)

2015-03-11 Thread alejandro
Repository: ambari
Updated Branches:
  refs/heads/trunk 6bf7adc5e -> 4d1334719


AMBARI-10022. RU - DB Schema differs between Postgres internal and external (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4d133471
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4d133471
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4d133471

Branch: refs/heads/trunk
Commit: 4d1334719e92c2dfa5327675995ce44593772177
Parents: 6bf7adc
Author: Alejandro Fernandez 
Authored: Tue Mar 10 18:28:02 2015 -0700
Committer: Alejandro Fernandez 
Committed: Wed Mar 11 19:19:33 2015 -0700

--
 ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql| 2 ++
 .../src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/4d133471/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
--
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql 
b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
index 034f871..78a263f 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
@@ -816,6 +816,8 @@ BEGIN;
   union all
   select 'upgrade_id_seq', 0 
   union all
+  select 'upgrade_group_id_seq', 0
+  union all
   select 'upgrade_item_id_seq', 0;
 
   INSERT INTO adminresourcetype (resource_type_id, resource_type_name)

http://git-wip-us.apache.org/repos/asf/ambari/blob/4d133471/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
--
diff --git 
a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql 
b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
index 488e268..a06f1d2 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-EMBEDDED-CREATE.sql
@@ -578,7 +578,7 @@ GRANT ALL PRIVILEGES ON TABLE ambari.adminpermission TO :username;
 GRANT ALL PRIVILEGES ON TABLE ambari.adminprivilege TO :username;
 
 CREATE TABLE ambari.repo_version (
-  repo_version_id BIGINT,
+  repo_version_id BIGINT NOT NULL,
   stack VARCHAR(255) NOT NULL,
   version VARCHAR(255) NOT NULL,
   display_name VARCHAR(128) NOT NULL,



ambari git commit: Revert "AMBARI-10036. Add HDP 2.3 stack to Ambari (aonishuk)"

2015-03-11 Thread mahadev
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 c7e8cbaa7 -> 9c9c3041a


Revert "AMBARI-10036. Add HDP 2.3 stack to Ambari (aonishuk)"

This reverts commit 7d74689046ce92870faeb64707d0dd80d20fdcff.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9c9c3041
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9c9c3041
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9c9c3041

Branch: refs/heads/branch-2.0.0
Commit: 9c9c3041af09b46f73eed94f4aca73bf12f50709
Parents: c7e8cba
Author: Mahadev Konar 
Authored: Wed Mar 11 18:20:39 2015 -0700
Committer: Mahadev Konar 
Committed: Wed Mar 11 18:20:39 2015 -0700

--
 .../main/resources/stacks/HDP/2.3/metainfo.xml  | 23 -
 .../resources/stacks/HDP/2.3/repos/repoinfo.xml | 68 --
 .../HDP/2.3/services/ACCUMULO/metainfo.xml  | 45 --
 .../stacks/HDP/2.3/services/FALCON/metainfo.xml | 44 -
 .../stacks/HDP/2.3/services/FLUME/metainfo.xml  | 46 --
 .../stacks/HDP/2.3/services/HBASE/metainfo.xml  | 52 ---
 .../stacks/HDP/2.3/services/HDFS/metainfo.xml   | 95 
 .../stacks/HDP/2.3/services/HIVE/metainfo.xml   | 85 --
 .../stacks/HDP/2.3/services/KAFKA/metainfo.xml  | 44 -
 .../HDP/2.3/services/KERBEROS/metainfo.xml  | 25 --
 .../stacks/HDP/2.3/services/KNOX/metainfo.xml   | 44 -
 .../stacks/HDP/2.3/services/OOZIE/metainfo.xml  | 70 ---
 .../stacks/HDP/2.3/services/PIG/metainfo.xml| 44 -
 .../stacks/HDP/2.3/services/RANGER/metainfo.xml | 54 ---
 .../stacks/HDP/2.3/services/SLIDER/metainfo.xml | 50 ---
 .../stacks/HDP/2.3/services/SPARK/metainfo.xml  | 53 ---
 .../stacks/HDP/2.3/services/SQOOP/metainfo.xml  | 52 ---
 .../stacks/HDP/2.3/services/STORM/metainfo.xml  | 45 --
 .../stacks/HDP/2.3/services/TEZ/metainfo.xml| 46 --
 .../stacks/HDP/2.3/services/YARN/metainfo.xml   | 79 
 .../HDP/2.3/services/ZOOKEEPER/metainfo.xml | 45 --
 .../stacks/HDP/2.3/services/stack_advisor.py| 21 -
 22 files changed, 1130 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/9c9c3041/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml
--
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml
deleted file mode 100644
index d04b44b..000
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-
-
-
-
- true
-
-2.2
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c9c3041/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml
--
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml
deleted file mode 100644
index 5b25a04..000
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-
-
-
-  http://public-repo-1.hortonworks.com/HDP/hdp_urlinfo.json
-  
-
-  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0
-  HDP-2.3
-  HDP
-
-
-  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6
-  HDP-UTILS-1.1.0.20
-  HDP-UTILS
-
-  
-  
-
-  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.3.0.0
-  HDP-2.3
-  HDP
-
-
-  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos5
-  HDP-UTILS-1.1.0.20
-  HDP-UTILS
-
-  
-  
-
-  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11sp3/2.x/updates/2.3.0.0
-  HDP-2.3
-  HDP
-
-
-  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/suse11sp3
-  HDP-UTILS-1.1.0.20
-  HDP-UTILS
-
-  
-  
-
-  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/ubuntu12/2.x/updates/2.3.0.0
-  HDP-2.3
-  HDP
-
-
-  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/ubuntu12
-  HDP-UTILS-1.1.0.20
-  HDP-UTILS
-
-  
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/9c9c3041/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
deleted file mode 100644
index 5d7bfa0..000
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/met

ambari git commit: AMBARI-9007. Identity references fail to dereference for service-level references in Kerberos descriptor parser (rlevas)

2015-03-11 Thread rlevas
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 7d7468904 -> c7e8cbaa7


AMBARI-9007. Identity references fail to dereference for service-level references in Kerberos descriptor parser (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c7e8cbaa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c7e8cbaa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c7e8cbaa

Branch: refs/heads/branch-2.0.0
Commit: c7e8cbaa73563c600c0bd6e8b3a0d57cfc2dd28d
Parents: 7d74689
Author: Robert Levas 
Authored: Wed Mar 11 21:14:17 2015 -0400
Committer: Robert Levas 
Committed: Wed Mar 11 21:14:17 2015 -0400

--
 .../AbstractKerberosDescriptorContainer.java| 127 +--
 .../state/kerberos/KerberosDescriptor.java  |  20 +++
 .../HIVE/0.12.0.2.0/kerberos.json   |  54 
 .../state/kerberos/KerberosDescriptorTest.java  |  89 +
 ...test_get_referenced_identity_descriptor.json | 127 +++
 5 files changed, 350 insertions(+), 67 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/c7e8cbaa/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
index 2ec2cb5..874e331 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
@@ -527,10 +527,38 @@ public abstract class AbstractKerberosDescriptorContainer 
extends AbstractKerber
* 
* The path value is expected to be an "absolute" path through the Kerberos Descriptor hierarchy
* to some specific KerberosIdentityDescriptor.  The path must be in one of the following forms:
+   * 
+   * /identity
+   * /service/identity
+   * /service/component/identity
+   * 
* 
-   * /identity
-   * /service/identity
-   * /service/component/identity
+   * If the path starts with "../", the ".." will be translated to the path of the parent item.
+   * In the following example, ../service_identity will resolve to
+   * /SERVICE/service_identity:
+   * 
+   * {
+   *  "name": "SERVICE",
+   *  "identities": [
+   *{
+   *  "name": "service_identity",
+   *  ...
+   *}
+   *  ],
+   *  "components" : [
+   *{
+   *  "name": "COMPONENT",
+   *  "identities": [
+   *{
+   *  "name": "./service_identity",
+   *  ...
+   *},
+   *...
+   *  ]
+   *}
+   *  ]
+   * }
+   * 
*
* @param path a String declaring the path to a KerberosIdentityDescriptor
* @return a KerberosIdentityDescriptor identified by the path or null if not found
@@ -539,50 +567,72 @@ public abstract class AbstractKerberosDescriptorContainer 
extends AbstractKerber
   throws AmbariException {
 KerberosIdentityDescriptor identityDescriptor = null;
 
-if ((path != null) && path.startsWith("/")) {
-  // The name indicates it is referencing an identity somewhere in the 
hierarchy... try to find it.
-  // /[/[/]]
-  String[] pathParts = path.split("/");
+if (path != null) {
+  if(path.startsWith("../")) {
+// Resolve parent path
+AbstractKerberosDescriptor parent = getParent();
 
-  String serviceName = null;
-  String componentName = null;
-  String identityName;
+path = path.substring(2);
 
-  switch (pathParts.length) {
-case 3:
-  serviceName = pathParts[0];
-  componentName = pathParts[1];
-  identityName = pathParts[2];
-  break;
-case 2:
-  serviceName = pathParts[0];
-  identityName = pathParts[1];
-  break;
-case 1:
-  identityName = pathParts[0];
-  break;
-default:
-  throw new AmbariException(String.format("Unexpected path length in 
%s", path));
+while(parent != null) {
+  String name = parent.getName();
+
+  if (name != null) {
+path = String.format("/%s", name) + path;
+  }
+
+  parent = parent.getParent();
+}
   }
 
-  if (identityName != null) {
-// Start at the top of the hierarchy
-AbstractKerberosDescriptor descriptor = getRoot();
+  if (path.startsWith("/")) {
+// The name indicates it is referencing an identity somewhere in the 
hierarchy... try to find it.
+// /[/[/]]
+String[] pathPa
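
The hunk above is cut off, but the relative-reference handling it introduces is fully described in the Javadoc: a leading "../" is replaced by the names of the enclosing containers. A minimal, self-contained sketch of that step (Python, with invented class and variable names; illustrative only, not the Ambari Java implementation):

class Container(object):
  def __init__(self, name, parent=None):
    self.name = name
    self.parent = parent

def resolve_reference(container, path):
  # Turn "../service_identity" into "/SERVICE/service_identity" by walking up
  # the parent chain and prepending each named ancestor, per the Javadoc above.
  if path.startswith("../"):
    path = path[2:]                  # strip the "..", keep the leading "/"
    parent = container.parent
    while parent is not None:
      if parent.name is not None:
        path = "/%s" % parent.name + path
      parent = parent.parent
  return path

root = Container(None)
service = Container("SERVICE", parent=root)
component = Container("COMPONENT", parent=service)
print(resolve_reference(component, "../service_identity"))  # /SERVICE/service_identity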

ambari git commit: AMBARI-9007. Identity references fail to dereference for service-level references in Kerberos descriptor parser (rlevas)

2015-03-11 Thread rlevas
Repository: ambari
Updated Branches:
  refs/heads/trunk 04f54294e -> 6bf7adc5e


AMBARI-9007. Identity references fail to dereference for service-level references in Kerberos descriptor parser (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6bf7adc5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6bf7adc5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6bf7adc5

Branch: refs/heads/trunk
Commit: 6bf7adc5e1e9ce2ade3163d50087257d727d830b
Parents: 04f5429
Author: Robert Levas 
Authored: Wed Mar 11 20:38:28 2015 -0400
Committer: Robert Levas 
Committed: Wed Mar 11 20:38:28 2015 -0400

--
 .../AbstractKerberosDescriptorContainer.java| 127 +--
 .../state/kerberos/KerberosDescriptor.java  |  20 +++
 .../HIVE/0.12.0.2.0/kerberos.json   |  54 
 .../state/kerberos/KerberosDescriptorTest.java  |  89 +
 ...test_get_referenced_identity_descriptor.json | 127 +++
 5 files changed, 350 insertions(+), 67 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/6bf7adc5/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
index 2ec2cb5..874e331 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
@@ -527,10 +527,38 @@ public abstract class AbstractKerberosDescriptorContainer 
extends AbstractKerber
* 
* The path value is expected to be an "absolute" path through the Kerberos Descriptor hierarchy
* to some specific KerberosIdentityDescriptor.  The path must be in one of the following forms:
+   * 
+   * /identity
+   * /service/identity
+   * /service/component/identity
+   * 
* 
-   * /identity
-   * /service/identity
-   * /service/component/identity
+   * If the path starts with "../", the ".." will be translated to the path of the parent item.
+   * In the following example, ../service_identity will resolve to
+   * /SERVICE/service_identity:
+   * 
+   * {
+   *  "name": "SERVICE",
+   *  "identities": [
+   *{
+   *  "name": "service_identity",
+   *  ...
+   *}
+   *  ],
+   *  "components" : [
+   *{
+   *  "name": "COMPONENT",
+   *  "identities": [
+   *{
+   *  "name": "./service_identity",
+   *  ...
+   *},
+   *...
+   *  ]
+   *}
+   *  ]
+   * }
+   * 
*
* @param path a String declaring the path to a KerberosIdentityDescriptor
* @return a KerberosIdentityDescriptor identified by the path or null if not found
@@ -539,50 +567,72 @@ public abstract class AbstractKerberosDescriptorContainer 
extends AbstractKerber
   throws AmbariException {
 KerberosIdentityDescriptor identityDescriptor = null;
 
-if ((path != null) && path.startsWith("/")) {
-  // The name indicates it is referencing an identity somewhere in the 
hierarchy... try to find it.
-  // /[/[/]]
-  String[] pathParts = path.split("/");
+if (path != null) {
+  if(path.startsWith("../")) {
+// Resolve parent path
+AbstractKerberosDescriptor parent = getParent();
 
-  String serviceName = null;
-  String componentName = null;
-  String identityName;
+path = path.substring(2);
 
-  switch (pathParts.length) {
-case 3:
-  serviceName = pathParts[0];
-  componentName = pathParts[1];
-  identityName = pathParts[2];
-  break;
-case 2:
-  serviceName = pathParts[0];
-  identityName = pathParts[1];
-  break;
-case 1:
-  identityName = pathParts[0];
-  break;
-default:
-  throw new AmbariException(String.format("Unexpected path length in 
%s", path));
+while(parent != null) {
+  String name = parent.getName();
+
+  if (name != null) {
+path = String.format("/%s", name) + path;
+  }
+
+  parent = parent.getParent();
+}
   }
 
-  if (identityName != null) {
-// Start at the top of the hierarchy
-AbstractKerberosDescriptor descriptor = getRoot();
+  if (path.startsWith("/")) {
+// The name indicates it is referencing an identity somewhere in the 
hierarchy... try to find it.
+// /[/[/]]
+String[] pathParts = path.spl

ambari git commit: AMBARI-9505 Hive service with HDPWIN 2.2 fails to start

2015-03-11 Thread fbarca
Repository: ambari
Updated Branches:
  refs/heads/trunk da3a88f2c -> 04f54294e


AMBARI-9505 Hive service with HDPWIN 2.2 fails to start

Copying the SQL Server JDBC driver into the hadoop shared lib directory.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/04f54294
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/04f54294
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/04f54294

Branch: refs/heads/trunk
Commit: 04f54294eb182c719c2998c67b9865d3e0b3fc9c
Parents: da3a88f
Author: Florian Barca 
Authored: Wed Mar 11 16:09:45 2015 -0700
Committer: Florian Barca 
Committed: Wed Mar 11 16:09:45 2015 -0700

--
 .../libraries/functions/__init__.py |  1 +
 .../libraries/functions/install_jdbc_driver.py  | 48 
 .../2.1/hooks/after-INSTALL/scripts/hook.py | 11 -
 3 files changed, 58 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/04f54294/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
--
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
index f6db722..e55a2cf 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
@@ -43,4 +43,5 @@ IS_WINDOWS = platform.system() == "Windows"
 if IS_WINDOWS:
   from resource_management.libraries.functions.windows_service_utils import *
   from resource_management.libraries.functions.install_hdp_msi import *
+  from resource_management.libraries.functions.install_jdbc_driver import *
   from resource_management.libraries.functions.reload_windows_env import *

http://git-wip-us.apache.org/repos/asf/ambari/blob/04f54294/ambari-common/src/main/python/resource_management/libraries/functions/install_jdbc_driver.py
--
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/install_jdbc_driver.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/install_jdbc_driver.py
new file mode 100644
index 000..be77cfd
--- /dev/null
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/install_jdbc_driver.py
@@ -0,0 +1,48 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+import os
+
+from ambari_commons.inet_utils import download_file
+from ambari_commons.os_utils import copy_file, search_file
+from resource_management.core.logger import Logger
+
+
+__all__ = ["ensure_jdbc_driver_is_in_classpath"]
+
+
+def ensure_jdbc_driver_is_in_classpath(dest_dir, cache_location, driver_url, driver_files):
+  #Attempt to find the JDBC driver installed locally
+  #If not, attempt to download it from the server resources URL
+  for driver_file in driver_files:
+    dest_path = os.path.join(dest_dir, driver_file)
+    Logger.info("JDBC driver file(s) {0}: Attempting to copy from {1} or download from {2} to {3}".format(
+      str(driver_files), cache_location, driver_url, dest_dir))
+    if not os.path.exists(dest_path):
+      search_path = os.environ["PATH"]
+      if cache_location:
+        search_path += os.pathsep + cache_location  #The locally installed version takes precedence over the cache
+
+      local_path = search_file(driver_file, search_path)
+      if not local_path:
+        download_file(driver_url + "/" + driver_file, dest_path)
+      else:
+        copy_file(local_path, dest_path)
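
A hypothetical call to the new helper is shown below; the destination directory, cache path, resources URL and jar name are invented for illustration (the real wiring lives in the truncated hook.py change further down):

from resource_management.libraries.functions.install_jdbc_driver import ensure_jdbc_driver_is_in_classpath

ensure_jdbc_driver_is_in_classpath(
  dest_dir="C:\\hadoop\\share\\hadoop\\common\\lib",    # assumed Hadoop shared lib dir
  cache_location="C:\\ambari\\jdbc-cache",              # assumed local cache location
  driver_url="http://ambari-server:8080/resources",     # assumed server resources URL
  driver_files=["sqljdbc4.jar"])                        # assumed SQL Server JDBC jar name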

http://git-wip-us.apache.org/repos/asf/ambari/blob/04f54294/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/after-INSTALL/scripts/hook.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/after-INSTALL/scripts/hook.py
 
b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/hooks/

[1/2] ambari git commit: AMBARI-10036. Add HDP 2.3 stack to Ambari (aonishuk)

2015-03-11 Thread aonishuk
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 15120403f -> 7d7468904
  refs/heads/trunk 7d6ab56a6 -> da3a88f2c


AMBARI-10036. Add HDP 2.3 stack to Ambari (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/da3a88f2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/da3a88f2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/da3a88f2

Branch: refs/heads/trunk
Commit: da3a88f2c8583e5fdefdc088fd51f38a409cd1c1
Parents: 7d6ab56
Author: Andrew Onishuk 
Authored: Wed Mar 11 23:48:08 2015 +0200
Committer: Andrew Onishuk 
Committed: Wed Mar 11 23:48:08 2015 +0200

--
 .../main/resources/stacks/HDP/2.3/metainfo.xml  | 23 +
 .../resources/stacks/HDP/2.3/repos/repoinfo.xml | 68 ++
 .../HDP/2.3/services/ACCUMULO/metainfo.xml  | 45 ++
 .../stacks/HDP/2.3/services/FALCON/metainfo.xml | 44 +
 .../stacks/HDP/2.3/services/FLUME/metainfo.xml  | 46 ++
 .../stacks/HDP/2.3/services/HBASE/metainfo.xml  | 52 +++
 .../stacks/HDP/2.3/services/HDFS/metainfo.xml   | 95 
 .../stacks/HDP/2.3/services/HIVE/metainfo.xml   | 85 ++
 .../stacks/HDP/2.3/services/KAFKA/metainfo.xml  | 44 +
 .../HDP/2.3/services/KERBEROS/metainfo.xml  | 25 ++
 .../stacks/HDP/2.3/services/KNOX/metainfo.xml   | 44 +
 .../stacks/HDP/2.3/services/OOZIE/metainfo.xml  | 70 +++
 .../stacks/HDP/2.3/services/PIG/metainfo.xml| 44 +
 .../stacks/HDP/2.3/services/RANGER/metainfo.xml | 54 +++
 .../stacks/HDP/2.3/services/SLIDER/metainfo.xml | 50 +++
 .../stacks/HDP/2.3/services/SPARK/metainfo.xml  | 53 +++
 .../stacks/HDP/2.3/services/SQOOP/metainfo.xml  | 52 +++
 .../stacks/HDP/2.3/services/STORM/metainfo.xml  | 45 ++
 .../stacks/HDP/2.3/services/TEZ/metainfo.xml| 46 ++
 .../stacks/HDP/2.3/services/YARN/metainfo.xml   | 79 
 .../HDP/2.3/services/ZOOKEEPER/metainfo.xml | 45 ++
 .../stacks/HDP/2.3/services/stack_advisor.py| 21 +
 22 files changed, 1130 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/da3a88f2/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml
--
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml
new file mode 100644
index 000..d04b44b
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml
@@ -0,0 +1,23 @@
+
+
+
+
+ true
+
+2.2
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/da3a88f2/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml
--
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml
new file mode 100644
index 000..5b25a04
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml
@@ -0,0 +1,68 @@
+
+
+
+  http://public-repo-1.hortonworks.com/HDP/hdp_urlinfo.json
+  
+
+  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0
+  HDP-2.3
+  HDP
+
+
+  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6
+  HDP-UTILS-1.1.0.20
+  HDP-UTILS
+
+  
+  
+
+  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.3.0.0
+  HDP-2.3
+  HDP
+
+
+  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos5
+  HDP-UTILS-1.1.0.20
+  HDP-UTILS
+
+  
+  
+
+  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11sp3/2.x/updates/2.3.0.0
+  HDP-2.3
+  HDP
+
+
+  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/suse11sp3
+  HDP-UTILS-1.1.0.20
+  HDP-UTILS
+
+  
+  
+
+  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/ubuntu12/2.x/updates/2.3.0.0
+  HDP-2.3
+  HDP
+
+
+  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/ubuntu12
+  HDP-UTILS-1.1.0.20
+  HDP-UTILS
+
+  
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/da3a88f2/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
new file mode 100644
index 000..5d7bfa0
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
@@ -0,0 +1,45 @@
+
+
+

[2/2] ambari git commit: AMBARI-10036. Add HDP 2.3 stack to Ambari (aonishuk)

2015-03-11 Thread aonishuk
AMBARI-10036. Add HDP 2.3 stack to Ambari (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7d746890
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7d746890
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7d746890

Branch: refs/heads/branch-2.0.0
Commit: 7d74689046ce92870faeb64707d0dd80d20fdcff
Parents: 1512040
Author: Andrew Onishuk 
Authored: Wed Mar 11 23:48:11 2015 +0200
Committer: Andrew Onishuk 
Committed: Wed Mar 11 23:48:11 2015 +0200

--
 .../main/resources/stacks/HDP/2.3/metainfo.xml  | 23 +
 .../resources/stacks/HDP/2.3/repos/repoinfo.xml | 68 ++
 .../HDP/2.3/services/ACCUMULO/metainfo.xml  | 45 ++
 .../stacks/HDP/2.3/services/FALCON/metainfo.xml | 44 +
 .../stacks/HDP/2.3/services/FLUME/metainfo.xml  | 46 ++
 .../stacks/HDP/2.3/services/HBASE/metainfo.xml  | 52 +++
 .../stacks/HDP/2.3/services/HDFS/metainfo.xml   | 95 
 .../stacks/HDP/2.3/services/HIVE/metainfo.xml   | 85 ++
 .../stacks/HDP/2.3/services/KAFKA/metainfo.xml  | 44 +
 .../HDP/2.3/services/KERBEROS/metainfo.xml  | 25 ++
 .../stacks/HDP/2.3/services/KNOX/metainfo.xml   | 44 +
 .../stacks/HDP/2.3/services/OOZIE/metainfo.xml  | 70 +++
 .../stacks/HDP/2.3/services/PIG/metainfo.xml| 44 +
 .../stacks/HDP/2.3/services/RANGER/metainfo.xml | 54 +++
 .../stacks/HDP/2.3/services/SLIDER/metainfo.xml | 50 +++
 .../stacks/HDP/2.3/services/SPARK/metainfo.xml  | 53 +++
 .../stacks/HDP/2.3/services/SQOOP/metainfo.xml  | 52 +++
 .../stacks/HDP/2.3/services/STORM/metainfo.xml  | 45 ++
 .../stacks/HDP/2.3/services/TEZ/metainfo.xml| 46 ++
 .../stacks/HDP/2.3/services/YARN/metainfo.xml   | 79 
 .../HDP/2.3/services/ZOOKEEPER/metainfo.xml | 45 ++
 .../stacks/HDP/2.3/services/stack_advisor.py| 21 +
 22 files changed, 1130 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/7d746890/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml
--
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml
new file mode 100644
index 000..d04b44b
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/metainfo.xml
@@ -0,0 +1,23 @@
+
+
+
+
+ true
+
+2.2
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d746890/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml
--
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml
new file mode 100644
index 000..5b25a04
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/repos/repoinfo.xml
@@ -0,0 +1,68 @@
+
+
+
+  http://public-repo-1.hortonworks.com/HDP/hdp_urlinfo.json
+  
+
+  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0
+  HDP-2.3
+  HDP
+
+
+  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6
+  HDP-UTILS-1.1.0.20
+  HDP-UTILS
+
+  
+  
+
+  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos5/2.x/updates/2.3.0.0
+  HDP-2.3
+  HDP
+
+
+  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos5
+  HDP-UTILS-1.1.0.20
+  HDP-UTILS
+
+  
+  
+
+  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/suse11sp3/2.x/updates/2.3.0.0
+  HDP-2.3
+  HDP
+
+
+  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/suse11sp3
+  HDP-UTILS-1.1.0.20
+  HDP-UTILS
+
+  
+  
+
+  
http://s3.amazonaws.com/dev.hortonworks.com/HDP/ubuntu12/2.x/updates/2.3.0.0
+  HDP-2.3
+  HDP
+
+
+  
http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/ubuntu12
+  HDP-UTILS-1.1.0.20
+  HDP-UTILS
+
+  
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/7d746890/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
new file mode 100644
index 000..5d7bfa0
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/ACCUMULO/metainfo.xml
@@ -0,0 +1,45 @@
+
+
+
+  2.0
+  
+
+  ACCUMULO
+  1.7.0.2.3
+  
+
+  redhat5,redhat6,suse11
+  
+   

ambari git commit: AMBARI-10034 Wrong mysql-connector jar is not replaced during ambari-server setup (dsen)

2015-03-11 Thread dsen
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 2d78141e0 -> 15120403f


AMBARI-10034 Wrong mysql-connector jar is not replaced during ambari-server setup (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/15120403
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/15120403
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/15120403

Branch: refs/heads/branch-2.0.0
Commit: 15120403f23813f88e37f6e28cd3e9b73d3307c3
Parents: 2d78141
Author: Dmytro Sen 
Authored: Wed Mar 11 21:50:00 2015 +0200
Committer: Dmytro Sen 
Committed: Wed Mar 11 21:53:01 2015 +0200

--
 .../src/main/python/ambari_server/serverSetup.py   | 17 ++---
 1 file changed, 10 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/15120403/ambari-server/src/main/python/ambari_server/serverSetup.py
--
diff --git a/ambari-server/src/main/python/ambari_server/serverSetup.py 
b/ambari-server/src/main/python/ambari_server/serverSetup.py
index 1c6dbbc..42b7d24 100644
--- a/ambari-server/src/main/python/ambari_server/serverSetup.py
+++ b/ambari-server/src/main/python/ambari_server/serverSetup.py
@@ -797,13 +797,16 @@ def _cache_jdbc_driver(args):
   if os.path.lexists(jdbc_symlink):
 os.remove(jdbc_symlink)
 
-  if not os.path.isfile(os.path.join(resources_dir, jdbc_name)):
-try:
-  shutil.copy(args.jdbc_driver, resources_dir)
-except Exception, e:
-  err = "Can not copy file {0} to {1} due to: {2} . Please check file " \
-"permissions and free disk space.".format(args.jdbc_driver, 
resources_dir, str(e))
-  raise FatalException(1, err)
+  if os.path.isfile(os.path.join(resources_dir, jdbc_name)):
+os.remove(os.path.join(resources_dir, jdbc_name))
+
+  try:
+shutil.copy(args.jdbc_driver, resources_dir)
+print "Copying {0} to {1}".format(args.jdbc_driver, resources_dir)
+  except Exception, e:
+err = "Can not copy file {0} to {1} due to: {2} . Please check file " \
+  "permissions and free disk space.".format(args.jdbc_driver, 
resources_dir, str(e))
+raise FatalException(1, err)
 
   os.symlink(os.path.join(resources_dir, jdbc_name), jdbc_symlink)
   print "JDBC driver was successfully initialized."



ambari git commit: AMBARI-10034 Wrong mysql-connector jar is not replaced during ambari-server setup (dsen)

2015-03-11 Thread dsen
Repository: ambari
Updated Branches:
  refs/heads/trunk 14ef5195c -> 7d6ab56a6


AMBARI-10034 Wrong mysql-connector jar is not replaced during ambari-server setup (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7d6ab56a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7d6ab56a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7d6ab56a

Branch: refs/heads/trunk
Commit: 7d6ab56a671aef6d5bea2192aebfc43bf8b2e653
Parents: 14ef519
Author: Dmytro Sen 
Authored: Wed Mar 11 20:38:16 2015 +0200
Committer: Dmytro Sen 
Committed: Wed Mar 11 21:50:00 2015 +0200

--
 .../src/main/python/ambari_server/serverSetup.py   | 17 ++---
 1 file changed, 10 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/7d6ab56a/ambari-server/src/main/python/ambari_server/serverSetup.py
--
diff --git a/ambari-server/src/main/python/ambari_server/serverSetup.py 
b/ambari-server/src/main/python/ambari_server/serverSetup.py
index 1c6dbbc..42b7d24 100644
--- a/ambari-server/src/main/python/ambari_server/serverSetup.py
+++ b/ambari-server/src/main/python/ambari_server/serverSetup.py
@@ -797,13 +797,16 @@ def _cache_jdbc_driver(args):
   if os.path.lexists(jdbc_symlink):
 os.remove(jdbc_symlink)
 
-  if not os.path.isfile(os.path.join(resources_dir, jdbc_name)):
-try:
-  shutil.copy(args.jdbc_driver, resources_dir)
-except Exception, e:
-  err = "Can not copy file {0} to {1} due to: {2} . Please check file " \
-"permissions and free disk space.".format(args.jdbc_driver, 
resources_dir, str(e))
-  raise FatalException(1, err)
+  if os.path.isfile(os.path.join(resources_dir, jdbc_name)):
+os.remove(os.path.join(resources_dir, jdbc_name))
+
+  try:
+shutil.copy(args.jdbc_driver, resources_dir)
+print "Copying {0} to {1}".format(args.jdbc_driver, resources_dir)
+  except Exception, e:
+err = "Can not copy file {0} to {1} due to: {2} . Please check file " \
+  "permissions and free disk space.".format(args.jdbc_driver, 
resources_dir, str(e))
+raise FatalException(1, err)
 
   os.symlink(os.path.join(resources_dir, jdbc_name), jdbc_symlink)
   print "JDBC driver was successfully initialized."



ambari git commit: AMBARI-9831 Stack Versions page: disable controls while call in progress. (ababiichuk)

2015-03-11 Thread ababiichuk
Repository: ambari
Updated Branches:
  refs/heads/trunk 516d718fc -> 14ef5195c


AMBARI-9831 Stack Versions page: disable controls while call in progress. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/14ef5195
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/14ef5195
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/14ef5195

Branch: refs/heads/trunk
Commit: 14ef5195c9ba643302759e0c82a7d4a210951bfd
Parents: 516d718
Author: aBabiichuk 
Authored: Wed Mar 11 18:39:41 2015 +0200
Committer: aBabiichuk 
Committed: Wed Mar 11 21:18:19 2015 +0200

--
 .../main/admin/stack_and_upgrade_controller.js| 10 --
 .../main/admin/stack_and_upgrade_controller_test.js   |  3 ++-
 2 files changed, 10 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/14ef5195/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
--
diff --git 
a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js 
b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index 73975eb..a40e69e 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -145,7 +145,6 @@ App.MainAdminStackAndUpgradeController = 
Em.Controller.extend(App.LocalStorage,
 this.loadUpgradeData(true).done(function() {
   self.loadStackVersionsToModel(true).done(function () {
 self.loadRepoVersionsToModel().done(function() {
-  self.set('requestInProgress', false);
   var currentVersion = App.StackVersion.find().findProperty('state', 
'CURRENT');
   if (currentVersion) {
 self.set('currentVersion', {
@@ -431,11 +430,13 @@ App.MainAdminStackAndUpgradeController = 
Em.Controller.extend(App.LocalStorage,
 };
 
 if (App.get('supports.preUpgradeCheck')) {
+  this.set('requestInProgress', true);
   App.ajax.send({
 name: "admin.rolling_upgrade.pre_upgrade_check",
 sender: this,
 data: params,
-success: "runPreUpgradeCheckSuccess"
+success: "runPreUpgradeCheckSuccess",
+error: "runPreUpgradeCheckError"
   });
 } else {
   this.upgrade(params);
@@ -452,6 +453,7 @@ App.MainAdminStackAndUpgradeController = 
Em.Controller.extend(App.LocalStorage,
*/
   runPreUpgradeCheckSuccess: function (data, opt, params) {
 if (data.items.someProperty('UpgradeChecks.status', "FAIL")) {
+  this.set('requestInProgress', false);
   var header = 
Em.I18n.t('popup.clusterCheck.Upgrade.header').format(params.label);
   var title = Em.I18n.t('popup.clusterCheck.Upgrade.title');
   var alert = Em.I18n.t('popup.clusterCheck.Upgrade.alert');
@@ -461,6 +463,10 @@ App.MainAdminStackAndUpgradeController = 
Em.Controller.extend(App.LocalStorage,
 }
   },
 
+  runPreUpgradeCheckError: function() {
+this.set('requestInProgress', false);
+  },
+
   /**
* confirmation popup before install repository version
*/

http://git-wip-us.apache.org/repos/asf/ambari/blob/14ef5195/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
--
diff --git 
a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js 
b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
index 2af6753..48f3514 100644
--- 
a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
+++ 
b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
@@ -245,7 +245,8 @@ describe('App.MainAdminStackAndUpgradeController', 
function() {
   value: '2.2',
   label: 'HDP-2.2'
 },
-success: "runPreUpgradeCheckSuccess"
+success: "runPreUpgradeCheckSuccess",
+error: "runPreUpgradeCheckError"
   });
 });
   });



ambari git commit: AMBARI-9831 Stack Versions page: disable controls while call in progress. (ababiichuk)

2015-03-11 Thread ababiichuk
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 f85f82cef -> 2d78141e0


AMBARI-9831 Stack Versions page: disable controls while call in progress. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2d78141e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2d78141e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2d78141e

Branch: refs/heads/branch-2.0.0
Commit: 2d78141e024de4c505c14b9cf5ca914fa10377e3
Parents: f85f82c
Author: aBabiichuk 
Authored: Wed Mar 11 18:40:01 2015 +0200
Committer: aBabiichuk 
Committed: Wed Mar 11 21:19:34 2015 +0200

--
 .../main/admin/stack_and_upgrade_controller.js| 10 --
 .../main/admin/stack_and_upgrade_controller_test.js   |  3 ++-
 2 files changed, 10 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/2d78141e/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
--
diff --git 
a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js 
b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
index 73975eb..a40e69e 100644
--- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
+++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js
@@ -145,7 +145,6 @@ App.MainAdminStackAndUpgradeController = 
Em.Controller.extend(App.LocalStorage,
 this.loadUpgradeData(true).done(function() {
   self.loadStackVersionsToModel(true).done(function () {
 self.loadRepoVersionsToModel().done(function() {
-  self.set('requestInProgress', false);
   var currentVersion = App.StackVersion.find().findProperty('state', 
'CURRENT');
   if (currentVersion) {
 self.set('currentVersion', {
@@ -431,11 +430,13 @@ App.MainAdminStackAndUpgradeController = 
Em.Controller.extend(App.LocalStorage,
 };
 
 if (App.get('supports.preUpgradeCheck')) {
+  this.set('requestInProgress', true);
   App.ajax.send({
 name: "admin.rolling_upgrade.pre_upgrade_check",
 sender: this,
 data: params,
-success: "runPreUpgradeCheckSuccess"
+success: "runPreUpgradeCheckSuccess",
+error: "runPreUpgradeCheckError"
   });
 } else {
   this.upgrade(params);
@@ -452,6 +453,7 @@ App.MainAdminStackAndUpgradeController = 
Em.Controller.extend(App.LocalStorage,
*/
   runPreUpgradeCheckSuccess: function (data, opt, params) {
 if (data.items.someProperty('UpgradeChecks.status', "FAIL")) {
+  this.set('requestInProgress', false);
   var header = 
Em.I18n.t('popup.clusterCheck.Upgrade.header').format(params.label);
   var title = Em.I18n.t('popup.clusterCheck.Upgrade.title');
   var alert = Em.I18n.t('popup.clusterCheck.Upgrade.alert');
@@ -461,6 +463,10 @@ App.MainAdminStackAndUpgradeController = 
Em.Controller.extend(App.LocalStorage,
 }
   },
 
+  runPreUpgradeCheckError: function() {
+this.set('requestInProgress', false);
+  },
+
   /**
* confirmation popup before install repository version
*/

http://git-wip-us.apache.org/repos/asf/ambari/blob/2d78141e/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
--
diff --git 
a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js 
b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
index 2af6753..48f3514 100644
--- 
a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
+++ 
b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js
@@ -245,7 +245,8 @@ describe('App.MainAdminStackAndUpgradeController', 
function() {
   value: '2.2',
   label: 'HDP-2.2'
 },
-success: "runPreUpgradeCheckSuccess"
+success: "runPreUpgradeCheckSuccess",
+error: "runPreUpgradeCheckError"
   });
 });
   });



ambari git commit: AMBARI-10021 - Python Does Not Close Alert TCP Connections Reliably

2015-03-11 Thread jonathanhurley
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 42b9baadd -> f85f82cef


AMBARI-10021 - Python Does Not Close Alert TCP Connections Reliably


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f85f82ce
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f85f82ce
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f85f82ce

Branch: refs/heads/branch-2.0.0
Commit: f85f82ceffae2dfaa38d0079289592d2f9a1a8ba
Parents: 42b9baa
Author: Jonathan Hurley 
Authored: Wed Mar 11 09:47:08 2015 -0400
Committer: Jonathan Hurley 
Committed: Wed Mar 11 14:42:53 2015 -0400

--
 .../python/ambari_agent/alerts/metric_alert.py  |  17 ++-
 .../python/ambari_agent/alerts/web_alert.py | 116 ---
 .../AMBARI_METRICS/0.1.0/alerts.json|   3 -
 .../FALCON/0.5.0.2.1/alerts.json|   2 +-
 .../HBASE/0.96.0.2.0/alerts.json|   6 -
 .../common-services/HDFS/2.1.0.2.0/alerts.json  |   8 +-
 .../package/alerts/alert_checkpoint_time.py |  20 +++-
 .../package/alerts/alert_ha_namenode_health.py  |  20 +++-
 .../package/alerts/alert_hive_metastore.py  |   3 -
 .../package/alerts/alert_hive_thrift_port.py|   3 -
 .../package/alerts/alert_webhcat_server.py  |   9 +-
 .../common-services/OOZIE/4.0.0.2.0/alerts.json |   2 +-
 .../common-services/STORM/0.9.1.2.1/alerts.json |   2 +-
 .../common-services/YARN/2.1.0.2.0/alerts.json  |   8 +-
 .../package/alerts/alert_nodemanager_health.py  |  25 +++-
 .../alerts/alert_nodemanagers_summary.py|  27 +++--
 .../BIGTOP/0.8/services/HBASE/alerts.json   |   3 -
 .../stacks/BIGTOP/0.8/services/HDFS/alerts.json |   8 +-
 .../HDFS/package/files/alert_checkpoint_time.py |  20 +++-
 .../package/files/alert_ha_namenode_health.py   |  20 +++-
 .../package/files/alert_hive_thrift_port.py |   5 +-
 .../package/files/alert_webhcat_server.py   |  11 +-
 .../stacks/BIGTOP/0.8/services/YARN/alerts.json |   8 +-
 .../package/files/alert_nodemanager_health.py   |  30 -
 24 files changed, 250 insertions(+), 126 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/f85f82ce/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
--
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py 
b/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
index 83dc54d..8b5f15d 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
@@ -154,10 +154,19 @@ class MetricAlert(BaseAlert):
 
   # use a custom header processor that will look for the non-standard
   # "Refresh" header and attempt to follow the redirect
-  url_opener = urllib2.build_opener(RefreshHeaderProcessor())
-  response = url_opener.open(url)
-
-  content = response.read()
+  response = None
+  try:
+url_opener = urllib2.build_opener(RefreshHeaderProcessor())
+response = url_opener.open(url)
+content = response.read()
+  finally:
+# explicitly close the connection as we've seen python hold onto these
+if response is not None:
+  try:
+response.close()
+  except:
+logger.debug("[Alert][{0}] Unable to close JMX URL connection to 
{1}".format
+  (self.get_name(), url))
 
   json_response = json.loads(content)
   json_data = json_response['beans'][0]

http://git-wip-us.apache.org/repos/asf/ambari/blob/f85f82ce/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
--
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py 
b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
index 8252781..2db4996 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
@@ -22,7 +22,9 @@ import logging
 import time
 import subprocess
 import os
+import urllib2
 import uuid
+
 from  tempfile import gettempdir
 from alerts.base_alert import BaseAlert
 from collections import namedtuple
@@ -45,10 +47,13 @@ except ImportError:
 
 logger = logging.getLogger()
 
-CURL_CONNECTION_TIMEOUT = '20'
+CONNECTION_TIMEOUT = 10.0
+CURL_CONNECTION_TIMEOUT = "10"
+
+WebResponse = namedtuple('WebResponse', 'status_code time_millis error_msg')
 
 class WebAlert(BaseAlert):
-  
+
   def __init__(self, alert_meta, alert_source_meta, config):
 super(WebAlert, self).__init__(alert_meta, alert_source_meta)
 
@@ -72,6 +77,10 @@ class WebAlert(BaseAlert):
   self.get_name(), alert_uri.uri, str(alert_uri.is_ssl_enabled)))
 
 url = self._build_web_
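
The recurring pattern in this patch is to close every urllib2 response explicitly, even when the read fails, so the agent does not leak TCP connections. A self-contained sketch of that pattern (function name invented):

import urllib2

def fetch(url):
  response = None
  try:
    response = urllib2.urlopen(url)
    return response.read()
  finally:
    # Explicitly close the connection, mirroring the try/finally added above;
    # the patch logs close failures at debug level, this sketch just ignores them.
    if response is not None:
      try:
        response.close()
      except Exception:
        pass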

ambari git commit: AMBARI-10021 - Python Does Not Close Alert TCP Connections Reliably

2015-03-11 Thread jonathanhurley
Repository: ambari
Updated Branches:
  refs/heads/trunk f81da87c0 -> 516d718fc


AMBARI-10021 - Python Does Not Close Alert TCP Connections Reliably


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/516d718f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/516d718f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/516d718f

Branch: refs/heads/trunk
Commit: 516d718fc96625a146a9e276c65a8fd9990a5976
Parents: f81da87
Author: Jonathan Hurley 
Authored: Wed Mar 11 09:47:08 2015 -0400
Committer: Jonathan Hurley 
Committed: Wed Mar 11 14:15:03 2015 -0400

--
 .../python/ambari_agent/alerts/metric_alert.py  |  17 ++-
 .../python/ambari_agent/alerts/web_alert.py | 116 ---
 .../AMBARI_METRICS/0.1.0/alerts.json|   3 -
 .../FALCON/0.5.0.2.1/alerts.json|   2 +-
 .../HBASE/0.96.0.2.0/alerts.json|   6 -
 .../common-services/HDFS/2.1.0.2.0/alerts.json  |   8 +-
 .../package/alerts/alert_checkpoint_time.py |  20 +++-
 .../package/alerts/alert_ha_namenode_health.py  |  20 +++-
 .../package/alerts/alert_hive_metastore.py  |   3 -
 .../package/alerts/alert_hive_thrift_port.py|   3 -
 .../package/alerts/alert_webhcat_server.py  |   9 +-
 .../common-services/OOZIE/4.0.0.2.0/alerts.json |   2 +-
 .../common-services/STORM/0.9.1.2.1/alerts.json |   2 +-
 .../common-services/YARN/2.1.0.2.0/alerts.json  |   8 +-
 .../package/alerts/alert_nodemanager_health.py  |  25 +++-
 .../alerts/alert_nodemanagers_summary.py|  27 +++--
 .../BIGTOP/0.8/services/HBASE/alerts.json   |   3 -
 .../stacks/BIGTOP/0.8/services/HDFS/alerts.json |   8 +-
 .../HDFS/package/files/alert_checkpoint_time.py |  20 +++-
 .../package/files/alert_ha_namenode_health.py   |  20 +++-
 .../package/files/alert_hive_thrift_port.py |   5 +-
 .../package/files/alert_webhcat_server.py   |  11 +-
 .../stacks/BIGTOP/0.8/services/YARN/alerts.json |   8 +-
 .../package/files/alert_nodemanager_health.py   |  30 -
 24 files changed, 250 insertions(+), 126 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/516d718f/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
--
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py 
b/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
index 83dc54d..8b5f15d 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/metric_alert.py
@@ -154,10 +154,19 @@ class MetricAlert(BaseAlert):
 
   # use a custom header processor that will look for the non-standard
   # "Refresh" header and attempt to follow the redirect
-  url_opener = urllib2.build_opener(RefreshHeaderProcessor())
-  response = url_opener.open(url)
-
-  content = response.read()
+  response = None
+  try:
+url_opener = urllib2.build_opener(RefreshHeaderProcessor())
+response = url_opener.open(url)
+content = response.read()
+  finally:
+# explicitly close the connection as we've seen python hold onto these
+if response is not None:
+  try:
+response.close()
+  except:
+logger.debug("[Alert][{0}] Unable to close JMX URL connection to 
{1}".format
+  (self.get_name(), url))
 
   json_response = json.loads(content)
   json_data = json_response['beans'][0]

http://git-wip-us.apache.org/repos/asf/ambari/blob/516d718f/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
--
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py 
b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
index 8252781..2db4996 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
@@ -22,7 +22,9 @@ import logging
 import time
 import subprocess
 import os
+import urllib2
 import uuid
+
 from  tempfile import gettempdir
 from alerts.base_alert import BaseAlert
 from collections import namedtuple
@@ -45,10 +47,13 @@ except ImportError:
 
 logger = logging.getLogger()
 
-CURL_CONNECTION_TIMEOUT = '20'
+CONNECTION_TIMEOUT = 10.0
+CURL_CONNECTION_TIMEOUT = "10"
+
+WebResponse = namedtuple('WebResponse', 'status_code time_millis error_msg')
 
 class WebAlert(BaseAlert):
-  
+
   def __init__(self, alert_meta, alert_source_meta, config):
 super(WebAlert, self).__init__(alert_meta, alert_source_meta)
 
@@ -72,6 +77,10 @@ class WebAlert(BaseAlert):
   self.get_name(), alert_uri.uri, str(alert_uri.is_ssl_enabled)))
 
 url = self._build_web_query(alert_ur

[2/2] ambari git commit: AMBARI-10032. HostCleanup.py failed to start on Ubuntu (aonishuk)

2015-03-11 Thread aonishuk
AMBARI-10032. HostCleanup.py failed to start on Ubuntu (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/42b9baad
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/42b9baad
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/42b9baad

Branch: refs/heads/branch-2.0.0
Commit: 42b9baadd2015e5e836939a022e18b09cb2c55d6
Parents: d481b4d
Author: Andrew Onishuk 
Authored: Wed Mar 11 20:03:25 2015 +0200
Committer: Andrew Onishuk 
Committed: Wed Mar 11 20:03:25 2015 +0200

--
 ambari-agent/src/main/python/ambari_agent/HostCleanup.py | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/42b9baad/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
--
diff --git a/ambari-agent/src/main/python/ambari_agent/HostCleanup.py 
b/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
index ca9bc41..ca2bcf4 100644
--- a/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
+++ b/ambari-agent/src/main/python/ambari_agent/HostCleanup.py
@@ -17,12 +17,12 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
-import sys
-from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl
-
 # For compatibility with different OSes
 # Edit PYTHONPATH to be able to import common_functions
+import sys
 sys.path.append("/usr/lib/python2.6/site-packages/")
+
+
 import os
 import string
 import subprocess
@@ -37,7 +37,7 @@ import datetime
 from AmbariConfig import AmbariConfig
 from ambari_commons import OSCheck, OSConst
 from ambari_commons.constants import AMBARI_SUDO_BINARY
-
+from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl
 
 logger = logging.getLogger()
 



[1/2] ambari git commit: AMBARI-10031. Ambari-agent died under SLES (and could not even restart automatically) (aonishuk)

2015-03-11 Thread aonishuk
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 41ca3cc68 -> d481b4d6f
  refs/heads/trunk c12bce3ac -> 1b22d34e5


AMBARI-10031. Ambari-agent died under SLES (and could not even restart automatically) (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1b22d34e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1b22d34e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1b22d34e

Branch: refs/heads/trunk
Commit: 1b22d34e51375e265fc125fda6b587438e02d185
Parents: c12bce3
Author: Andrew Onishuk 
Authored: Wed Mar 11 20:01:37 2015 +0200
Committer: Andrew Onishuk 
Committed: Wed Mar 11 20:01:37 2015 +0200

--
 .../resource_management/TestGroupResource.py| 10 -
 .../resource_management/TestUserResource.py | 22 ++--
 .../python/resource_management/core/shell.py|  2 +-
 3 files changed, 17 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/1b22d34e/ambari-agent/src/test/python/resource_management/TestGroupResource.py
--
diff --git 
a/ambari-agent/src/test/python/resource_management/TestGroupResource.py 
b/ambari-agent/src/test/python/resource_management/TestGroupResource.py
index 597a6ee..d0ca261 100644
--- a/ambari-agent/src/test/python/resource_management/TestGroupResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestGroupResource.py
@@ -51,7 +51,7 @@ class TestGroupResource(TestCase):
 
 
 self.assertEqual(popen_mock.call_count, 1)
-popen_mock.assert_called_with(['/bin/bash', '--login', '--noprofile', 
'-c', "ambari-sudo.sh  PATH=/bin -H -E groupadd -p secure hadoop"], 
shell=False, preexec_fn=None, stderr=-2, stdout=5, bufsize=1, env={'PATH': 
'/bin'}, cwd=None)
+popen_mock.assert_called_with(['/bin/bash', '--login', '--noprofile', 
'-c', "ambari-sudo.sh  PATH=/bin -H -E groupadd -p secure hadoop"], 
shell=False, preexec_fn=None, stderr=-2, stdout=5, bufsize=1, env={'PATH': 
'/bin'}, cwd=None, close_fds=True)
 getgrnam_mock.assert_called_with('hadoop')
 
 
@@ -73,7 +73,7 @@ class TestGroupResource(TestCase):
 
 
 self.assertEqual(popen_mock.call_count, 1)
-popen_mock.assert_called_with(['/bin/bash', '--login', '--noprofile', 
'-c', "ambari-sudo.sh  PATH=/bin -H -E groupmod -p secure -g 2 mapred"], 
shell=False, preexec_fn=None, stderr=-2, stdout=5, bufsize=1, env={'PATH': 
'/bin'}, cwd=None)
+popen_mock.assert_called_with(['/bin/bash', '--login', '--noprofile', 
'-c', "ambari-sudo.sh  PATH=/bin -H -E groupmod -p secure -g 2 mapred"], 
shell=False, preexec_fn=None, stderr=-2, stdout=5, bufsize=1, env={'PATH': 
'/bin'}, cwd=None, close_fds=True)
 getgrnam_mock.assert_called_with('mapred')
 
 
@@ -98,7 +98,7 @@ class TestGroupResource(TestCase):
 except Fail:
   pass
 self.assertEqual(popen_mock.call_count, 1)
-popen_mock.assert_called_with(['/bin/bash', '--login', '--noprofile', 
'-c', "ambari-sudo.sh  PATH=/bin -H -E groupmod -p secure -g 2 mapred"], 
shell=False, preexec_fn=None, stderr=-2, stdout=5, bufsize=1, env={'PATH': 
'/bin'}, cwd=None)
+popen_mock.assert_called_with(['/bin/bash', '--login', '--noprofile', 
'-c', "ambari-sudo.sh  PATH=/bin -H -E groupmod -p secure -g 2 mapred"], 
shell=False, preexec_fn=None, stderr=-2, stdout=5, bufsize=1, env={'PATH': 
'/bin'}, cwd=None, close_fds=True)
 getgrnam_mock.assert_called_with('mapred')
 
 
@@ -119,7 +119,7 @@ class TestGroupResource(TestCase):
 
 
 self.assertEqual(popen_mock.call_count, 1)
-popen_mock.assert_called_with(['/bin/bash', '--login', '--noprofile', 
'-c', 'groupdel mapred'], shell=False, preexec_fn=None, stderr=-2, stdout=5, 
bufsize=1, env={'PATH': '/bin'}, cwd=None)
+popen_mock.assert_called_with(['/bin/bash', '--login', '--noprofile', 
'-c', 'groupdel mapred'], shell=False, preexec_fn=None, stderr=-2, stdout=5, 
bufsize=1, env={'PATH': '/bin'}, cwd=None, close_fds=True)
 getgrnam_mock.assert_called_with('mapred')
 
 
@@ -144,5 +144,5 @@ class TestGroupResource(TestCase):
   pass
 
 self.assertEqual(popen_mock.call_count, 1)
-popen_mock.assert_called_with(['/bin/bash', '--login', '--noprofile', '-c', 'groupdel mapred'], shell=False, preexec_fn=None, stderr=-2, stdout=5, bufsize=1, env={'PATH': '/bin'}, cwd=None)
+popen_mock.assert_called_with(['/bin/bash', '--login', '--noprofile', '-c', 'groupdel mapred'], shell=False, preexec_fn=None, stderr=-2, stdout=5, bufsize=1, env={'PATH': '/bin'}, cwd=None, close_fds=True)
 getgrnam_mock.assert_called_with('mapred')
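
The assertions above pin down the behavior this patch adds in resource_management/core/shell.py (see the diffstat): every command is launched through subprocess.Popen with close_fds=True so the child does not inherit the agent's open file descriptors (sockets, log handles, and so on). A minimal standalone sketch of that launch pattern, using plain subprocess only and not the project's shell module; the helper name run_in_bash is made up for illustration:

import subprocess

def run_in_bash(command, env=None):
    # Launch through a bash login shell, mirroring the call shape asserted above,
    # and pass close_fds=True so the child does not inherit open descriptors
    # from the calling process.
    proc = subprocess.Popen(
        ['/bin/bash', '--login', '--noprofile', '-c', command],
        shell=False,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        env=env or {'PATH': '/bin'},
        close_fds=True)
    out, _ = proc.communicate()
    return proc.returncode, out

if __name__ == '__main__':
    print(run_in_bash('echo hello'))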

http://git-wip-us.apache.org/repos/asf/ambari/blob/1b22d34e/ambari-agent/src/test/python/resource_management/TestUserResource.py
-

[1/2] ambari git commit: AMBARI-10030. SNameNode start fails on CentOS5 (aonishuk)

2015-03-11 Thread aonishuk
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 fa1f08f7a -> 8f85e8c60
  refs/heads/trunk 928545392 -> c12bce3ac


AMBARI-10030. SNameNode start fails on CentOS5 (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c12bce3a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c12bce3a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c12bce3a

Branch: refs/heads/trunk
Commit: c12bce3acce61d226a58a3aca482580e52fb9376
Parents: 9285453
Author: Andrew Onishuk 
Authored: Wed Mar 11 19:27:32 2015 +0200
Committer: Andrew Onishuk 
Committed: Wed Mar 11 19:27:32 2015 +0200

--
 .../python/resource_management/core/sudo.py | 22 +---
 1 file changed, 15 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/c12bce3a/ambari-common/src/main/python/resource_management/core/sudo.py
--
diff --git a/ambari-common/src/main/python/resource_management/core/sudo.py 
b/ambari-common/src/main/python/resource_management/core/sudo.py
index 938e95b..13c32a1 100644
--- a/ambari-common/src/main/python/resource_management/core/sudo.py
+++ b/ambari-common/src/main/python/resource_management/core/sudo.py
@@ -22,6 +22,7 @@ Ambari Agent
 import os
 import tempfile
 from resource_management.core import shell
+from resource_management.core.logger import Logger
 
 # os.chown replacement
 def chown(path, owner, group):
@@ -104,12 +105,19 @@ def path_lexists(path):
 # os.stat
 def stat(path):
   class Stat:
+    RETRY_COUNT = 5
     def __init__(self, path):
-      # TODO: check this on Ubuntu
-      out = shell.checked_call(["stat", "-c", "%u %g %a", path], sudo=True)[1]
-      uid_str, gid_str, mode_str = out.split(' ')
-      self.st_uid = int(uid_str)
-      self.st_gid = int(gid_str)
-      self.st_mode = int(mode_str, 8)
-
+      # Sometimes (on heavy load) stat call returns an empty output with zero return code
+      for i in range(0, self.RETRY_COUNT):
+        out = shell.checked_call(["stat", "-c", "%u %g %a", path], sudo=True)[1]
+        values = out.split(' ')
+        if len(values) == 3:
+          uid_str, gid_str, mode_str = values
+          self.st_uid, self.st_gid, self.st_mode = int(uid_str), int(gid_str), int(mode_str, 8)
+          break
+      else:
+        warning_message = "Can not parse a sudo stat call output: \"{0}\"".format(out)
+        Logger.warning(warning_message)
+        stat_val = os.stat(path)
+        self.st_uid, self.st_gid, self.st_mode = stat_val.st_uid, stat_val.st_gid, stat_val.st_mode & 0
   return Stat(path)
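
The hunk above retries the sudo stat call when its output cannot be parsed and finally falls back to os.stat. A minimal standalone sketch of the same retry-with-fallback idea, using plain subprocess instead of shell.checked_call; the function name file_ownership and the 0o7777 permission mask are illustrative assumptions, not taken from the patch:

import os
import subprocess

RETRY_COUNT = 5  # same retry bound the hunk above introduces

def file_ownership(path):
    # Try to parse "uid gid mode" from the stat command a few times; under heavy
    # load the command can return empty output with a zero exit code.
    out = ''
    for _ in range(RETRY_COUNT):
        out = subprocess.check_output(['stat', '-c', '%u %g %a', path]).decode().strip()
        values = out.split(' ')
        if len(values) == 3:
            uid, gid, mode = values
            return int(uid), int(gid), int(mode, 8)
    # Parsing never succeeded; fall back to a direct os.stat call.
    print('Warning: cannot parse stat output: "{0}"'.format(out))
    st = os.stat(path)
    return st.st_uid, st.st_gid, st.st_mode & 0o7777  # keep only the permission bits

if __name__ == '__main__':
    print(file_ownership('/etc/hosts'))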



[2/2] ambari git commit: AMBARI-10030. SNameNode start fails on CentOS5 (aonishuk)

2015-03-11 Thread aonishuk
AMBARI-10030. SNameNode start fails on CentOS5 (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8f85e8c6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8f85e8c6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8f85e8c6

Branch: refs/heads/branch-2.0.0
Commit: 8f85e8c609347484fe2c490a750e80eec1db9825
Parents: fa1f08f
Author: Andrew Onishuk 
Authored: Wed Mar 11 19:27:34 2015 +0200
Committer: Andrew Onishuk 
Committed: Wed Mar 11 19:27:34 2015 +0200

--
 .../python/resource_management/core/sudo.py | 22 +---
 1 file changed, 15 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/8f85e8c6/ambari-common/src/main/python/resource_management/core/sudo.py
--
diff --git a/ambari-common/src/main/python/resource_management/core/sudo.py 
b/ambari-common/src/main/python/resource_management/core/sudo.py
index 938e95b..13c32a1 100644
--- a/ambari-common/src/main/python/resource_management/core/sudo.py
+++ b/ambari-common/src/main/python/resource_management/core/sudo.py
@@ -22,6 +22,7 @@ Ambari Agent
 import os
 import tempfile
 from resource_management.core import shell
+from resource_management.core.logger import Logger
 
 # os.chown replacement
 def chown(path, owner, group):
@@ -104,12 +105,19 @@ def path_lexists(path):
 # os.stat
 def stat(path):
   class Stat:
+    RETRY_COUNT = 5
     def __init__(self, path):
-      # TODO: check this on Ubuntu
-      out = shell.checked_call(["stat", "-c", "%u %g %a", path], sudo=True)[1]
-      uid_str, gid_str, mode_str = out.split(' ')
-      self.st_uid = int(uid_str)
-      self.st_gid = int(gid_str)
-      self.st_mode = int(mode_str, 8)
-
+      # Sometimes (on heavy load) stat call returns an empty output with zero return code
+      for i in range(0, self.RETRY_COUNT):
+        out = shell.checked_call(["stat", "-c", "%u %g %a", path], sudo=True)[1]
+        values = out.split(' ')
+        if len(values) == 3:
+          uid_str, gid_str, mode_str = values
+          self.st_uid, self.st_gid, self.st_mode = int(uid_str), int(gid_str), int(mode_str, 8)
+          break
+      else:
+        warning_message = "Can not parse a sudo stat call output: \"{0}\"".format(out)
+        Logger.warning(warning_message)
+        stat_val = os.stat(path)
+        self.st_uid, self.st_gid, self.st_mode = stat_val.st_uid, stat_val.st_gid, stat_val.st_mode & 0
   return Stat(path)



ambari git commit: AMBARI-9990. CopyFromLocal failed to copy Tez tarball to HDFS because multiple processes tried to copy to the same destination simultaneously (alejandro)

2015-03-11 Thread alejandro
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 da5dee4c7 -> fa1f08f7a


AMBARI-9990. CopyFromLocal failed to copy Tez tarball to HDFS because multiple processes tried to copy to the same destination simultaneously (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fa1f08f7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fa1f08f7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fa1f08f7

Branch: refs/heads/branch-2.0.0
Commit: fa1f08f7a96820bcdf1be24fa9282491965962cc
Parents: da5dee4
Author: Alejandro Fernandez 
Authored: Wed Mar 11 10:14:51 2015 -0700
Committer: Alejandro Fernandez 
Committed: Wed Mar 11 10:14:51 2015 -0700

--
 .../functions/dynamic_variable_interpretation.py | 19 +++
 1 file changed, 19 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/fa1f08f7/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
--
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
index 00b8d70..1e70219 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
@@ -23,6 +23,7 @@ import os
 import glob
 import re
 import tempfile
+import uuid
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.resources.copy_from_local import CopyFromLocal
@@ -101,17 +102,35 @@ def _copy_files(source_and_dest_pairs, component_user, file_owner, group_owner,
  mode=0555
 )
 
+# Because CopyFromLocal does not guarantee synchronization, it's possible for two processes to first attempt to
+# copy the file to a temporary location, then process 2 fails because the temporary file was already created by
+# process 1, so process 2 tries to clean up by deleting the temporary file, and then process 1
+# cannot finish the copy to the final destination, and both fail!
+# For this reason, the file name on the destination must be unique, and we then rename it to the intended value.
+# The rename operation is synchronized by the Namenode.
+orig_dest_file_name = os.path.split(destination)[1]
+unique_string = str(uuid.uuid4())[:8]
+new_dest_file_name = orig_dest_file_name + "." + unique_string
+new_destination = os.path.join(destination_dir, new_dest_file_name)
 CopyFromLocal(source,
   mode=0444,
   owner=file_owner,
   group=group_owner,
   user=params.hdfs_user,   # this will be the user to run the commands as
   dest_dir=destination_dir,
+  dest_file=new_dest_file_name,
   kinnit_if_needed=kinit_if_needed,
   hdfs_user=params.hdfs_user,
   hadoop_bin_dir=params.hadoop_bin_dir,
   hadoop_conf_dir=params.hadoop_conf_dir
 )
+
+mv_command = format("fs -mv {new_destination} {destination}")
+ExecuteHadoop(mv_command,
+  user=params.hdfs_user,
+  bin_dir=params.hadoop_bin_dir,
+  conf_dir=params.hadoop_conf_dir
+)
   except Exception, e:
 Logger.error("Failed to copy file. Source: %s, Destination: %s. Error: 
%s" % (source, destination, e.message))
 return_value = 1
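
The comments in the hunk above describe the race: two processes uploading the same tarball can delete each other's half-copied temporary file, so the patch uploads under a unique name and then renames, letting the NameNode serialize the rename. A rough standalone sketch of that copy-then-rename pattern; it assumes the hdfs CLI is on PATH, whereas the real code goes through the CopyFromLocal and ExecuteHadoop resources instead:

import subprocess
import uuid

def copy_to_hdfs_atomically(local_path, hdfs_dest):
    # Upload under a unique temporary name in the destination directory, then
    # rename to the final name. The rename is serialized by the NameNode, so
    # concurrent uploaders cannot trample each other's half-copied files.
    tmp_dest = hdfs_dest + "." + str(uuid.uuid4())[:8]

    # Step 1: copy to a name no other process will pick.
    subprocess.check_call(['hdfs', 'dfs', '-copyFromLocal', local_path, tmp_dest])
    # Step 2: move into place; if another uploader already won the race this
    # fails instead of corrupting the destination, and we remove our temp file.
    try:
        subprocess.check_call(['hdfs', 'dfs', '-mv', tmp_dest, hdfs_dest])
    except subprocess.CalledProcessError:
        subprocess.call(['hdfs', 'dfs', '-rm', '-f', tmp_dest])
        raise

if __name__ == '__main__':
    copy_to_hdfs_atomically('/tmp/tez.tar.gz', '/hdp/apps/tez/tez.tar.gz')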



ambari git commit: AMBARI-10024. UI issue on Customize Services page under SLES. (onechiporenko)

2015-03-11 Thread onechiporenko
Repository: ambari
Updated Branches:
  refs/heads/trunk 1c78b2015 -> 928545392


AMBARI-10024. UI issue on Customize Services page under SLES. (onechiporenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/92854539
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/92854539
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/92854539

Branch: refs/heads/trunk
Commit: 928545392aaad89fc82a22c2b38f11429b571647
Parents: 1c78b20
Author: Oleg Nechiporenko 
Authored: Wed Mar 11 15:14:24 2015 +0200
Committer: Oleg Nechiporenko 
Committed: Wed Mar 11 19:01:05 2015 +0200

--
 .../app/controllers/wizard/step7_controller.js  |  2 +
 .../test/controllers/wizard/step7_test.js   | 47 
 2 files changed, 49 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/92854539/ambari-web/app/controllers/wizard/step7_controller.js
--
diff --git a/ambari-web/app/controllers/wizard/step7_controller.js 
b/ambari-web/app/controllers/wizard/step7_controller.js
index 46f3801..ba3a4bd 100644
--- a/ambari-web/app/controllers/wizard/step7_controller.js
+++ b/ambari-web/app/controllers/wizard/step7_controller.js
@@ -1212,6 +1212,7 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, {
   },
 
   showDatabaseConnectionWarningPopup: function (serviceNames, deferred) {
+var self = this;
 return App.ModalPopup.show({
   header: Em.I18n.t('installer.step7.popup.database.connection.header'),
   body: Em.I18n.t('installer.step7.popup.database.connection.body').format(serviceNames.join(', ')),
@@ -1222,6 +1223,7 @@ App.WizardStep7Controller = Em.Controller.extend(App.ServerValidatorMixin, {
 this._super();
   },
   onSecondary: function () {
+self.set('submitButtonClicked', false);
 deferred.reject();
 this._super();
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/92854539/ambari-web/test/controllers/wizard/step7_test.js
--
diff --git a/ambari-web/test/controllers/wizard/step7_test.js 
b/ambari-web/test/controllers/wizard/step7_test.js
index 181fd46..bc5ba73 100644
--- a/ambari-web/test/controllers/wizard/step7_test.js
+++ b/ambari-web/test/controllers/wizard/step7_test.js
@@ -1413,4 +1413,51 @@ describe('App.InstallerStep7Controller', function () {
 
   });
 
+  describe('#showDatabaseConnectionWarningPopup', function () {
+
+var cases = [
+{
+  method: 'onSecondary',
+  submitButtonClicked: false,
+  isRejected: true,
+  title: 'Cancel button clicked'
+},
+{
+  method: 'onPrimary',
+  submitButtonClicked: true,
+  isResolved: true,
+  title: 'Proceed Anyway button clicked'
+}
+  ],
+  dfd,
+  testObject,
+  serviceNames = ['HIVE', 'OOZIE'],
+  bodyMessage = 'HIVE, OOZIE';
+
+beforeEach(function () {
+  installerStep7Controller.set('submitButtonClicked', true);
+  dfd = $.Deferred(function (d) {
+d.done(function () {
+  testObject.isResolved = true;
+});
+d.fail(function () {
+  testObject.isRejected = true;
+})
+  });
+  testObject = {};
+});
+
+cases.forEach(function (item) {
+  it(item.title, function () {
+var popup = installerStep7Controller.showDatabaseConnectionWarningPopup(serviceNames, dfd);
+expect(popup.get('body')).to.equal(Em.I18n.t('installer.step7.popup.database.connection.body').format(bodyMessage));
+popup[item.method]();
+expect(testObject.isResolved).to.equal(item.isResolved);
+expect(testObject.isRejected).to.equal(item.isRejected);
+expect(installerStep7Controller.get('submitButtonClicked')).to.equal(item.submitButtonClicked);
+  });
+});
+
+  });
+
 });



ambari git commit: AMBARI-10016. Kerberos: Run ambari-server using non-root causes issues with AD velocity engine (rlevas)

2015-03-11 Thread rlevas
Repository: ambari
Updated Branches:
  refs/heads/trunk d84898e59 -> 1c78b2015


AMBARI-10016. Kerberos: Run ambari-server using non-root causes issues with AD 
velocity engine (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1c78b201
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1c78b201
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1c78b201

Branch: refs/heads/trunk
Commit: 1c78b20151f12d2714da2e033db672fe5f080ced
Parents: d84898e
Author: Robert Levas 
Authored: Wed Mar 11 12:43:10 2015 -0400
Committer: Robert Levas 
Committed: Wed Mar 11 12:43:25 2015 -0400

--
 .../kerberos/ADKerberosOperationHandler.java   | 17 ++---
 1 file changed, 2 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/1c78b201/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
index faa813c..38a7563 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/kerberos/ADKerberosOperationHandler.java
@@ -25,7 +25,7 @@ import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.velocity.VelocityContext;
-import org.apache.velocity.app.VelocityEngine;
+import org.apache.velocity.app.Velocity;
 import org.apache.velocity.exception.MethodInvocationException;
 import org.apache.velocity.exception.ParseErrorException;
 import org.apache.velocity.exception.ResourceNotFoundException;
@@ -115,12 +115,6 @@ public class ADKerberosOperationHandler extends 
KerberosOperationHandler {
   private SearchControls searchControls = null;
 
   /**
-   * VelocityEngine used to process the "create principal template" that is 
expected to generate
-   * a JSON structure declaring the attributes of the Active Directory account
-   */
-  private VelocityEngine velocityEngine = null;
-
-  /**
* The Gson instance to use to convert the template-generated JSON structure 
to a Map of attribute
* names to values.
*/
@@ -183,9 +177,6 @@ public class ADKerberosOperationHandler extends 
KerberosOperationHandler {
 
 this.createTemplate = 
kerberosConfiguration.get(KERBEROS_ENV_CREATE_ATTRIBUTES_TEMPLATE);
 
-this.velocityEngine = new VelocityEngine();
-this.velocityEngine.init();
-
 this.gson = new Gson();
 
 setOpen(true);
@@ -199,7 +190,6 @@ public class ADKerberosOperationHandler extends 
KerberosOperationHandler {
   @Override
   public void close() throws KerberosOperationException {
 this.searchControls = null;
-this.velocityEngine = null;
 
 this.gson = null;
 
@@ -515,9 +505,6 @@ public class ADKerberosOperationHandler extends 
KerberosOperationHandler {
   protected Map processCreateTemplate(Map 
context)
   throws KerberosOperationException {
 
-if (velocityEngine == null) {
-  throw new KerberosOperationException("The Velocity Engine must not be null");
-}
 if (gson == null) {
   throw new KerberosOperationException("The JSON parser must not be null");
 }
@@ -543,7 +530,7 @@ public class ADKerberosOperationHandler extends 
KerberosOperationHandler {
 }
 
 try {
-  if (velocityEngine.evaluate(new VelocityContext(context), stringWriter, "Active Directory principal create template", template)) {
+  if (Velocity.evaluate(new VelocityContext(context), stringWriter, "Active Directory principal create template", template)) {
 String json = stringWriter.toString();
 Type type = new TypeToken>() {
 }.getType();



ambari git commit: AMBARI-10020 - Ambari Server sync-ldap not pulling in group membership (tbeerbower)

2015-03-11 Thread tbeerbower
Repository: ambari
Updated Branches:
  refs/heads/trunk a4dbcf321 -> 91cad


AMBARI-10020 - Ambari Server sync-ldap not pulling in group membership 
(tbeerbower)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/91ca
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/91ca
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/91ca

Branch: refs/heads/trunk
Commit: 91cad8862029882abf806e32dab051167d9c
Parents: a4dbcf3
Author: tbeerbower 
Authored: Wed Mar 11 10:19:49 2015 -0400
Committer: tbeerbower 
Committed: Wed Mar 11 10:19:49 2015 -0400

--
 .../security/ldap/AmbariLdapDataPopulator.java  | 71 ++--
 .../ldap/AmbariLdapDataPopulatorTest.java   | 34 +-
 2 files changed, 50 insertions(+), 55 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/91ca/ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java
index d4d3916..d1293cb 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulator.java
@@ -349,10 +349,11 @@ public class AmbariLdapDataPopulator {
* @param internalUsers map of internal users
* @throws AmbariException if group refresh failed
*/
-  protected void refreshGroupMembers(LdapBatchDto batchInfo, LdapGroupDto 
group, Map internalUsers) throws AmbariException {
+  protected void refreshGroupMembers(LdapBatchDto batchInfo, LdapGroupDto 
group, Map internalUsers)
+  throws AmbariException {
 Set externalMembers = new HashSet();
-for (String memberAttribute: group.getMemberAttributes()) {
-  LdapUserDto groupMember = getLdapUserByMemberAttr(memberAttribute);
+for (String memberAttributeValue: group.getMemberAttributes()) {
+  LdapUserDto groupMember = getLdapUserByMemberAttr(memberAttributeValue);
   if (groupMember != null) {
 externalMembers.add(groupMember.getUserName());
   }
@@ -417,14 +418,20 @@ public class AmbariLdapDataPopulator {
   /**
* Get the LDAP member for the given member attribute.
*
-   * @param memberAttribute  the member attribute
+   * @param memberAttributeValue  the member attribute value
*
* @return the user for the given member attribute; null if not found
*/
-  protected LdapUserDto getLdapUserByMemberAttr(String memberAttribute) {
-Filter userObjectFilter = new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, 
ldapServerProperties.getUserObjectClass());
-Set filteredLdapUsers = 
getFilteredLdapUsers(userObjectFilter, getMemberFilter(memberAttribute));
-return (filteredLdapUsers.isEmpty()) ? null : 
filteredLdapUsers.iterator().next();
+  protected LdapUserDto getLdapUserByMemberAttr(String memberAttributeValue) {
+LdapUserDto dto = getLdapUser(memberAttributeValue);
+if (dto == null) {
+  Set filteredLdapUsers = getFilteredLdapUsers(
+  new EqualsFilter(OBJECT_CLASS_ATTRIBUTE, 
ldapServerProperties.getUserObjectClass()),
+  getMemberFilter(memberAttributeValue));
+
+  dto = (filteredLdapUsers.isEmpty()) ? null : 
filteredLdapUsers.iterator().next();
+}
+return dto;
   }
 
   /**
@@ -455,31 +462,11 @@ public class AmbariLdapDataPopulator {
   }
 
   // get a filter based on the given member attribute
-  private Filter getMemberFilter(String memberAttribute) {
-
-String   usernameAttribute = ldapServerProperties.getUsernameAttribute();
-String   dnAttribute = ldapServerProperties.getDnAttribute();
-OrFilter memberFilter  = null;
-
-String[] filters = memberAttribute.split(",");
-for (String filter : filters) {
-  String[] operands = filter.split("=");
-  if (operands.length == 2) {
+  private Filter getMemberFilter(String memberAttributeValue) {
+String dnAttribute = ldapServerProperties.getDnAttribute();
 
-String lOperand = operands[0];
-
-if (lOperand.equals(usernameAttribute) || 
lOperand.equals(UID_ATTRIBUTE) || lOperand.equals(dnAttribute)) {
-  if (memberFilter == null) {
-memberFilter = new OrFilter();
-  }
-  memberFilter.or(new EqualsFilter(lOperand, operands[1]));
-}
-  }
-}
-return memberFilter == null ?
-new OrFilter().or(new EqualsFilter(dnAttribute, memberAttribute)).
-or(new EqualsFilter(UID_ATTRIBUTE, memberAttribute)) :
-memberFilter;
+return new OrFilter().or(new Equal

ambari git commit: AMBARI-10025. RU: Hive messages can be reduced (ncole)

2015-03-11 Thread ncole
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 ee0a7ce95 -> 8b854e79d


AMBARI-10025. RU: Hive messages can be reduced (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8b854e79
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8b854e79
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8b854e79

Branch: refs/heads/branch-2.0.0
Commit: 8b854e79d4ab29d86fbfa2808743aee930a25687
Parents: ee0a7ce
Author: Nate Cole 
Authored: Wed Mar 11 09:28:29 2015 -0400
Committer: Nate Cole 
Committed: Wed Mar 11 09:28:29 2015 -0400

--
 .../main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml   | 8 +---
 1 file changed, 1 insertion(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/8b854e79/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
index 4cdccb1..b3b351b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
@@ -40,7 +40,7 @@
 
   
 
-  Before continuing, please backup the Hive Metastore 
database located on the following host(s): {{hosts.all}}.  During the upgrade 
process, you will be instructed when it is time to upgrade the Hive Metastore 
database.  To prepare, review the README documentation at 
/usr/hdp/{{version}}/hive/scripts/metastore/upgrade/[db] for the correct 
database type when you are asked to perform this manual step (during the Hive 
stage of upgrade).  Do NOT upgrade the database at this time.  You will be 
prompted when to perform the database upgrade.
+  Before continuing, please backup the Hive Metastore 
database located on the following host(s): {{hosts.all}}.
 
   
 
@@ -425,12 +425,6 @@
 
 
   
-
-  
-The Hive Metastore database schema must be upgraded 
before proceeding.  Please consult the README documentation at 
/usr/hdp/{{version}}/hive/scripts/metastore/upgrade/[db] for the correct 
database type to correctly upgrade the Hive Metastore database.  Note that the 
README may indicate to stop the Metastore; this step is not required, as Ambari 
will restart the required processes.  The database upgrade should be performed 
on the following host(s): {{hosts.all}}.
-  
-
-
 
   
 



ambari git commit: AMBARI-10025. RU: Hive messages can be reduced (ncole)

2015-03-11 Thread ncole
Repository: ambari
Updated Branches:
  refs/heads/trunk 9e920aae7 -> a4dbcf321


AMBARI-10025. RU: Hive messages can be reduced (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a4dbcf32
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a4dbcf32
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a4dbcf32

Branch: refs/heads/trunk
Commit: a4dbcf321ed55b1b152fe7689aefaa8a85fa9676
Parents: 9e920aa
Author: Nate Cole 
Authored: Wed Mar 11 07:27:41 2015 -0400
Committer: Nate Cole 
Committed: Wed Mar 11 09:27:21 2015 -0400

--
 .../main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml   | 8 +---
 1 file changed, 1 insertion(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/a4dbcf32/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml 
b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
index 4cdccb1..b3b351b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.2.xml
@@ -40,7 +40,7 @@
 
   
 
-  Before continuing, please backup the Hive Metastore 
database located on the following host(s): {{hosts.all}}.  During the upgrade 
process, you will be instructed when it is time to upgrade the Hive Metastore 
database.  To prepare, review the README documentation at 
/usr/hdp/{{version}}/hive/scripts/metastore/upgrade/[db] for the correct 
database type when you are asked to perform this manual step (during the Hive 
stage of upgrade).  Do NOT upgrade the database at this time.  You will be 
prompted when to perform the database upgrade.
+  Before continuing, please backup the Hive Metastore 
database located on the following host(s): {{hosts.all}}.
 
   
 
@@ -425,12 +425,6 @@
 
 
   
-
-  
-The Hive Metastore database schema must be upgraded 
before proceeding.  Please consult the README documentation at 
/usr/hdp/{{version}}/hive/scripts/metastore/upgrade/[db] for the correct 
database type to correctly upgrade the Hive Metastore database.  Note that the 
README may indicate to stop the Metastore; this step is not required, as Ambari 
will restart the required processes.  The database upgrade should be performed 
on the following host(s): {{hosts.all}}.
-  
-
-
 
   
 



ambari git commit: AMBARI-10009. SLES secured cluster has alerts for Yarn and Storm.(vbrodetskyi)

2015-03-11 Thread vbrodetskyi
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.0 a842630e4 -> ee0a7ce95


AMBARI-10009. SLES secured cluster has alerts for Yarn and Storm.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ee0a7ce9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ee0a7ce9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ee0a7ce9

Branch: refs/heads/branch-2.0.0
Commit: ee0a7ce95d0e25a46eaaf1c1dde0023d3cdbf40f
Parents: a842630
Author: Vitaly Brodetskyi 
Authored: Wed Mar 11 15:15:29 2015 +0200
Committer: Vitaly Brodetskyi 
Committed: Wed Mar 11 15:15:29 2015 +0200

--
 .../main/python/ambari_agent/alerts/web_alert.py| 16 +++-
 1 file changed, 15 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/ee0a7ce9/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
--
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py 
b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
index d7a833e..8252781 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
@@ -22,6 +22,7 @@ import logging
 import time
 import subprocess
 import os
+import uuid
 from  tempfile import gettempdir
 from alerts.base_alert import BaseAlert
 from collections import namedtuple
@@ -172,10 +173,19 @@ class WebAlert(BaseAlert):
   else:
 kerberos_env = None
 
+  # check if cookies dir exists, if not then create it
+  tmp_dir = self.config.get('agent', 'tmp_dir')
+  cookies_dir = os.path.join(tmp_dir, "cookies")
+
+  if not os.path.exists(cookies_dir):
+os.makedirs(cookies_dir)
+
   # substitute 0.0.0.0 in url with actual fqdn
   url = url.replace('0.0.0.0', self.host_name)
+  cookie_file_name = str(uuid.uuid4())
+  cookie_file = os.path.join(cookies_dir, cookie_file_name)
   start_time = time.time()
-  curl = subprocess.Popen(['curl', '--negotiate', '-u', ':', '-sL', '-w',
+  curl = subprocess.Popen(['curl', '--negotiate', '-u', ':', '-b', cookie_file, '-c', cookie_file, '-sL', '-w',
 '%{http_code}', url, '--connect-timeout', CURL_CONNECTION_TIMEOUT,
'-o', '/dev/null'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=kerberos_env)
 
@@ -192,6 +202,10 @@ class WebAlert(BaseAlert):
 
   return WebResponse(status_code=0, time_millis=0, error_msg=str(exc))
 
+finally:
+  if os.path.isfile(cookie_file):
+os.remove(cookie_file)
+
 return WebResponse(status_code=response_code, time_millis=time_millis, error_msg=None)
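
The change above gives every curl-based web alert its own cookie jar (-b/-c pointing at a per-check file under the agent tmp_dir's cookies directory) and removes it in a finally block, so concurrent Kerberos-authenticated checks (curl --negotiate) cannot clobber each other's cookies. A rough standalone sketch of the same pattern outside the agent's WebAlert class; the function name check_url and the use of tempfile.gettempdir() are illustrative assumptions:

import os
import subprocess
import tempfile
import time
import uuid

def check_url(url, timeout='5'):
    # Give every curl invocation its own cookie jar (-b/-c) so concurrent
    # checks cannot clobber each other's cookies, and always remove the file.
    cookies_dir = os.path.join(tempfile.gettempdir(), 'cookies')
    if not os.path.exists(cookies_dir):
        os.makedirs(cookies_dir)
    cookie_file = os.path.join(cookies_dir, str(uuid.uuid4()))

    start = time.time()
    try:
        curl = subprocess.Popen(
            ['curl', '--negotiate', '-u', ':', '-b', cookie_file, '-c', cookie_file,
             '-sL', '-w', '%{http_code}', url, '--connect-timeout', timeout,
             '-o', '/dev/null'],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = curl.communicate()
        return int(out.decode().strip()), int((time.time() - start) * 1000)
    finally:
        if os.path.isfile(cookie_file):
            os.remove(cookie_file)

if __name__ == '__main__':
    print(check_url('http://localhost:8080'))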
 
 



ambari git commit: AMBARI-10009. SLES secured cluster has alerts for Yarn and Storm.(vbrodetskyi)

2015-03-11 Thread vbrodetskyi
Repository: ambari
Updated Branches:
  refs/heads/trunk 08dd22f94 -> 9e920aae7


AMBARI-10009. SLES secured cluster has alerts for Yarn and Storm.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9e920aae
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9e920aae
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9e920aae

Branch: refs/heads/trunk
Commit: 9e920aae7a7fe5e129e37791040909e0ae55ff84
Parents: 08dd22f
Author: Vitaly Brodetskyi 
Authored: Wed Mar 11 15:14:16 2015 +0200
Committer: Vitaly Brodetskyi 
Committed: Wed Mar 11 15:14:16 2015 +0200

--
 .../main/python/ambari_agent/alerts/web_alert.py| 16 +++-
 1 file changed, 15 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/9e920aae/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
--
diff --git a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py 
b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
index d7a833e..8252781 100644
--- a/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
+++ b/ambari-agent/src/main/python/ambari_agent/alerts/web_alert.py
@@ -22,6 +22,7 @@ import logging
 import time
 import subprocess
 import os
+import uuid
 from  tempfile import gettempdir
 from alerts.base_alert import BaseAlert
 from collections import namedtuple
@@ -172,10 +173,19 @@ class WebAlert(BaseAlert):
   else:
 kerberos_env = None
 
+  # check if cookies dir exists, if not then create it
+  tmp_dir = self.config.get('agent', 'tmp_dir')
+  cookies_dir = os.path.join(tmp_dir, "cookies")
+
+  if not os.path.exists(cookies_dir):
+os.makedirs(cookies_dir)
+
   # substitute 0.0.0.0 in url with actual fqdn
   url = url.replace('0.0.0.0', self.host_name)
+  cookie_file_name = str(uuid.uuid4())
+  cookie_file = os.path.join(cookies_dir, cookie_file_name)
   start_time = time.time()
-  curl = subprocess.Popen(['curl', '--negotiate', '-u', ':', '-sL', '-w',
+  curl = subprocess.Popen(['curl', '--negotiate', '-u', ':', '-b', cookie_file, '-c', cookie_file, '-sL', '-w',
 '%{http_code}', url, '--connect-timeout', CURL_CONNECTION_TIMEOUT,
'-o', '/dev/null'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=kerberos_env)
 
@@ -192,6 +202,10 @@ class WebAlert(BaseAlert):
 
   return WebResponse(status_code=0, time_millis=0, error_msg=str(exc))
 
+finally:
+  if os.path.isfile(cookie_file):
+os.remove(cookie_file)
+
 return WebResponse(status_code=response_code, time_millis=time_millis, error_msg=None)