ambari git commit: AMBARI-11044. atlas version should not include 0.0 (Jon Maron via rlevas)

2015-05-11 Thread rlevas
Repository: ambari
Updated Branches:
  refs/heads/trunk c415528d4 -> b0fa86ce8


AMBARI-11044. atlas version should not include 0.0 (Jon Maron via rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b0fa86ce
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b0fa86ce
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b0fa86ce

Branch: refs/heads/trunk
Commit: b0fa86ce88ca5f88e21392c41b48ae3ab15615e9
Parents: c415528
Author: Jon Maron jma...@hortonworks.com
Authored: Mon May 11 11:10:24 2015 -0400
Committer: Robert Levas rle...@hortonworks.com
Committed: Mon May 11 11:10:24 2015 -0400

--
 .../ATLAS/0.1.0.2.3.0.0/alerts.json |  60 --
 .../configuration/application-properties.xml| 190 ---
 .../configuration/metadata-env.xml  |  94 -
 .../ATLAS/0.1.0.2.3.0.0/kerberos.json   |  59 --
 .../ATLAS/0.1.0.2.3.0.0/metainfo.xml|  79 
 .../ATLAS/0.1.0.2.3.0.0/package/files/log4j.xml |  85 -
 .../0.1.0.2.3.0.0/package/scripts/metadata.py   |  96 --
 .../package/scripts/metadata_server.py  |  69 ---
 .../0.1.0.2.3.0.0/package/scripts/params.py |  80 
 .../package/scripts/properties_config.py|  33 
 .../package/scripts/service_check.py|  57 --
 .../package/scripts/status_params.py|  36 
 .../common-services/ATLAS/0.1.0.2.3/alerts.json |  60 ++
 .../configuration/application-properties.xml| 190 +++
 .../0.1.0.2.3/configuration/metadata-env.xml|  94 +
 .../ATLAS/0.1.0.2.3/kerberos.json   |  59 ++
 .../ATLAS/0.1.0.2.3/metainfo.xml|  79 
 .../ATLAS/0.1.0.2.3/package/files/log4j.xml |  85 +
 .../ATLAS/0.1.0.2.3/package/scripts/metadata.py |  96 ++
 .../package/scripts/metadata_server.py  |  69 +++
 .../ATLAS/0.1.0.2.3/package/scripts/params.py   |  80 
 .../package/scripts/properties_config.py|  33 
 .../0.1.0.2.3/package/scripts/service_check.py  |  57 ++
 .../0.1.0.2.3/package/scripts/status_params.py  |  36 
 .../stacks/HDP/2.3/services/ATLAS/metainfo.xml  |   4 +-
 .../stacks/2.3/ATLAS/test_metadata_server.py|   2 +-
 26 files changed, 941 insertions(+), 941 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/b0fa86ce/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3.0.0/alerts.json
--
diff --git 
a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3.0.0/alerts.json
 
b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3.0.0/alerts.json
deleted file mode 100644
index 7202950..000
--- 
a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3.0.0/alerts.json
+++ /dev/null
@@ -1,60 +0,0 @@
-{
-  ATLAS: {
-service: [],
-ATLAS_SERVER: [
-  {
-name: metadata_server_process,
-label: Atlas Metadata Server Process,
-description: This host-level alert is triggered if the individual 
Metadata server process cannot be established to be up and listening on the 
network.,
-interval: 1,
-scope: ANY,
-source: {
-  type: PORT,
-  uri: {{metadata-env/metadata_port}},
-  default_port: 21000,
-  reporting: {
-ok: {
-  text: TCP OK - {0:.3f}s response on port {1}
-},
-warning: {
-  text: TCP OK - {0:.3f}s response on port {1},
-  value: 1.5
-},
-critical: {
-  text: Connection failed: {0} to {1}:{2},
-  value: 5.0
-}
-  }
-}
-  },
-  {
-name: metadata_server_webui,
-label: Metadata Server Web UI,
-description: This host-level alert is triggered if the Metadata 
Server Web UI is unreachable.,
-interval: 1,
-scope: ANY,
-enabled: true,
-source: {
-  type: WEB,
-  uri: {
-http: {{metadata-env/metadata_port}},
-default_port: 21000,
-kerberos_keytab: 
{{metadata-runtime.properties/*.metadata.http.authentication.keytab}},
-kerberos_principal: 
{{metadata-runtime.properties/*.metadata.http.authentication.principal}}
-  },
-  reporting: {
-ok: {
-  text: HTTP {0} response in {2:.3f}s
-},
-warning:{
-  text: HTTP {0} response from {1} in {2:.3f}s ({3})
-},
-critical: {
-  text: Connection failed to {1} ({3})
-}
-  }
-}
-  }
-]
-  }
-}


ambari git commit: AMBARI-11009. Storm service check fails with multiple nimbus hosts, remove messages from Storm RU now that it is rolling, allow nimbus.seeds to be edited (alejandro)

2015-05-11 Thread alejandro
Repository: ambari
Updated Branches:
  refs/heads/trunk b0fa86ce8 -> 661d143b9


AMBARI-11009. Storm service check fails with multiple nimbus hosts, remove 
messages from Storm RU now that it is rolling, allow nimbus.seeds to be edited 
(alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/661d143b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/661d143b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/661d143b

Branch: refs/heads/trunk
Commit: 661d143b9010711a55bb3f7f903aa148425733e2
Parents: b0fa86c
Author: Alejandro Fernandez afernan...@hortonworks.com
Authored: Thu May 7 16:03:43 2015 -0700
Committer: Alejandro Fernandez afernan...@hortonworks.com
Committed: Mon May 11 11:09:50 2015 -0700

--
 .../0.9.1.2.1/package/scripts/params_linux.py   |   8 +-
 .../0.9.1.2.1/package/scripts/service_check.py  |   8 +-
 .../0.9.1.2.1/package/scripts/yaml_utils.py |  10 +-
 .../STORM/package/scripts/yaml_config.py|   6 +-
 .../services/STORM/configuration/storm-site.xml |   2 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.3.xml |  10 -
 .../stacks/2.3/STORM/test_service_check.py  |  50 
 .../python/stacks/2.3/STORM/test_storm_base.py  | 124 +
 .../stacks/2.3/configs/storm_default.json   | 260 +++
 ambari-web/app/data/HDP2/site_properties.js |   1 -
 10 files changed, 456 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/661d143b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
index 6c96d73..729e383 100644
--- 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/params_linux.py
@@ -81,8 +81,12 @@ user_group = 
config['configurations']['cluster-env']['user_group']
 java64_home = config['hostLevelParams']['java_home']
 jps_binary = format({java64_home}/bin/jps)
 nimbus_port = config['configurations']['storm-site']['nimbus.thrift.port']
-nimbus_seeds_supported = 
config['configurations']['storm-env']['nimbus_seeds_supported']
-nimbus_host = config['configurations']['storm-site']['nimbus.seeds'] if 
nimbus_seeds_supported else 
config['configurations']['storm-site']['nimbus.host']
+
+# nimbus.seeds is supported in HDP 2.3.0.0 and higher
+nimbus_seeds_supported = 
default('/configurations/storm-env/nimbus_seeds_supported', False)
+nimbus_host = default('/configurations/storm-site/nimbus.host', None)
+nimbus_seeds = default('/configurations/storm-site/nimbus.seeds', None)
+
 rest_api_port = 8745
 rest_api_admin_port = 8746
 rest_api_conf_file = format({conf_dir}/config.yaml)

http://git-wip-us.apache.org/repos/asf/ambari/blob/661d143b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py
index 483c144..4484501 100644
--- 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/service_check.py
@@ -53,7 +53,13 @@ class ServiceCheckDefault(ServiceCheck):
  content=StaticFile(wordCount.jar)
 )
 
-cmd = format(storm jar /tmp/wordCount.jar storm.starter.WordCountTopology 
WordCount{unique} -c nimbus.host={nimbus_host})
+cmd = 
+if params.nimbus_seeds_supported:
+  # Because this command is guaranteed to run on one of the hosts with 
storm client, there is no need
+  # to specify -c nimbus.seeds={nimbus_seeds}
+  cmd = format(storm jar /tmp/wordCount.jar 
storm.starter.WordCountTopology WordCount{unique})
+elif params.nimbus_host is not None:
+  cmd = format(storm jar /tmp/wordCount.jar 
storm.starter.WordCountTopology WordCount{unique} -c nimbus.host={nimbus_host})
 
 Execute(cmd,
 logoutput=True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/661d143b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/yaml_utils.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/package/scripts/yaml_utils.py
 

ambari git commit: AMBARI-11038. Fix timing issue regarding setting of topology resolved configuration for clusters provisioned via blueprints

2015-05-11 Thread jspeidel
Repository: ambari
Updated Branches:
  refs/heads/trunk d05c9c287 -> 66a4bfb26


AMBARI-11038. Fix timing issue regarding setting of topology resolved 
configuration
for clusters provisioned via blueprints


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/66a4bfb2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/66a4bfb2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/66a4bfb2

Branch: refs/heads/trunk
Commit: 66a4bfb26ad5a59d925e56f465bec3346696
Parents: d05c9c2
Author: John Speidel jspei...@hortonworks.com
Authored: Mon May 11 15:38:25 2015 -0400
Committer: John Speidel jspei...@hortonworks.com
Committed: Mon May 11 17:37:03 2015 -0400

--
 .../topology/ClusterConfigurationRequest.java   | 14 ++--
 .../ambari/server/topology/LogicalRequest.java  | 14 
 .../ambari/server/topology/TopologyManager.java | 67 ++--
 .../server/topology/TopologyManagerTest.java| 27 +++-
 4 files changed, 50 insertions(+), 72 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/66a4bfb2/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
index a8c2ff3..eb583fd 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java
@@ -84,7 +84,7 @@ public class ClusterConfigurationRequest {
*/
   public void setConfigurationsOnCluster(ClusterTopology clusterTopology, 
String tag)  {
 //todo: also handle setting of host group scoped configuration which is 
updated by config processor
-ListBlueprintServiceConfigRequest listofConfigRequests = new 
LinkedListBlueprintServiceConfigRequest();
+ListBlueprintServiceConfigRequest configurationRequests = new 
LinkedListBlueprintServiceConfigRequest();
 
 Blueprint blueprint = clusterTopology.getBlueprint();
 Configuration clusterConfiguration = clusterTopology.getConfiguration();
@@ -108,7 +108,7 @@ public class ClusterConfigurationRequest {
 }
   }
 
-  listofConfigRequests.add(blueprintConfigRequest);
+  configurationRequests.add(blueprintConfigRequest);
 }
 
 // since the stack returns cluster-env with each service's config ensure 
that only one
@@ -118,9 +118,9 @@ public class ClusterConfigurationRequest {
 MapString, MapString, String clusterEnvAttributes = 
clusterConfiguration.getFullAttributes().get(cluster-env);
 
 globalConfigRequest.addConfigElement(cluster-env, 
clusterEnvProps,clusterEnvAttributes);
-listofConfigRequests.add(globalConfigRequest);
+configurationRequests.add(globalConfigRequest);
 
-setConfigurationsOnCluster(listofConfigRequests, tag);
+setConfigurationsOnCluster(configurationRequests, tag);
   }
 
   /**
@@ -131,12 +131,12 @@ public class ClusterConfigurationRequest {
*
* This method will also send these requests to the management controller.
*
-   * @param listOfBlueprintConfigRequests a list of requests to send to the 
AmbariManagementController.
+   * @param configurationRequests a list of requests to send to the 
AmbariManagementController.
*/
-  private void setConfigurationsOnCluster(ListBlueprintServiceConfigRequest 
listOfBlueprintConfigRequests,
+  private void setConfigurationsOnCluster(ListBlueprintServiceConfigRequest 
configurationRequests,
   String tag)  {
 // iterate over services to deploy
-for (BlueprintServiceConfigRequest blueprintConfigRequest : 
listOfBlueprintConfigRequests) {
+for (BlueprintServiceConfigRequest blueprintConfigRequest : 
configurationRequests) {
   ClusterRequest clusterRequest = null;
   // iterate over the config types associated with this service
   ListConfigurationRequest requestsPerService = new 
LinkedListConfigurationRequest();

http://git-wip-us.apache.org/repos/asf/ambari/blob/66a4bfb2/ambari-server/src/main/java/org/apache/ambari/server/topology/LogicalRequest.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/topology/LogicalRequest.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/topology/LogicalRequest.java
index 087ad4c..88c791b 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/topology/LogicalRequest.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/topology/LogicalRequest.java
@@ -158,20 +158,6 

[3/8] ambari git commit: Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

2015-05-11 Thread jonathanhurley
Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

This reverts commit 02cd3ae9a98f9a3adaeba8e4c50203ca6e042755.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/714838d8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/714838d8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/714838d8

Branch: refs/heads/trunk
Commit: 714838d8a9984954377b37a2c55ba7d8a0199bb6
Parents: 661d143
Author: Jonathan Hurley jhur...@hortonworks.com
Authored: Mon May 11 15:20:16 2015 -0400
Committer: Jonathan Hurley jhur...@hortonworks.com
Committed: Mon May 11 15:30:02 2015 -0400

--
 .../HIVE/0.12.0.2.0/package/scripts/params_linux.py| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/714838d8/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 6f390c1..89f0224 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -112,7 +112,7 @@ if Script.is_hdp_stack_greater_or_equal(2.2):
   hive_tar_destination = 
config['configurations']['cluster-env']['hive_tar_destination_folder']  + / + 
os.path.basename(hive_tar_source)
   pig_tar_destination = 
config['configurations']['cluster-env']['pig_tar_destination_folder'] + / + 
os.path.basename(pig_tar_source)
   hadoop_streaming_tar_destination_dir = 
config['configurations']['cluster-env']['hadoop-streaming_tar_destination_folder']
-  sqoop_tar_destination_dir = 
config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + / + 
os.path.basename(sqoop_tar_source)
+  sqoop_tar_destination = 
config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + / + 
os.path.basename(sqoop_tar_source)
   mapreduce_tar_destination = 
config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + 
/ + os.path.basename(mapreduce_tar_source)
   tez_tar_destination = 
config['configurations']['cluster-env']['tez_tar_destination_folder'] + / + 
os.path.basename(tez_tar_source)
 



[2/8] ambari git commit: Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

2015-05-11 Thread jonathanhurley
Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

This reverts commit bf9301247db36a504bc02a39ab8cc92fcca10a52.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d2ebd9c9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d2ebd9c9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d2ebd9c9

Branch: refs/heads/trunk
Commit: d2ebd9c9ea9414a293ec73f416e9aac40c9ad48a
Parents: 714838d
Author: Jonathan Hurley jhur...@hortonworks.com
Authored: Mon May 11 15:20:26 2015 -0400
Committer: Jonathan Hurley jhur...@hortonworks.com
Committed: Mon May 11 15:30:02 2015 -0400

--
 .../common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/d2ebd9c9/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 7a3401e..5f3f5f6 100644
--- 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -16,7 +16,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express 
or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 
-
+HBASE/0.96.0.2.0/package/scripts/params_linux.py
 from resource_management.libraries.functions import conf_select
 from resource_management import *
 from resource_management.libraries.functions import format



[8/8] ambari git commit: Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

2015-05-11 Thread jonathanhurley
Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

This reverts commit 6e8dce44336c0afaa34a5aefbc500a116b4b91e8.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e833066e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e833066e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e833066e

Branch: refs/heads/trunk
Commit: e833066e72050bed2b34cbdb00d70167a7c09835
Parents: 20161e6
Author: Jonathan Hurley jhur...@hortonworks.com
Authored: Mon May 11 15:20:41 2015 -0400
Committer: Jonathan Hurley jhur...@hortonworks.com
Committed: Mon May 11 15:30:03 2015 -0400

--
 ambari-agent/pom.xml|   7 -
 .../resource_management/TestContentSources.py   |   6 +-
 .../resource_management/TestCopyFromLocal.py|  68 +++
 .../TestPropertiesFileResource.py   |  10 +-
 .../TestRepositoryResource.py   |   6 +-
 .../TestXmlConfigResource.py|   8 +-
 .../python/resource_management/core/source.py   |  11 +-
 .../libraries/functions/__init__.py |   1 +
 .../libraries/functions/version.py  |  29 +-
 .../libraries/providers/__init__.py |   3 +-
 .../libraries/providers/copy_from_local.py  |  94 
 .../libraries/providers/hdfs_directory.py   | 112 +
 .../libraries/providers/hdfs_resource.py| 111 -
 .../libraries/resources/__init__.py |   3 +-
 .../libraries/resources/copy_from_local.py  |  41 ++
 .../libraries/resources/hdfs_directory.py   |  45 ++
 .../libraries/resources/hdfs_resource.py|  76 
 .../libraries/script/script.py  |   2 -
 .../1.6.1.2.2.0/package/scripts/accumulo.py |   6 +-
 .../package/scripts/accumulo_configuration.py   |  14 +-
 .../1.6.1.2.2.0/package/scripts/params.py   |  19 +-
 .../0.1.0/package/scripts/hbase.py  |  14 +-
 .../0.1.0/package/scripts/params.py |  16 +-
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |  23 +-
 .../0.5.0.2.1/package/scripts/params_linux.py   |  20 +-
 .../HBASE/0.96.0.2.0/package/scripts/hbase.py   |  14 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |  21 +-
 .../0.96.0.2.0/package/scripts/service_check.py |   1 +
 .../package/files/fast-hdfs-resource.jar| Bin 19285282 - 0 bytes
 .../HDFS/2.1.0.2.0/package/scripts/hdfs.py  |   5 -
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py  |  15 +-
 .../package/scripts/hdfs_nfsgateway.py  |   1 +
 .../2.1.0.2.0/package/scripts/hdfs_snamenode.py |   1 +
 .../2.1.0.2.0/package/scripts/params_linux.py   |  20 +-
 .../2.1.0.2.0/package/scripts/service_check.py  |  53 ++-
 .../0.12.0.2.0/package/files/templetonSmoke.sh  |  22 +-
 .../HIVE/0.12.0.2.0/package/scripts/hive.py | 116 ++---
 .../0.12.0.2.0/package/scripts/hive_server.py   |  13 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |  63 +--
 .../HIVE/0.12.0.2.0/package/scripts/webhcat.py  |  71 +++
 .../package/scripts/webhcat_service_check.py|  28 +-
 .../package/templates/templeton_smoke.pig.j2|  24 -
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |  17 +-
 .../1.0.0.2.3/package/scripts/service_check.py  |  49 +-
 .../4.0.0.2.0/package/files/oozieSmoke2.sh  |  52 ++-
 .../files/prepareOozieHdfsDirectories.sh|  45 --
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py|   6 +-
 .../package/scripts/oozie_server_upgrade.py |  16 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |  22 +-
 .../4.0.0.2.0/package/scripts/service_check.py  |  38 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |  25 +-
 .../0.12.0.2.0/package/scripts/service_check.py |  67 ++-
 .../package/scripts/job_history_server.py   |  10 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  21 +-
 .../1.2.0.2.2/package/scripts/setup_spark.py|   6 +-
 .../1.2.0.2.2/package/scripts/spark_service.py  |   9 +-
 .../STORM/0.9.1.2.1/configuration/storm-env.xml |  12 +-
 .../0.4.0.2.1/package/scripts/params_linux.py   |  25 +-
 .../0.4.0.2.1/package/scripts/service_check.py  |  53 ++-
 .../2.1.0.2.0/package/scripts/historyserver.py  |  25 +-
 .../2.1.0.2.0/package/scripts/install_jars.py   |  68 +--
 .../package/scripts/mapred_service_check.py |  27 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |  37 +-
 .../package/scripts/resourcemanager.py  |  12 +-
 .../2.1.0.2.0/package/scripts/service_check.py  |   2 +-
 .../YARN/2.1.0.2.0/package/scripts/yarn.py  |  38 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py | 192 
 .../2.0.6/HBASE/test_hbase_regionserver.py  |  91 
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   6 -
 .../stacks/2.0.6/HDFS/test_journalnode.py   |   6 -
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 448 +--
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |   6 -
 

[6/8] ambari git commit: Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

2015-05-11 Thread jonathanhurley
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
index 1e75eb0..583e6e9 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
@@ -61,35 +61,37 @@ def yarn(name = None):
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def yarn(name = None):
   import params
-  if name == historyserver:
+  if name in [nodemanager,historyserver]:
 if params.yarn_log_aggregation_enabled:
-  params.HdfsResource(params.yarn_nm_app_log_dir,
-   action=create_on_execute,
-   type=directory,
+  params.HdfsDirectory(params.yarn_nm_app_log_dir,
+   action=create_delayed,
owner=params.yarn_user,
group=params.user_group,
mode=0777,
recursive_chmod=True
   )
-params.HdfsResource(/mapred,
- type=directory,
- action=create_on_execute,
+params.HdfsDirectory(/mapred,
+ action=create_delayed,
  owner=params.mapred_user
 )
-params.HdfsResource(/mapred/system,
- type=directory,
- action=create_on_execute,
+params.HdfsDirectory(/mapred/system,
+ action=create_delayed,
  owner=params.hdfs_user
 )
-params.HdfsResource(params.mapreduce_jobhistory_done_dir,
- type=directory,
- action=create_on_execute,
+params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
+ action=create_delayed,
  owner=params.mapred_user,
  group=params.user_group,
- change_permissions_for_parents=True,
  mode=0777
 )
-params.HdfsResource(None, action=execute)
+
+params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
+ action=create_delayed,
+ owner=params.mapred_user,
+ group=params.user_group,
+ mode=01777
+)
+params.HdfsDirectory(None, action=create)
 
   if name == nodemanager:
 Directory(params.nm_local_dirs.split(',') + params.nm_log_dirs.split(','),
@@ -176,14 +178,12 @@ def yarn(name = None):
group=params.user_group
 )
 if not is_empty(params.node_label_enable) and params.node_label_enable or 
is_empty(params.node_label_enable) and params.node_labels_dir:
-  params.HdfsResource(params.node_labels_dir,
-   type=directory,
-   action=create_on_execute,
+  params.HdfsDirectory(params.node_labels_dir,
+   action=create,
owner=params.yarn_user,
group=params.user_group,
mode=0700
   )
-  params.HdfsResource(None, action=execute)
   elif name == 'apptimelineserver':
 Directory(params.ats_leveldb_dir,
owner=params.yarn_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
--
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py 
b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index cc404eb..b56d15a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -280,42 +280,36 @@ class TestHBaseMaster(RMFTestCase):
   owner='hbase',
   content='log4jproperties\nline2'
 )
-
-self.assertResourceCalled('HdfsResource', 
'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-security_enabled = False,
-hadoop_bin_dir = '/usr/bin',
-keytab = UnknownConfigurationMock(),
-
-kinit_path_local = '/usr/bin/kinit',
-user = 'hdfs',
-owner = 'hbase',
-hadoop_conf_dir = '/etc/hadoop/conf',
-type = 'directory',
-action = ['create_on_execute'],
-)
-self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
-security_enabled = False,
-hadoop_conf_dir = '/etc/hadoop/conf',
-keytab = 

[4/8] ambari git commit: Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

2015-05-11 Thread jonathanhurley
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.2/configs/default.json
--
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/default.json 
b/ambari-server/src/test/python/stacks/2.2/configs/default.json
index 8188928..c5a6ae7 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/default.json
@@ -185,18 +185,7 @@
 ignore_groupsusers_create: false,
 smokeuser: ambari-qa,
 kerberos_domain: EXAMPLE.COM,
-user_group: hadoop,
-   hive_tar_source: /usr/hdp/current/hive-client/hive.tar.gz, 
-   hadoop-streaming_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/mapreduce/, 
-   pig_tar_source: /usr/hdp/current/pig-client/pig.tar.gz, 
-   hive_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/hive/, 
-   tez_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/tez/, 
-   mapreduce_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/mapreduce/, 
-   tez_tar_source: /usr/hdp/current/tez-client/lib/tez.tar.gz, 
-   pig_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/pig/, 
-   mapreduce_tar_source: 
/usr/hdp/current/hadoop-client/mapreduce.tar.gz, 
-   sqoop_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/sqoop/, 
-   sqoop_tar_source: /usr/hdp/current/sqoop-client/sqoop.tar.gz
+user_group: hadoop
 },
 ranger-knox-plugin-properties: {
 POLICY_MGR_URL: {{policymgr_mgr_url}}, 

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.2/configs/secured.json
--
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/secured.json 
b/ambari-server/src/test/python/stacks/2.2/configs/secured.json
index e224ebc..5bd8814 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/secured.json
@@ -173,18 +173,7 @@
 user_group: hadoop,
 smokeuser_keytab: 
/etc/security/keytabs/smokeuser.headless.keytab,
 smokeuser_principal_name: ambari...@example.com,
-kinit_path_local: /usr/bin,
-   hive_tar_source: /usr/hdp/current/hive-client/hive.tar.gz, 
-   hadoop-streaming_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/mapreduce/, 
-   pig_tar_source: /usr/hdp/current/pig-client/pig.tar.gz, 
-   hive_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/hive/, 
-   tez_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/tez/, 
-   mapreduce_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/mapreduce/, 
-   tez_tar_source: /usr/hdp/current/tez-client/lib/tez.tar.gz, 
-   pig_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/pig/, 
-   mapreduce_tar_source: 
/usr/hdp/current/hadoop-client/mapreduce.tar.gz, 
-   sqoop_tar_destination_folder: hdfs:///hdp/apps/{{ 
hdp_stack_version }}/sqoop/, 
-   sqoop_tar_source: /usr/hdp/current/sqoop-client/sqoop.tar.gz
+kinit_path_local: /usr/bin
 },
 webhcat-site: {
 templeton.jar: 
/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
--
diff --git 
a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py 
b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
index 665119f..0d943c4 100644
--- 
a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
+++ 
b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
@@ -33,42 +33,35 @@ class TestMahoutClient(RMFTestCase):
target = RMFTestCase.TARGET_COMMON_SERVICES
 )
 
+self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f 
/user/ambari-qa/mahoutsmokeoutput /user/ambari-qa/mahoutsmokeinput',
+  security_enabled = False,
+  keytab = UnknownConfigurationMock(),
+  conf_dir = '/usr/hdp/current/hadoop-client/conf',
+  try_sleep = 5,
+  kinit_path_local = '/usr/bin/kinit',
+  tries = 3,
+  user = 'ambari-qa',
+  

[5/8] ambari git commit: Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

2015-05-11 Thread jonathanhurley
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
--
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index f2d7edb..3fa1c9c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -117,6 +117,37 @@ class TestWebHCatServer(RMFTestCase):
 self.assertNoMoreResources()
 
   def assert_configure_default(self):
+self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+  security_enabled = False,
+  keytab = UnknownConfigurationMock(),
+  conf_dir = '/etc/hadoop/conf',
+  hdfs_user = 'hdfs',
+  kinit_path_local = "/usr/bin/kinit",
+  mode = 0755,
+  owner = 'hcat',
+  bin_dir = '/usr/bin',
+  action = ['create_delayed'],
+  )
+self.assertResourceCalled('HdfsDirectory', '/user/hcat',
+  security_enabled = False,
+  keytab = UnknownConfigurationMock(),
+  conf_dir = '/etc/hadoop/conf',
+  hdfs_user = 'hdfs',
+  kinit_path_local = "/usr/bin/kinit",
+  mode = 0755,
+  owner = 'hcat',
+  bin_dir = '/usr/bin',
+  action = ['create_delayed'],
+  )
+self.assertResourceCalled('HdfsDirectory', None,
+  security_enabled = False,
+  keytab = UnknownConfigurationMock(),
+  conf_dir = '/etc/hadoop/conf',
+  hdfs_user = 'hdfs',
+  kinit_path_local = "/usr/bin/kinit",
+  bin_dir = '/usr/bin',
+  action = ['create'],
+  )
 self.assertResourceCalled('Directory', '/var/run/webhcat',
   owner = 'hcat',
   group = 'hadoop',
@@ -134,6 +165,42 @@ class TestWebHCatServer(RMFTestCase):
   group = 'hadoop',
   recursive = True,
   )
+self.assertResourceCalled('CopyFromLocal', 
'/usr/lib/hadoop-mapreduce/hadoop-streaming-*.jar',
+  owner='hcat',
+  mode=0755,
+  dest_dir='/apps/webhcat',
+  kinnit_if_needed='',
+  hadoop_conf_dir='/etc/hadoop/conf',
+  hadoop_bin_dir='/usr/bin',
+  hdfs_user='hdfs'
+)
+self.assertResourceCalled('CopyFromLocal', 
'/usr/share/HDP-webhcat/pig.tar.gz',
+  owner='hcat',
+  mode=0755,
+  dest_dir='/apps/webhcat',
+  kinnit_if_needed='',
+  hadoop_conf_dir='/etc/hadoop/conf',
+  hadoop_bin_dir='/usr/bin',
+  hdfs_user='hdfs'
+)
+self.assertResourceCalled('CopyFromLocal', 
'/usr/share/HDP-webhcat/hive.tar.gz',
+  owner='hcat',
+  mode=0755,
+  dest_dir='/apps/webhcat',
+  kinnit_if_needed='',
+  hadoop_bin_dir='/usr/bin',
+  hadoop_conf_dir='/etc/hadoop/conf',
+  hdfs_user='hdfs'
+)
+self.assertResourceCalled('CopyFromLocal', 
'/usr/share/HDP-webhcat/sqoop*.tar.gz',
+  owner='hcat',
+  mode=0755,
+  dest_dir='/apps/webhcat',
+  kinnit_if_needed='',
+  hadoop_bin_dir='/usr/bin',
+  hadoop_conf_dir='/etc/hadoop/conf',
+  hdfs_user='hdfs'
+)
 self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
   owner = 'hcat',
   group = 'hadoop',
@@ -158,6 +225,37 @@ class TestWebHCatServer(RMFTestCase):
   )
 
   def assert_configure_secured(self):
+self.assertResourceCalled('HdfsDirectory', '/apps/webhcat',
+  

[7/8] ambari git commit: Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

2015-05-11 Thread jonathanhurley
http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 6edca7d..bfd4e74 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -22,7 +22,6 @@ from resource_management import *
 from resource_management.libraries import functions
 import sys
 import os
-import glob
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 from urlparse import urlparse
@@ -82,96 +81,18 @@ def hive(name=None):
 
   if name == 'hiveserver2':
 
-if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >= 0:
-  
params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-  type="file",
-  action="create_on_execute",
-  source=params.mapreduce_tar_source,
-  group=params.user_group,
-  mode=params.tarballs_mode
-  )
-
-if params.hdp_stack_version_major !=  and 
compare_versions(params.hdp_stack_version_major, 2.2.0.0)  0:
-  params.HdfsResource(params.webhcat_apps_dir,
-   type=directory,
-   action=create_on_execute,
-   owner=params.webhcat_user,
-   mode=0755
-  )
-  
-if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-  params.HdfsResource(params.hcat_hdfs_user_dir,
-   type=directory,
-   action=create_on_execute,
-   owner=params.hcat_user,
-   mode=params.hcat_hdfs_user_mode
-  )
-params.HdfsResource(params.webhcat_hdfs_user_dir,
- type=directory,
- action=create_on_execute,
- owner=params.webhcat_user,
- mode=params.webhcat_hdfs_user_mode
-)
-  
-for src_filepath in glob.glob(params.hadoop_streaming_tar_source):
-  src_filename = os.path.basename(src_filepath)
-  
params.HdfsResource(InlineTemplate(params.hadoop_streaming_tar_destination_dir).get_content()
 + '/' + src_filename,
-  type=file,
-  action=create_on_execute,
-  source=src_filepath,
-  group=params.user_group,
-  mode=params.tarballs_mode
-  )
-  
-if (os.path.isfile(params.pig_tar_source)):
-  
params.HdfsResource(InlineTemplate(params.pig_tar_destination).get_content(),
-  type=file,
-  action=create_on_execute,
-  source=params.pig_tar_source,
-  group=params.user_group,
-  mode=params.tarballs_mode
-  )
-  
-
params.HdfsResource(InlineTemplate(params.hive_tar_destination).get_content(),
-type=file,
-action=create_on_execute,
-source=params.hive_tar_source,
-group=params.user_group,
-mode=params.tarballs_mode
-)
- 
-for src_filepath in glob.glob(params.sqoop_tar_source):
-  src_filename = os.path.basename(src_filepath)
-  
params.HdfsResource(InlineTemplate(params.sqoop_tar_destination_dir).get_content()
 + '/' + src_filename,
-  type=file,
-  action=create_on_execute,
-  source=src_filepath,
-  group=params.user_group,
-  mode=params.tarballs_mode
-  )
-  
-params.HdfsResource(params.hive_apps_whs_dir,
- type=directory,
-  action=create_on_execute,
-  owner=params.hive_user,
-  mode=0777
+params.HdfsDirectory(params.hive_apps_whs_dir,
+ action=create_delayed,
+ owner=params.hive_user,
+ mode=0777
 )
-params.HdfsResource(params.hive_hdfs_user_dir,
- type=directory,
-  action=create_on_execute,
-  owner=params.hive_user,
-  mode=params.hive_hdfs_user_mode
+params.HdfsDirectory(params.hive_hdfs_user_dir,
+ action=create_delayed,
+   

[1/8] ambari git commit: Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

2015-05-11 Thread jonathanhurley
Repository: ambari
Updated Branches:
  refs/heads/trunk 661d143b9 -> e833066e7


Revert AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)

This reverts commit 0564f0c3ba2f82235925101a33c4316082c43e98.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/20161e62
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/20161e62
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/20161e62

Branch: refs/heads/trunk
Commit: 20161e62769797b4780df1faccd20ab8b6f9e6da
Parents: d2ebd9c
Author: Jonathan Hurley jhur...@hortonworks.com
Authored: Mon May 11 15:20:32 2015 -0400
Committer: Jonathan Hurley jhur...@hortonworks.com
Committed: Mon May 11 15:30:02 2015 -0400

--
 .../YARN/2.1.0.2.0/package/scripts/params_linux.py | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/20161e62/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 8b5db38..7437e37 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -20,8 +20,11 @@ Ambari Agent
 
 import os
 
+<<<<<<< HEAD
 from resource_management.libraries.functions import conf_select
+=======
 from resource_management import *
+>>>>>>> AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.version import 
format_hdp_stack_version
@@ -98,12 +101,15 @@ if Script.is_hdp_stack_greater_or_equal(2.2):
   mapreduce_tar_source = 
config['configurations']['cluster-env']['mapreduce_tar_source']
   mapreduce_tar_destination = 
config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + 
/ + os.path.basename(mapreduce_tar_source)
 
+<<<<<<< HEAD
+=======
   # the configuration direction for HDFS/YARN/MapR is the hadoop config
   # directory, which is symlinked by hadoop-client only
   hadoop_conf_dir = /usr/hdp/current/hadoop-client/conf
   tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
   tez_tar_destination = 
config['configurations']['cluster-env']['tez_tar_destination_folder'] + / + 
os.path.basename(tez_tar_source)
 
+>>>>>>> AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)
 
 limits_conf_dir = /etc/security/limits.d
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + 
yarn_container_bin



ambari git commit: AMBARI-11043. falcon client not initalizing for secure clusters (rlevas)

2015-05-11 Thread rlevas
Repository: ambari
Updated Branches:
  refs/heads/trunk e833066e7 -> 78bc075ff


AMBARI-11043. falcon client not initalizing for secure clusters (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/78bc075f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/78bc075f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/78bc075f

Branch: refs/heads/trunk
Commit: 78bc075ffe8517a971a253dc1ce2762b58d1505a
Parents: e833066
Author: Robert Levas rle...@hortonworks.com
Authored: Mon May 11 16:02:33 2015 -0400
Committer: Robert Levas rle...@hortonworks.com
Committed: Mon May 11 16:02:33 2015 -0400

--
 .../main/resources/common-services/FALCON/0.5.0.2.1/kerberos.json | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/78bc075f/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/kerberos.json
--
diff --git 
a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/kerberos.json
 
b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/kerberos.json
index 6f34933..df3ba34 100644
--- 
a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/kerberos.json
+++ 
b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/kerberos.json
@@ -22,6 +22,9 @@
   }
 }
   ],
+  "auth_to_local_properties" : [
+    "falcon-startup.properties/*.falcon.http.authentication.kerberos.name.rules"
+  ],
   components: [
 {
   name: FALCON_SERVER,



ambari git commit: AMBARI-11045 - IllegalStateException: Work already begun on this thread. (tbeerbower)

2015-05-11 Thread tbeerbower
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.maint fc304d985 -> fc79361df


AMBARI-11045 - IllegalStateException: Work already begun on this thread. 
(tbeerbower)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fc79361d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fc79361d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fc79361d

Branch: refs/heads/branch-2.0.maint
Commit: fc79361dfb4b351ae3c0b95ba4df0aa9670a90df
Parents: fc304d9
Author: tbeerbower tbeerbo...@hortonworks.com
Authored: Mon May 11 16:32:01 2015 -0400
Committer: tbeerbower tbeerbo...@hortonworks.com
Committed: Mon May 11 16:33:33 2015 -0400

--
 .../server/controller/AmbariHandlerList.java| 105 ---
 .../controller/AmbariHandlerListTest.java   |  95 +
 2 files changed, 92 insertions(+), 108 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/fc79361d/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
index 607da10..4207007 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
@@ -17,23 +17,7 @@
  */
 package org.apache.ambari.server.controller;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
+import org.apache.ambari.server.api.AmbariPersistFilter;
 import org.apache.ambari.server.orm.entities.ViewEntity;
 import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
 import org.apache.ambari.server.view.ViewContextImpl;
@@ -52,6 +36,20 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.web.filter.DelegatingFilterProxy;
 
+import javax.inject.Inject;
+import javax.inject.Provider;
+import javax.inject.Singleton;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
 /**
  * An Ambari specific extension of the FailsafeHandlerList that allows for the 
addition
  * of view instances as handlers.
@@ -77,13 +75,23 @@ public class AmbariHandlerList extends HandlerCollection 
implements ViewInstance
   @Inject
   SessionManager sessionManager;
 
+  /**
+   * The web app context provider.
+   */
   @Inject
-  DelegatingFilterProxy springSecurityFilter;
+  Provider<WebAppContext> webAppContextProvider;
 
   /**
-   * The Handler factory.
+   * The persistence filter.
*/
-  private final HandlerFactory handlerFactory;
+  @Inject
+  AmbariPersistFilter persistFilter;
+
+  /**
+   * The security filter.
+   */
+  @Inject
+  DelegatingFilterProxy springSecurityFilter;
 
   /**
* Mapping of view instance entities to handlers.
@@ -108,30 +116,6 @@ public class AmbariHandlerList extends HandlerCollection 
implements ViewInstance
*/
   public AmbariHandlerList() {
 super(true);
-this.handlerFactory = new HandlerFactory() {
-  @Override
-  public Handler create(ViewInstanceEntity viewInstanceDefinition, String 
webApp, String contextPath) {
-
-WebAppContext context = new WebAppContext(webApp, contextPath);
-
-
context.setClassLoader(viewInstanceDefinition.getViewEntity().getClassLoader());
-context.setAttribute(ViewContext.CONTEXT_ATTRIBUTE, new 
ViewContextImpl(viewInstanceDefinition, viewRegistry));
-context.setSessionHandler(new SharedSessionHandler(sessionManager));
-context.addFilter(new FilterHolder(springSecurityFilter), "/*", 1);
-
-return context;
-  }
-};
-  }
-
-  /**
-   * Construct an AmbariHandlerList with the given handler factory.
-   *
-   * @param handlerFactory  the handler factory.
-   */
-  protected AmbariHandlerList(HandlerFactory handlerFactory) {
-super(true);
-this.handlerFactory = handlerFactory;
   }
 
 
@@ -241,8 +225,19 @@ public class AmbariHandlerList extends 

ambari git commit: AMBARI-11045 - IllegalStateException: Work already begun on this thread. (tbeerbower)

2015-05-11 Thread tbeerbower
Repository: ambari
Updated Branches:
  refs/heads/trunk 51475f2f9 -> 4cf145e71


AMBARI-11045 - IllegalStateException: Work already begun on this thread. 
(tbeerbower)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4cf145e7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4cf145e7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4cf145e7

Branch: refs/heads/trunk
Commit: 4cf145e71b70e872e79d73c736065ae9e8167f72
Parents: 51475f2
Author: tbeerbower tbeerbo...@hortonworks.com
Authored: Mon May 11 16:32:01 2015 -0400
Committer: tbeerbower tbeerbo...@hortonworks.com
Committed: Mon May 11 16:32:13 2015 -0400

--
 .../server/controller/AmbariHandlerList.java| 105 ---
 .../controller/AmbariHandlerListTest.java   |  95 +
 2 files changed, 92 insertions(+), 108 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/4cf145e7/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
--
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
index 607da10..4207007 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariHandlerList.java
@@ -17,23 +17,7 @@
  */
 package org.apache.ambari.server.controller;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
+import org.apache.ambari.server.api.AmbariPersistFilter;
 import org.apache.ambari.server.orm.entities.ViewEntity;
 import org.apache.ambari.server.orm.entities.ViewInstanceEntity;
 import org.apache.ambari.server.view.ViewContextImpl;
@@ -52,6 +36,20 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.web.filter.DelegatingFilterProxy;
 
+import javax.inject.Inject;
+import javax.inject.Provider;
+import javax.inject.Singleton;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
 /**
  * An Ambari specific extension of the FailsafeHandlerList that allows for the 
addition
  * of view instances as handlers.
@@ -77,13 +75,23 @@ public class AmbariHandlerList extends HandlerCollection 
implements ViewInstance
   @Inject
   SessionManager sessionManager;
 
+  /**
+   * The web app context provider.
+   */
   @Inject
-  DelegatingFilterProxy springSecurityFilter;
+  Provider<WebAppContext> webAppContextProvider;
 
   /**
-   * The Handler factory.
+   * The persistence filter.
*/
-  private final HandlerFactory handlerFactory;
+  @Inject
+  AmbariPersistFilter persistFilter;
+
+  /**
+   * The security filter.
+   */
+  @Inject
+  DelegatingFilterProxy springSecurityFilter;
 
   /**
* Mapping of view instance entities to handlers.
@@ -108,30 +116,6 @@ public class AmbariHandlerList extends HandlerCollection 
implements ViewInstance
*/
   public AmbariHandlerList() {
 super(true);
-this.handlerFactory = new HandlerFactory() {
-  @Override
-  public Handler create(ViewInstanceEntity viewInstanceDefinition, String 
webApp, String contextPath) {
-
-WebAppContext context = new WebAppContext(webApp, contextPath);
-
-
context.setClassLoader(viewInstanceDefinition.getViewEntity().getClassLoader());
-context.setAttribute(ViewContext.CONTEXT_ATTRIBUTE, new 
ViewContextImpl(viewInstanceDefinition, viewRegistry));
-context.setSessionHandler(new SharedSessionHandler(sessionManager));
-context.addFilter(new FilterHolder(springSecurityFilter), "/*", 1);
-
-return context;
-  }
-};
-  }
-
-  /**
-   * Construct an AmbariHandlerList with the given handler factory.
-   *
-   * @param handlerFactory  the handler factory.
-   */
-  protected AmbariHandlerList(HandlerFactory handlerFactory) {
-super(true);
-this.handlerFactory = handlerFactory;
   }
 
 
@@ -241,8 +225,19 @@ public class AmbariHandlerList extends HandlerCollection 
implements 

ambari git commit: AMBARI-11046 Update gluster specific fields for oozie and hive

2015-05-11 Thread eboyd
Repository: ambari
Updated Branches:
  refs/heads/trunk 4cf145e71 -> d05c9c287


AMBARI-11046 Update gluster specific fields for oozie and hive


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d05c9c28
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d05c9c28
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d05c9c28

Branch: refs/heads/trunk
Commit: d05c9c287151b4205e1bacad25bb6235b82a1d19
Parents: 4cf145e
Author: Erin A Boyd eb...@redhat.com
Authored: Mon May 11 16:30:21 2015 -0400
Committer: “Erin eb...@redhat.com
Committed: Mon May 11 16:37:04 2015 -0400

--
 .../services/HIVE/configuration/hive-site.xml   | 377 +++
 .../services/OOZIE/configuration/oozie-site.xml | 326 
 2 files changed, 703 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/d05c9c28/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HIVE/configuration/hive-site.xml
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HIVE/configuration/hive-site.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HIVE/configuration/hive-site.xml
new file mode 100644
index 000..6cfb4ec
--- /dev/null
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HIVE/configuration/hive-site.xml
@@ -0,0 +1,377 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<configuration supports_final="true">
+
+  <property>
+    <name>hive.heapsize</name>
+    <value>1024</value>
+    <description>Hive Java heap size</description>
+  </property>
+
+  property
+nameambari.hive.db.schema.name/name
+valuehive/value
+descriptionDatabase name used as the Hive Metastore/description
+  /property
+
+  property
+namejavax.jdo.option.ConnectionURL/name
+valuejdbc:mysql://localhost/hive?createDatabaseIfNotExist=true/value
+descriptionJDBC connect string for a JDBC metastore/description
+  /property
+
+  property
+namejavax.jdo.option.ConnectionDriverName/name
+valuecom.mysql.jdbc.Driver/value
+descriptionDriver class name for a JDBC metastore/description
+  /property
+
+  property
+namejavax.jdo.option.ConnectionUserName/name
+valuehive/value
+descriptionusername to use against metastore database/description
+  /property
+
+  property require-input=true
+namejavax.jdo.option.ConnectionPassword/name
+value /value
+property-typePASSWORD/property-type
+descriptionpassword to use against metastore database/description
+  /property
+
+  property
+namehive.metastore.warehouse.dir/name
+value/apps/hive/warehouse/value
+descriptionlocation of default database for the warehouse/description
+  /property
+
+  property
+namehive.metastore.sasl.enabled/name
+valuefalse/value
+descriptionIf true, the metastore thrift interface will be secured with 
SASL.
+ Clients must authenticate with Kerberos./description
+  /property
+
+  property
+namehive.metastore.cache.pinobjtypes/name
+valueTable,Database,Type,FieldSchema,Order/value
+descriptionList of comma separated metastore object types that should be 
pinned in the cache/description
+  /property
+
+  property
+namehive.metastore.uris/name
+valuethrift://localhost:9083/value
+descriptionURI for client to contact metastore server/description
+  /property
+
+  property
+namehive.metastore.pre.event.listeners/name
+
valueorg.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener/value
+descriptionPre-event listener classes to be loaded on the metastore side 
to run code
+  whenever databases, tables, and partitions are created, altered, or 
dropped.
+  Set to 
org.apache.hadoop.hive.ql.security.authorization.AuthorizationPreEventListener
+  if metastore-side authorization is desired./description
+  /property
+
+  property
+namehive.metastore.pre.event.listeners/name
+

ambari git commit: AMBARI-11042. Tez UI View: Support Auto Create and Cluster Association. (Sreenath Somarajapuram via yusaku)

2015-05-11 Thread yusaku
Repository: ambari
Updated Branches:
  refs/heads/trunk 21442eb8f -> 7aecacbf7


AMBARI-11042. Tez UI View: Support Auto Create and Cluster Association. 
(Sreenath Somarajapuram via yusaku)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7aecacbf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7aecacbf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7aecacbf

Branch: refs/heads/trunk
Commit: 7aecacbf74e063c2039cfbfa3f973447ef8aa4bf
Parents: 21442eb
Author: Yusaku Sako yus...@hortonworks.com
Authored: Mon May 11 17:15:53 2015 -0700
Committer: Yusaku Sako yus...@hortonworks.com
Committed: Mon May 11 18:08:15 2015 -0700

--
 .../ambari/view/tez/ConfigurationService.java   | 50 ++
 .../apache/ambari/view/tez/PropertyService.java | 68 
 .../apache/ambari/view/tez/SettingService.java  | 47 ++
 .../resources/ui/scripts/init-ambari-view.js| 10 ++-
 contrib/views/tez/src/main/resources/view.xml   | 40 ++--
 5 files changed, 207 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/7aecacbf/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/ConfigurationService.java
--
diff --git 
a/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/ConfigurationService.java
 
b/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/ConfigurationService.java
new file mode 100644
index 000..9f3c18d
--- /dev/null
+++ 
b/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/ConfigurationService.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.tez;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriInfo;
+
+/**
+ * The tez configuration service.
+ */
+public class ConfigurationService extends PropertyService {
+
+  /**
+   * Handles: GET /.
+   *
+   * @param headers   http headers
+   * @param uiuri info
+   *
+   * @return value JSON representation
+   */
+  @GET
+  @Path("/")
+  @Produces({"text/plain", "application/json"})
+  public Response getValue(@Context HttpHeaders headers, @Context UriInfo ui) {
+return Response.ok(getResponse(
+  ViewController.PARAM_YARN_ATS_URL,
+  ViewController.PARAM_YARN_RESOURCEMANAGER_URL)
+).build();
+  }
+} // end PropertyService

http://git-wip-us.apache.org/repos/asf/ambari/blob/7aecacbf/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/PropertyService.java
--
diff --git 
a/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/PropertyService.java
 
b/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/PropertyService.java
new file mode 100644
index 000..04803d9
--- /dev/null
+++ 
b/contrib/views/tez/src/main/java/org/apache/ambari/view/tez/PropertyService.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.tez;
+
+import org.apache.ambari.view.ViewContext;
+
+import javax.inject.Inject;
+import 

ambari git commit: AMBARI-11049. Some Hadoop Directory Parameters Are Wrong On Runnings Processes After Upgrade (ncole)

2015-05-11 Thread ncole
Repository: ambari
Updated Branches:
  refs/heads/trunk 66a4bfb26 -> 3da48c232


AMBARI-11049. Some Hadoop Directory Parameters Are Wrong On Runnings Processes 
After Upgrade (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3da48c23
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3da48c23
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3da48c23

Branch: refs/heads/trunk
Commit: 3da48c232f08dea8f536356e0b0b8f6e25a0bf2b
Parents: 66a4bfb
Author: Nate Cole nc...@hortonworks.com
Authored: Mon May 11 13:31:13 2015 -0400
Committer: Nate Cole nc...@hortonworks.com
Committed: Mon May 11 17:43:46 2015 -0400

--
 .../libraries/functions/conf_select.py  | 57 ---
 .../1.6.1.2.2.0/package/scripts/params.py   |  3 +-
 .../0.5.0.2.1/package/scripts/params_linux.py   |  5 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |  3 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |  9 +--
 .../0.12.0.2.0/package/scripts/params_linux.py  |  2 +-
 .../0.12.0.2.0/package/scripts/status_params.py |  1 +
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |  2 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |  8 +--
 .../0.12.0.2.0/package/scripts/params_linux.py  |  3 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  2 +-
 .../0.4.0.2.1/package/scripts/params_linux.py   |  3 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   | 10 +--
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |  7 +-
 .../2.0.6/hooks/before-ANY/scripts/params.py|  3 +-
 .../2.0.6/hooks/before-START/scripts/params.py  | 10 ++-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 75 
 17 files changed, 149 insertions(+), 54 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
--
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 45888d7..63b474f 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -18,13 +18,21 @@ limitations under the License.
 
 
 
-__all__ = ["select", "create"]
+__all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir"]
 
 import version
 from resource_management.core import shell
+from resource_management.core.exceptions import Fail
 from resource_management.libraries.script.script import Script
 
TEMPLATE = "conf-select {0} --package {1} --stack-version {2} --conf-version 0"
+HADOOP_DIR_TEMPLATE = "/usr/hdp/{0}/{1}/{2}"
+HADOOP_DIR_DEFAULTS = {
+  "libexec": "/usr/lib/hadoop/libexec",
+  "sbin": "/usr/lib/hadoop/sbin",
+  "bin": "/usr/bin",
+  "lib": "/usr/lib/hadoop/lib"
+}
 
 def _valid(stack_name, package, ver):
  if stack_name != "HDP":
@@ -35,6 +43,17 @@ def _valid(stack_name, package, ver):
 
   return True
 
+def _is_upgrade():
+  from resource_management.libraries.functions.default import default
+  direction = default("/commandParams/upgrade_direction", None)
+  stack_name = default("/hostLevelParams/stack_name", None)
+  ver = default("/commandParams/version", None)
+
+  if direction and stack_name and ver:
+return (stack_name, ver)
+
+  return None
+
 def create(stack_name, package, version):
   
   Creates a config version for the specified package
@@ -76,22 +95,40 @@ def get_hadoop_conf_dir():
   the configs are written in the correct place
   
 
-  config = Script.get_config()
   hadoop_conf_dir = "/etc/hadoop/conf"
 
   if Script.is_hdp_stack_greater_or_equal("2.2"):
-from resource_management.libraries.functions.default import default
-
 hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
 
-direction = default("/commandParams/upgrade_direction", None)
-ver = default("/commandParams/version", None)
-stack_name = default("/hostLevelParams/stack_name", None)
+res = _is_upgrade()
 
-if direction and ver and stack_name and Script.is_hdp_stack_greater_or_equal("2.3"):
-  select(stack_name, "hadoop", ver)
-  hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(ver)
+if res is not None and Script.is_hdp_stack_greater_or_equal("2.3"):
+  select(res[0], "hadoop", res[1])
+  hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(res[1])
 
   return hadoop_conf_dir
 
+def get_hadoop_dir(target):
+  """
+  Return the hadoop shared directory in the following override order
+  1. Use default for 2.1 and lower
+  2. If 2.2 and higher, use /usr/hdp/current/hadoop-client/{target}
+  3. If 2.2 and higher AND for an upgrade, use /usr/hdp/<version>/hadoop/{target}
+  :target: the target directory
+  """

ambari git commit: AMBARI-11051: [WinTP2] Build choco package for ambari-metrics (jluniya)

2015-05-11 Thread jluniya
Repository: ambari
Updated Branches:
  refs/heads/trunk 54fd07325 -> 6dfa24550


AMBARI-11051: [WinTP2] Build choco package for ambari-metrics (jluniya)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6dfa2455
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6dfa2455
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6dfa2455

Branch: refs/heads/trunk
Commit: 6dfa2455088182d4d5048c6f30aec042a518b3af
Parents: 54fd073
Author: Jayush Luniya jlun...@hortonworks.com
Authored: Mon May 11 17:39:34 2015 -0700
Committer: Jayush Luniya jlun...@hortonworks.com
Committed: Mon May 11 17:39:34 2015 -0700

--
 .../core/providers/__init__.py  |   3 +-
 .../core/providers/package/choco.py |  96 +
 .../libraries/script/script.py  |   8 +-
 ambari-metrics/ambari-metrics-assembly/pom.xml  | 204 ++-
 .../main/assembly/collector-windows-choco.xml   |  51 +
 .../src/main/assembly/collector-windows.xml |   8 +-
 .../src/main/assembly/monitor-windows-choco.xml |  51 +
 .../src/main/assembly/monitor-windows.xml   |   8 +-
 .../src/main/assembly/sink-windows-choco.xml|  51 +
 .../src/main/assembly/sink-windows.xml  |   6 +-
 .../collector/ambari-metrics-collector.nuspec   |  26 +++
 .../choco/collector/chocolateyinstall.ps1   |  78 +++
 .../choco/collector/chocolateyuninstall.ps1 |  69 +++
 .../choco/monitor/ambari-metrics-monitor.nuspec |  26 +++
 .../package/choco/monitor/chocolateyinstall.ps1 |  77 +++
 .../choco/monitor/chocolateyuninstall.ps1   |  69 +++
 .../sink/ambari-metrics-hadoop-sink.nuspec  |  26 +++
 .../package/choco/sink/chocolateyinstall.ps1|  75 +++
 .../package/choco/sink/chocolateyuninstall.ps1  |  69 +++
 .../src/main/package/msi/collector.wxs  |  70 ---
 .../src/main/package/msi/monitor.wxs|  70 ---
 .../src/main/package/msi/sink.wxs   |  63 --
 ambari-metrics/pom.xml  |   1 +
 ambari-server/src/main/python/setupAgent.py |   2 +-
 .../AMBARI_METRICS/0.1.0/metainfo.xml   |   6 +-
 25 files changed, 837 insertions(+), 376 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/6dfa2455/ambari-common/src/main/python/resource_management/core/providers/__init__.py
--
diff --git 
a/ambari-common/src/main/python/resource_management/core/providers/__init__.py 
b/ambari-common/src/main/python/resource_management/core/providers/__init__.py
index 0cefd12..abb51c3 100644
--- 
a/ambari-common/src/main/python/resource_management/core/providers/__init__.py
+++ 
b/ambari-common/src/main/python/resource_management/core/providers/__init__.py
@@ -58,7 +58,8 @@ PROVIDERS = dict(
 
ServiceConfig=resource_management.core.providers.windows.service.ServiceConfigProvider,
 
Execute=resource_management.core.providers.windows.system.ExecuteProvider,
 File=resource_management.core.providers.windows.system.FileProvider,
-
Directory=resource_management.core.providers.windows.system.DirectoryProvider
+
Directory=resource_management.core.providers.windows.system.DirectoryProvider,
+Package=resource_management.core.providers.package.choco.ChocoProvider
   ),
   default=dict(
 File=resource_management.core.providers.system.FileProvider,

http://git-wip-us.apache.org/repos/asf/ambari/blob/6dfa2455/ambari-common/src/main/python/resource_management/core/providers/package/choco.py
--
diff --git 
a/ambari-common/src/main/python/resource_management/core/providers/package/choco.py
 
b/ambari-common/src/main/python/resource_management/core/providers/package/choco.py
new file mode 100644
index 000..531372c
--- /dev/null
+++ 
b/ambari-common/src/main/python/resource_management/core/providers/package/choco.py
@@ -0,0 +1,96 @@
+
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+License); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+
+
+from 

ambari git commit: AMBARI-11039. RU wrong version of tez.tar.gz uploaded to HDFS (alejandro)

2015-05-11 Thread alejandro
Repository: ambari
Updated Branches:
  refs/heads/branch-2.0.maint 15d3f0d29 -> fc304d985


AMBARI-11039. RU wrong version of tez.tar.gz uploaded to HDFS (alejandro)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fc304d98
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fc304d98
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fc304d98

Branch: refs/heads/branch-2.0.maint
Commit: fc304d985e4ed0802b31c0c488344c8c3e5b9e85
Parents: 15d3f0d
Author: Alejandro Fernandez afernan...@hortonworks.com
Authored: Fri May 8 19:08:43 2015 -0700
Committer: Alejandro Fernandez afernan...@hortonworks.com
Committed: Mon May 11 11:12:49 2015 -0700

--
 .../dynamic_variable_interpretation.py  | 95 +---
 1 file changed, 61 insertions(+), 34 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/fc304d98/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
--
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
index 1e70219..b32feb3 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/dynamic_variable_interpretation.py
@@ -20,11 +20,9 @@ limitations under the License.
 
 __all__ = ["copy_tarballs_to_hdfs", ]
 import os
-import glob
 import re
 import tempfile
 import uuid
-from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.resources.copy_from_local import 
CopyFromLocal
 from resource_management.libraries.resources.execute_hadoop import 
ExecuteHadoop
@@ -33,6 +31,35 @@ from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
 from resource_management.core import shell
 
+
+# For a given stack, define a component, such as tez or hive, with a 2-tuple 
that defines
+# (a, b), where
+# a = source file to use
+# b = destination folder to copy file to in HDFS.
+# {{ hdp_stack_version }} is dynamically interpreted based on the version
+SOURCE_TO_DESTINATION = {"HDP":
+  {
+    "tez":
+      ("/usr/hdp/{{ hdp_stack_version }}/tez/lib/tez.tar.gz",
+       "/hdp/apps/{{ hdp_stack_version }}/tez/"),
+    "hive":
+      ("/usr/hdp/{{ hdp_stack_version }}/hive/hive.tar.gz",
+       "/hdp/apps/{{ hdp_stack_version }}/hive/"),
+    "pig":
+      ("/usr/hdp/{{ hdp_stack_version }}/pig/pig.tar.gz",
+       "/hdp/apps/{{ hdp_stack_version }}/pig/"),
+    "hadoop-streaming":
+      ("/usr/hdp/{{ hdp_stack_version }}/hadoop-mapreduce/hadoop-streaming.jar",
+       "/hdp/apps/{{ hdp_stack_version }}/mapreduce/"),
+    "sqoop":
+      ("/usr/hdp/{{ hdp_stack_version }}/sqoop/sqoop.tar.gz",
+       "/hdp/apps/{{ hdp_stack_version }}/sqoop/"),
+    "mapreduce":
+      ("/usr/hdp/{{ hdp_stack_version }}/hadoop/mapreduce.tar.gz",
+       "/hdp/apps/{{ hdp_stack_version }}/mapreduce/")
+  }
+}
+
 
 This file provides helper methods needed for the versioning of RPMs. 
Specifically, it does dynamic variable
 interpretation to replace strings like {{ hdp_stack_version }}  where the 
value of the
@@ -43,42 +70,37 @@ E.g., 998.2.2.1.0-998
 Please note that -${build_number} is optional.
 
 
-# These values must be the suffix of the properties in cluster-env.xml
-TAR_SOURCE_SUFFIX = "_tar_source"
-TAR_DESTINATION_FOLDER_SUFFIX = "_tar_destination_folder"
-
 
-def _get_tar_source_and_dest_folder(tarball_prefix):
+def _get_tar_source_and_dest_folder(stack_name, tarball_prefix):
   
+  :param stack_name: Stack name, such as HDP
   :param tarball_prefix: Prefix of the tarball must be one of tez, hive, mr, 
pig
-  :return: Returns a tuple of (x, y) after verifying the properties
+  :return: Returns a tuple of (source_file, destination_folder) after 
verifying the properties
   
-  component_tar_source_file = default(/configurations/cluster-env/%s%s % 
(tarball_prefix.lower(), TAR_SOURCE_SUFFIX), None)
-  # E.g., /usr/hdp/current/hadoop-client/tez-{{ hdp_stack_version }}.tar.gz
-
-  component_tar_destination_folder = 
default(/configurations/cluster-env/%s%s % (tarball_prefix.lower(), 
TAR_DESTINATION_FOLDER_SUFFIX), None)
-  # E.g., hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/
+  if stack_name is None or stack_name not in SOURCE_TO_DESTINATION:
+Logger.warning(Did not find stack_name %s in dictionary. % 
str(stack_name))
+return None, None
 
-  if not component_tar_source_file or not component_tar_destination_folder:
-