[2/2] ambari git commit: AMBARI-11709. Heatmaps are not getting displayed. (jaimin)
AMBARI-11709. Heatmaps are not getting displayed. (jaimin) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/368ee72f Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/368ee72f Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/368ee72f Branch: refs/heads/trunk Commit: 368ee72fa45e82c878d9fbc51fa755fe1b44b9cd Parents: 3afdc67 Author: Jaimin Jetly Authored: Thu Jun 4 17:27:04 2015 -0700 Committer: Jaimin Jetly Committed: Thu Jun 4 22:38:24 2015 -0700 -- ambari-web/app/mixins/common/widgets/widget_mixin.js | 7 --- 1 file changed, 4 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/368ee72f/ambari-web/app/mixins/common/widgets/widget_mixin.js -- diff --git a/ambari-web/app/mixins/common/widgets/widget_mixin.js b/ambari-web/app/mixins/common/widgets/widget_mixin.js index c48aecb..95c7555 100644 --- a/ambari-web/app/mixins/common/widgets/widget_mixin.js +++ b/ambari-web/app/mixins/common/widgets/widget_mixin.js @@ -298,13 +298,13 @@ App.WidgetMixin = Ember.Mixin.create({ }, /** - * make GET call to get host component metrics accross + * make GET call to get metrics value for all host components * @param {object} request * @return {$.ajax} */ getHostComponentsMetrics: function (request) { request.metric_paths.forEach(function (_metric, index) { - request.metric_paths[index] = "host_components/" + _metric; + request.metric_paths[index] = "host_components/" + _metric.metric_path; }); return App.ajax.send({ name: 'widgets.serviceComponent.metrics.get', @@ -334,11 +334,12 @@ App.WidgetMixin = Ember.Mixin.create({ }, getHostsMetrics: function (request) { +var metricPaths = request.metric_paths.mapProperty('metric_path'); return App.ajax.send({ name: 'widgets.hosts.metrics.get', sender: this, data: { -metricPaths: request.metric_paths.join(',') +metricPaths: metricPaths.join(',') }, success: 'getHostsMetricsSuccessCallback' });
[1/2] ambari git commit: AMBARI-11710. Some widgets and Heatmap for YARN and HDFS does not show for HDP 2.2 . (jaimin)
Repository: ambari Updated Branches: refs/heads/trunk 3afdc672a -> 3bb8af2bb AMBARI-11710. Some widgets and Heatmap for YARN and HDFS does not show for HDP 2.2 . (jaimin) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3bb8af2b Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3bb8af2b Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3bb8af2b Branch: refs/heads/trunk Commit: 3bb8af2bb11f94a4134d7dc98f3c34e881640a53 Parents: 368ee72 Author: Jaimin Jetly Authored: Thu Jun 4 22:38:11 2015 -0700 Committer: Jaimin Jetly Committed: Thu Jun 4 22:38:24 2015 -0700 -- .../HBASE/0.96.0.2.0/widgets.json | 10 +- .../common-services/HDFS/2.1.0.2.0/widgets.json | 192 -- .../YARN/2.1.0.2.0/YARN_widgets.json| 91 +-- .../stacks/HDP/2.3/services/HDFS/widgets.json | 620 + .../stacks/HDP/2.3/services/YARN/widgets.json | 676 +++ 5 files changed, 1302 insertions(+), 287 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/3bb8af2b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/widgets.json -- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/widgets.json b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/widgets.json index aaf3b9e..9b45034 100644 --- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/widgets.json +++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/widgets.json @@ -382,7 +382,7 @@ "section_name": "HBASE_HEATMAPS", "widgetLayoutInfo": [ { - "widget_name": "HBASE_COMPACTION_QUEUE_SIZE", + "widget_name": "HBase Compaction Queue Size", "description": "", "widget_type": "HEATMAP", "is_visible": true, @@ -406,7 +406,7 @@ } }, { - "widget_name": "HBASE_MEMSTORE_SIZES", + "widget_name": "HBase Memstore Sizes", "description": "", "widget_type": "HEATMAP", "is_visible": false, @@ -430,7 +430,7 @@ } }, { - "widget_name": "HBASE_READ_REQUEST", + "widget_name": "HBase Read Request 
Count", "description": "", "widget_type": "HEATMAP", "is_visible": false, @@ -453,7 +453,7 @@ } }, { - "widget_name": "HBASE_WRITE_REQUEST", + "widget_name": "HBase Write Request Count", "description": "", "widget_type": "HEATMAP", "is_visible": false, @@ -476,7 +476,7 @@ } }, { - "widget_name": "HBASE_REGIONS", + "widget_name": "HBase Regions", "description": "", "widget_type": "HEATMAP", "is_visible": false, http://git-wip-us.apache.org/repos/asf/ambari/blob/3bb8af2b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json -- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json index 580b129..7e93a6e 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/widgets.json @@ -247,54 +247,6 @@ } }, { - "widget_name": "NameNode Operations", - "description": "Total number of file operation over time.", - "widget_type": "GRAPH", - "is_visible": false, - "metrics": [ -{ - "name": "dfs.namenode.TotalFileOps", - "metric_path": "metrics/dfs/namenode/TotalFileOps", - "service_name": "HDFS", - "component_name": "NAMENODE", - "host_component_criteria": "host_components/metrics/dfs/FSNamesystem/HAState=active" -} - ], - "values": [ -{ - "name": "NameNode File Operations", - "value": "${dfs.namenode.TotalFileOps}" -} - ], - "properties": { -"graph_type": "LINE", -"time_range": "1" - } -}, -{ - "widget_name": "Failed disk volumes", - "description": "Number of Failed disk volumes across all DataNodes. Its indicative of HDFS bad health.", - "widget_type": "NUMBER", - "is_visible": true, - "metrics": [ -{ - "name": "FSDatasetState.org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.NumFailedVolumes._sum",
[1/2] ambari git commit: AMBARI-11671. Remove SetGoalState when stopping Accumulo components (Billie Rinaldi via smohanty)
Repository: ambari Updated Branches: refs/heads/trunk cbcadd2c3 -> 3afdc672a AMBARI-11671. Remove SetGoalState when stopping Accumulo components (Billie Rinaldi via smohanty) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/56876e2a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/56876e2a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/56876e2a Branch: refs/heads/trunk Commit: 56876e2a9f7df59c15469183ee6f03a30023d470 Parents: cbcadd2 Author: Sumit Mohanty Authored: Thu Jun 4 19:46:11 2015 -0700 Committer: Sumit Mohanty Committed: Thu Jun 4 19:46:11 2015 -0700 -- .../ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_service.py| 5 ----- 1 file changed, 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/56876e2a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_service.py -- diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_service.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_service.py index f1385e0..0c35c98 100644 --- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_service.py +++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_service.py @@ -60,11 +60,6 @@ def accumulo_service( name, timeout=30, user=params.accumulo_user ) -elif name != 'monitor': - Execute(format("{daemon_script} org.apache.accumulo.master.state.SetGoalState SAFE_MODE"), - not_if=pid_exists, - user=params.accumulo_user - ) except: pass
[2/2] ambari git commit: AMBARI-11671. Accumulo GC has port collision with SecondaryNameNode (Billie Rinaldi via smohanty)
AMBARI-11671. Accumulo GC has port collision with SecondaryNameNode (Billie Rinaldi via smohanty) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3afdc672 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3afdc672 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3afdc672 Branch: refs/heads/trunk Commit: 3afdc672a6ffa752462f3d5d17571d13dcdbfaa1 Parents: 56876e2 Author: Sumit Mohanty Authored: Thu Jun 4 19:46:56 2015 -0700 Committer: Sumit Mohanty Committed: Thu Jun 4 19:46:56 2015 -0700 -- .../resources/common-services/ACCUMULO/1.6.1.2.2.0/alerts.json | 2 +- .../ACCUMULO/1.6.1.2.2.0/configuration/accumulo-site.xml | 2 +- ambari-server/src/test/python/stacks/2.2/configs/default.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/3afdc672/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/alerts.json -- diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/alerts.json b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/alerts.json index ac09dc6..fb2cf76 100644 --- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/alerts.json +++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/alerts.json @@ -64,7 +64,7 @@ "source": { "type": "PORT", "uri": "{{accumulo-site/gc.port.client}}", - "default_port": 50091, + "default_port": 50092, "reporting": { "ok": { "text": "TCP OK - {0:.3f}s response on port {1}" http://git-wip-us.apache.org/repos/asf/ambari/blob/3afdc672/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-site.xml -- diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-site.xml b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-site.xml index 622ec5a..2a7e083 100644 --- 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-site.xml +++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/configuration/accumulo-site.xml @@ -104,7 +104,7 @@ gc.port.client -50091 +50092 Port for monitoring the Accumulo garbage collector. http://git-wip-us.apache.org/repos/asf/ambari/blob/3afdc672/ambari-server/src/test/python/stacks/2.2/configs/default.json -- diff --git a/ambari-server/src/test/python/stacks/2.2/configs/default.json b/ambari-server/src/test/python/stacks/2.2/configs/default.json index 0dc9ad0..ee8fbc4 100644 --- a/ambari-server/src/test/python/stacks/2.2/configs/default.json +++ b/ambari-server/src/test/python/stacks/2.2/configs/default.json @@ -358,7 +358,7 @@ "tserver.memory.maps.native.enabled": "true", "general.classpaths": "\n$ACCUMULO_HOME/lib/accumulo-server.jar,\n$ACCUMULO_HOME/lib/accumulo-core.jar,\n$ACCUMULO_HOME/lib/accumulo-start.jar,\n$ACCUMULO_HOME/lib/accumulo-fate.jar,\n$ACCUMULO_HOME/lib/accumulo-proxy.jar,\n$ACCUMULO_HOME/lib/[^.].*.jar,\n$ZOOKEEPER_HOME/zookeeper[^.].*.jar,\n$HADOOP_CONF_DIR,\n/usr/hdp/current/hadoop-client/[^.].*.jar,\n/usr/hdp/current/hadoop-client/lib/(?!slf4j)[^.].*.jar,\n/usr/hdp/current/hadoop-hdfs-client/[^.].*.jar,\n/usr/hdp/current/hadoop-mapreduce-client/[^.].*.jar,\n/usr/hdp/current/hadoop-yarn-client/[^.].*.jar,\n/usr/hdp/current/hadoop-yarn-client/lib/jersey.*.jar,\n/usr/hdp/current/hive-client/lib/hive-accumulo-handler.jar,", "monitor.port.log4j": "4560", -"gc.port.client": "50091", +"gc.port.client": "50092", "tserver.memory.maps.max": "1G", "tserver.sort.buffer.size": "200M", "tserver.cache.index.size": "256M",
ambari git commit: AMBARI-11704. Mahout service check fails due to bad folder permission for /user/ambari-qa
Repository: ambari Updated Branches: refs/heads/trunk ef9a456a7 -> cbcadd2c3 AMBARI-11704. Mahout service check fails due to bad folder permission for /user/ambari-qa Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cbcadd2c Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cbcadd2c Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cbcadd2c Branch: refs/heads/trunk Commit: cbcadd2c3e1006eece7ff7f45122cf606cc2160b Parents: ef9a456 Author: Sumit Mohanty Authored: Thu Jun 4 19:35:18 2015 -0700 Committer: Sumit Mohanty Committed: Thu Jun 4 19:35:18 2015 -0700 -- .../2.1.0.2.0/package/scripts/hdfs_namenode.py| 5 - .../python/stacks/2.0.6/HDFS/test_namenode.py | 18 +- 2 files changed, 13 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/cbcadd2c/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py -- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py index eec864c..d26d145 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py @@ -90,7 +90,10 @@ def namenode(action=None, do_format=True, rolling_restart=False, env=None): user = params.hdfs_user) is_namenode_safe_mode_off = format("hadoop dfsadmin -fs {namenode_address} -safemode get | grep 'Safe mode is OFF'") -is_active_namenode_cmd = as_user(format("hdfs --config {hadoop_conf_dir} haadmin -getServiceState {namenode_id} | grep active"), params.hdfs_user, env={'PATH':params.hadoop_bin_dir}) +if params.dfs_ha_enabled: + is_active_namenode_cmd = as_user(format("hdfs --config {hadoop_conf_dir} haadmin -getServiceState {namenode_id} | grep active"), 
params.hdfs_user, env={'PATH':params.hadoop_bin_dir}) +else: + is_active_namenode_cmd = None # During normal operations, if HA is enabled and it is in standby, then stay in current state, otherwise, leave safemode. # During Rolling Upgrade, both namenodes must leave safemode. http://git-wip-us.apache.org/repos/asf/ambari/blob/cbcadd2c/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py index ddab048..b920c17 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py @@ -101,7 +101,7 @@ class TestNamenode(RMFTestCase): ) self.assertResourceCalled('HdfsResource', '/tmp', security_enabled = False, -only_if="ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'", +only_if=None, keytab = UnknownConfigurationMock(), hadoop_bin_dir = '/usr/bin', default_fs = 'wasb://a...@c6401.ambari.apache.org', @@ -117,7 +117,7 @@ class TestNamenode(RMFTestCase): ) self.assertResourceCalled('HdfsResource', '/user/ambari-qa', security_enabled = False, -only_if="ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'", +only_if=None, keytab = UnknownConfigurationMock(), hadoop_bin_dir = '/usr/bin', default_fs = 'wasb://a...@c6401.ambari.apache.org', @@ -133,7 +133,7 @@ class TestNamenode(RMFTestCase): ) self.assertResourceCalled('HdfsResource', None, security_enabled = False, -only_if="ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'", +only_if=None, keytab = UnknownConfigurationMock(), hadoop_bin_dir = '/usr/bin', default_fs = 'wasb://a...@c6401.ambari.apache.org', 
@@ -217,7 +217,7 @@ class TestNamenode(RMFTestCase): ) self.assertResourceCalled('HdfsResource', '/tmp', security_enabled = False, -only_if = "ambari-sudo.sh su hdfs -l -s /bin/bash -c 'export PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState None | grep active'", +only_if = None, keytab = UnknownConfigurationMock(),
ambari git commit: AMBARI-11699 - Removing A Host Does Not Recalculate Aggregate Alerts (jonathanhurley)
Repository: ambari Updated Branches: refs/heads/trunk d99abd01f -> ef9a456a7 AMBARI-11699 - Removing A Host Does Not Recalculate Aggregate Alerts (jonathanhurley) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ef9a456a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ef9a456a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ef9a456a Branch: refs/heads/trunk Commit: ef9a456a7687d0478ba49cc4e50b1877704be304 Parents: d99abd0 Author: Jonathan Hurley Authored: Thu Jun 4 15:44:54 2015 -0400 Committer: Jonathan Hurley Committed: Thu Jun 4 20:12:55 2015 -0400 -- .../events/AggregateAlertRecalculateEvent.java | 47 + .../alerts/AlertAggregateListener.java | 49 - .../alerts/AlertDefinitionDisabledListener.java | 9 + .../alerts/AlertHashInvalidationListener.java | 5 +- .../alerts/AlertLifecycleListener.java | 6 + .../alerts/AlertMaintenanceModeListener.java| 2 + .../AlertServiceComponentHostListener.java | 11 +- .../alerts/AlertServiceStateListener.java | 11 +- .../alerts/AlertStateChangedListener.java | 2 + .../apache/ambari/server/orm/dao/AlertsDAO.java | 179 --- .../state/alert/AggregateDefinitionMapping.java | 35 .../ambari/server/orm/dao/AlertsDAOTest.java| 8 +- .../alerts/AlertStateChangedEventTest.java | 28 ++- .../state/cluster/AlertDataManagerTest.java | 13 +- 14 files changed, 319 insertions(+), 86 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/ef9a456a/ambari-server/src/main/java/org/apache/ambari/server/events/AggregateAlertRecalculateEvent.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/AggregateAlertRecalculateEvent.java b/ambari-server/src/main/java/org/apache/ambari/server/events/AggregateAlertRecalculateEvent.java new file mode 100644 index 000..4c9641d --- /dev/null +++ b/ambari-server/src/main/java/org/apache/ambari/server/events/AggregateAlertRecalculateEvent.java @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache 
Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ambari.server.events; + + +/** + * The {@link AggregateAlertRecalculateEvent} is used to trigger the + * recalculation of all aggregate alerts. + */ +public class AggregateAlertRecalculateEvent extends AlertEvent { + + /** + * Constructor. + * + * @param clusterId + * the ID of the cluster to recalculate aggregate alerts for. 
+ */ + public AggregateAlertRecalculateEvent(long clusterId) { +super(clusterId, null); + } + + /** + * {@inheritDoc} + */ + @Override + public String toString() { +StringBuilder buffer = new StringBuilder("AggregateAlertRecalculateEvent{ "); +buffer.append("cluserId=").append(m_clusterId); +buffer.append("}"); +return buffer.toString(); + } +} http://git-wip-us.apache.org/repos/asf/ambari/blob/ef9a456a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertAggregateListener.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertAggregateListener.java b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertAggregateListener.java index 99542ef..4d2add1 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertAggregateListener.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/events/listeners/alerts/AlertAggregateListener.java @@ -18,8 +18,10 @@ package org.apache.ambari.server.events.listeners.alerts; import java.text.MessageFormat; +import java.util.List; import org.apache.ambari.server.EagerSingleton; +import org.apache.ambari.server.events.AggregateAlertRecalculateEvent; import org.apache.ambari.server.events.AlertReceivedEvent; import org.apache.ambari.server.events.AlertStateChangeEvent; import org.apache.am
ambari git commit: AMBARI-11677. HDP-2.3 stack pig.properties should be updated (srimanth)
Repository: ambari Updated Branches: refs/heads/trunk 7ff1bbeff -> d99abd01f AMBARI-11677. HDP-2.3 stack pig.properties should be updated (srimanth) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d99abd01 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d99abd01 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d99abd01 Branch: refs/heads/trunk Commit: d99abd01fa714f52ae8b9b2936571f58bf7145db Parents: 7ff1bbe Author: Srimanth Gunturi Authored: Thu Jun 4 15:45:02 2015 -0700 Committer: Srimanth Gunturi Committed: Thu Jun 4 17:03:01 2015 -0700 -- .../PIG/configuration/pig-properties.xml| 634 +++ 1 file changed, 634 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/d99abd01/ambari-server/src/main/resources/stacks/HDP/2.3/services/PIG/configuration/pig-properties.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/PIG/configuration/pig-properties.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/PIG/configuration/pig-properties.xml new file mode 100644 index 000..c18e25a --- /dev/null +++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/PIG/configuration/pig-properties.xml @@ -0,0 +1,634 @@ + + + + + + + +content + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +# Pig configuration file. All values can be overwritten by command line +# arguments; for a description of the properties, run +# +# pig -h properties +# + + +# +# == Logging properties +# + +# Location of pig log file. If blank, a file with a timestamped slug +# ('pig_1399336559369.log') will be generated in the current working directory. +# +# pig.logfile= +# pig.logfile=/tmp/pig-err.log + +# Log4j configuration file. Set at runtime with the -4 parameter. The source +# distribution has a ./conf/log4j.properties.template file you can rename and +# customize. +# +# log4jconf=./conf/log4j.properties + +# Verbose Output. +# * false (default): print only INFO and above to screen +# * true: Print all log messages to screen +# +# verbose=false + +# Omit timestamps on log messages. (default: false) +# +# brief=false + +# Logging level. debug=OFF|ERROR|WARN|INFO|DEBUG (default: INFO) +# +# debug=INFO + +# Roll up warnings across tasks, so that when millions of mappers suddenly cry +# out in error they are partially silenced. (default, recommended: true) +# +# aggregate.warning=true + +# Should DESCRIBE pretty-print its schema? +# * false (default): print on a single-line, suitable for pasting back in to your script +# * true (recommended): prints on multiple lines with indentation, much more readable +# +# pig.pretty.print.schema=false + +# === Profiling UDFs === + +# Turn on UDF timers? This will cause two counters to be +# tracked for every UDF and LoadFunc in your script: approx_microsecs measures +# approximate time spent inside a UDF approx_invocations reports the approximate +# number of times the UDF was invoked. +# +# * false (default): do not record timing information of UDFs. +# * true: report UDF performance. 
Uses more counters, but gives more insight +# into script operation +# +# pig.udf.profile=false + +# Specify frequency of profiling (default: every 100th). +# pig.udf.profile.frequency=100 + + +# +# == Site-specific Properties +# + +# Execution Mode. Local mode is much faster, but only suitable for small amounts +# of data. Local mode interprets paths on the local file system; Mapreduce mode +# on the HDFS. Read more under 'Execution Modes' within the Getting Started +# documentation. +# +# * mapreduce (default): use the Hadoop cluster defined in your Hadoop config files +# * local: use local mode +# * tez: use Tez on Hadoop cluster +# * tez_local: use Tez local mode +# +# exectype=mapreduce + +# Bootstrap file with default statements t
ambari git commit: AMBARI-8504 Configuration parameter 'io.compression.codecs' missing in HDP 2.3.GlusterFS stack in ambari-2.1
Repository: ambari Updated Branches: refs/heads/trunk ec16c5f7c -> 7ff1bbeff AMBARI-8504 Configuration parameter 'io.compression.codecs' missing in HDP 2.3.GlusterFS stack in ambari-2.1 Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7ff1bbef Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7ff1bbef Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7ff1bbef Branch: refs/heads/trunk Commit: 7ff1bbeff1167c1d980e2eacdfa2bb42af1c6172 Parents: ec16c5f Author: Scott Creeley Authored: Thu Jun 4 17:21:38 2015 -0400 Committer: Scott Creeley Committed: Thu Jun 4 19:57:36 2015 -0400 -- .../services/HIVE/configuration/hive-site.xml | 8 +- .../services/TEZ/configuration/tez-site.xml | 98 .../2.3.GlusterFS/services/TEZ/kerberos.json| 19 3 files changed, 123 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/7ff1bbef/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HIVE/configuration/hive-site.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HIVE/configuration/hive-site.xml index 6a4e349..c972ee5 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HIVE/configuration/hive-site.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/HIVE/configuration/hive-site.xml @@ -161,7 +161,6 @@ limitations under the License. Enable user impersonation for HiveServer2 true - hive.server2.authentication Authentication mode, default NONE. Options are NONE, NOSASL, KERBEROS, LDAP, PAM and CUSTOM @@ -384,6 +383,11 @@ limitations under the License. +hive.server2.authentication +Authentication mode, default NONE. Options are NONE, NOSASL, KERBEROS, LDAP, PAM and CUSTOM +NONE + + hive.server2.authentication.ldap.url @@ -395,7 +399,7 @@ limitations under the License. 
hive.server2.authentication.ldap.baseDN -NONE + hive-site http://git-wip-us.apache.org/repos/asf/ambari/blob/7ff1bbef/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/TEZ/configuration/tez-site.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/TEZ/configuration/tez-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/TEZ/configuration/tez-site.xml new file mode 100644 index 000..be534f1 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/TEZ/configuration/tez-site.xml @@ -0,0 +1,98 @@ + + + + + +tez.am.view-acls +* +AM view ACLs. This allows the specified users/groups to view the status of the AM and all DAGs that run within this AM. + Value format: Comma separated list of users, followed by whitespace, followed by a comma separated list of groups. + + + + +tez.runtime.optimize.local.fetch +true +If the shuffle input is on the local host bypass the http fetch and access the files directly. + + boolean + + + + +tez.task.generate.counters.per.io +true +Whether to generate counters on a per-edge basis for a Tez DAG. Helpful for in-depth analysis. 
+ + boolean + + + + +tez.runtime.sorter.class +PIPELINED +Which sorter implementation to use + + value-list + + + PIPELINED + Pipelined Sorter + + + LEGACY + Legacy Sorter + + + 1 + + + + +tez.runtime.pipelined.sorter.sort.threads +2 +Tez runtime pipelined sorter sort threads + + int + + + +tez-site +tez.runtime.sorter.class + + + + + +tez.runtime.io.sort.mb +272 +The size of the sort buffer when output needs to be sorted + + int + MB + + + +tez-site +tez.runtime.sorter.class + + + + + http://git-wip-us.apache.org/repos/asf/ambari/blob/7ff1bbef/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/TEZ/kerberos.json -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/TEZ/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/TEZ/kerberos.json new file mode 100644 index 000..3662ed8 --- /dev/null +++ b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/TEZ/kerberos
ambari git commit: AMBARI-11706: Remove gzip tool dependency on Windows (jluniya)
Repository: ambari Updated Branches: refs/heads/trunk b9f794d47 -> ec16c5f7c AMBARI-11706: Remove gzip tool dependency on Windows (jluniya) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ec16c5f7 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ec16c5f7 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ec16c5f7 Branch: refs/heads/trunk Commit: ec16c5f7c3e96f6800fa4f5594abac9247edb029 Parents: b9f794d Author: Jayush Luniya Authored: Thu Jun 4 16:48:16 2015 -0700 Committer: Jayush Luniya Committed: Thu Jun 4 16:48:16 2015 -0700 -- ambari-web/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/ec16c5f7/ambari-web/pom.xml -- diff --git a/ambari-web/pom.xml b/ambari-web/pom.xml index b616961..8f8b191 100644 --- a/ambari-web/pom.xml +++ b/ambari-web/pom.xml @@ -272,7 +272,7 @@ \ cmd /C brunch -gzip-content.cmd +${basedir}\gzip-content.cmd cmd /C mkdir cmd
ambari git commit: AMBARI-11706: Remove gzip tool dependency on Windows (jluniya)
Repository: ambari Updated Branches: refs/heads/trunk fb25e19f6 -> b9f794d47 AMBARI-11706: Remove gzip tool dependency on Windows (jluniya) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b9f794d4 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b9f794d4 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b9f794d4 Branch: refs/heads/trunk Commit: b9f794d4763f2f9a992ae403e63fe0eb76297832 Parents: fb25e19 Author: Jayush Luniya Authored: Thu Jun 4 16:16:52 2015 -0700 Committer: Jayush Luniya Committed: Thu Jun 4 16:16:52 2015 -0700 -- ambari-web/gzip-content.cmd | 17 + ambari-web/gzip-content.ps1 | 81 ambari-web/pom.xml | 2 +- 3 files changed, 99 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/b9f794d4/ambari-web/gzip-content.cmd -- diff --git a/ambari-web/gzip-content.cmd b/ambari-web/gzip-content.cmd new file mode 100644 index 000..aa50571 --- /dev/null +++ b/ambari-web/gzip-content.cmd @@ -0,0 +1,17 @@ +@echo off +rem Licensed to the Apache Software Foundation (ASF) under one or more +rem contributor license agreements. See the NOTICE file distributed with +rem this work for additional information regarding copyright ownership. +rem The ASF licenses this file to You under the Apache License, Version 2.0 +rem (the "License"); you may not use this file except in compliance with +rem the License. You may obtain a copy of the License at +rem +rem http://www.apache.org/licenses/LICENSE-2.0 +rem +rem Unless required by applicable law or agreed to in writing, software +rem distributed under the License is distributed on an "AS IS" BASIS, +rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +rem See the License for the specific language governing permissions and +rem limitations under the License. 
+ +powershell -File gzip-content.ps1 %* < NUL \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/b9f794d4/ambari-web/gzip-content.ps1 -- diff --git a/ambari-web/gzip-content.ps1 b/ambari-web/gzip-content.ps1 new file mode 100644 index 000..a6042a1 --- /dev/null +++ b/ambari-web/gzip-content.ps1 @@ -0,0 +1,81 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +# Stop on all errors +$ErrorActionPreference = 'Stop'; + +Function Gzip-File{ + Param( +$inFile, +$outFile = ($inFile + ".gz"), +$force = $false +) + if(-not (Test-Path $inFile)) { +Write-Host "$inFile does not exist" +return $false + } + if((Test-Path $outFile)) { +if(-not $force) { + Write-Host "$outFile already exists" + return $true +} else { + Remove-Item $outFile +} + } + $inputStream = New-Object System.IO.FileStream $inFile, ([IO.FileMode]::Open), ([IO.FileAccess]::Read), ([IO.FileShare]::Read) + $outputStream = New-Object System.IO.FileStream $outFile, ([IO.FileMode]::Create), ([IO.FileAccess]::Write), ([IO.FileShare]::None) + $gzipStream = New-Object System.IO.Compression.GzipStream $outputStream, ([IO.Compression.CompressionMode]::Compress) + + $buffer = New-Object byte[](1024) + while($true){ +$read = $inputStream.Read($buffer, 0, 1024) +if ($read -le 0){break} +$gzipStream.Write($buffer, 0, $read) + } + $gzipStream.Close() + $outputStream.Close() + $inputStream.Close() + Remove-Item $inFile + return $true +} + +$errorFound = $false +$files = @() +$force = $false +ForEach ($arg in $args) { + if($arg -eq "-f" -or $arg -eq "--force") { +$force = $true +continue + } + $files += $arg +} + +ForEach ($file in $files) { + $input = $file + $output = $file + ".gz"; + Write-Host "Running: Gzip-File $input $output $force" + $success = Gzip-File $input $output $force + if(-not $success) { +$errorFound = $true + } +} + +if ($errorFound) { + throw "Failed to gzip all files!"
ambari git commit: AMBARI-11688. Install services request executes 40 seconds. (mpapirkovskyy)
Repository: ambari Updated Branches: refs/heads/trunk e92007d04 -> fb25e19f6 AMBARI-11688. Install services request executes 40 seconds. (mpapirkovskyy) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fb25e19f Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fb25e19f Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fb25e19f Branch: refs/heads/trunk Commit: fb25e19f69549fa129c6370408c5f1b09b0097ce Parents: e92007d Author: Myroslav Papirkovskyy Authored: Tue Jun 2 21:30:39 2015 +0300 Committer: Myroslav Papirkovskyy Committed: Tue Jun 2 21:30:39 2015 +0300 -- .../actionmanager/ActionDBAccessorImpl.java | 2 +- .../server/actionmanager/HostRoleCommand.java | 14 + .../actionmanager/HostRoleCommandFactory.java | 13 .../HostRoleCommandFactoryImpl.java | 17 +++ .../ambari/server/actionmanager/Stage.java | 31 .../AmbariManagementControllerImpl.java | 4 +-- .../internal/ServiceResourceProvider.java | 3 +- .../orm/entities/HostRoleCommandEntity.java | 4 +++ .../org/apache/ambari/server/state/Host.java| 5 .../server/state/ServiceComponentHost.java | 6 .../ambari/server/state/host/HostImpl.java | 5 .../svccomphost/ServiceComponentHostImpl.java | 5 .../stacks/HDPWIN/2.1/services/stack_advisor.py | 10 +++ 13 files changed, 109 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/fb25e19f/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java index c3dfdb5..959ed2d 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ActionDBAccessorImpl.java @@ -262,7 +262,7 @@ public class ActionDBAccessorImpl 
implements ActionDBAccessor { HostRoleCommandEntity hostRoleCommandEntity = hostRoleCommand.constructNewPersistenceEntity(); hostRoleCommandEntity.setStage(stageEntity); -HostEntity hostEntity = hostDAO.findByName(hostRoleCommandEntity.getHostName()); +HostEntity hostEntity = hostDAO.findById(hostRoleCommandEntity.getHostId()); if (hostEntity == null) { String msg = String.format("Host %s doesn't exist in database", hostRoleCommandEntity.getHostName()); LOG.error(msg); http://git-wip-us.apache.org/repos/asf/ambari/blob/fb25e19f/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/HostRoleCommand.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/HostRoleCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/HostRoleCommand.java index 871ce30..20ec9ea 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/HostRoleCommand.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/HostRoleCommand.java @@ -25,6 +25,7 @@ import org.apache.ambari.server.orm.dao.HostDAO; import org.apache.ambari.server.orm.entities.ExecutionCommandEntity; import org.apache.ambari.server.orm.entities.HostEntity; import org.apache.ambari.server.orm.entities.HostRoleCommandEntity; +import org.apache.ambari.server.state.Host; import org.apache.ambari.server.state.ServiceComponentHostEvent; import com.google.inject.Injector; @@ -104,6 +105,19 @@ public class HostRoleCommand { } @AssistedInject + public HostRoleCommand(Host host, Role role, ServiceComponentHostEvent event, RoleCommand command, + boolean retryAllowed, HostDAO hostDAO, ExecutionCommandDAO executionCommandDAO) { +this.hostDAO = hostDAO; +this.executionCommandDAO = executionCommandDAO; + +this.role = role; +this.event = new ServiceComponentHostEventWrapper(event); +this.roleCommand = command; +this.retryAllowed = retryAllowed; +this.hostEntity = hostDAO.findById(host.getHostId()); + } + + @AssistedInject public 
HostRoleCommand(@Assisted HostRoleCommandEntity hostRoleCommandEntity, HostDAO hostDAO, ExecutionCommandDAO executionCommandDAO) { this.hostDAO = hostDAO; this.executionCommandDAO = executionCommandDAO; http://git-wip-us.apache.org/repos/asf/ambari/blob/fb25e19f/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/HostRoleCommandFactory.java --
ambari git commit: AMBARI-11701. Blueprint should not require single-node properties for NameNodes in an HDFS NameNode HA cluster. (rnettleton)
Repository: ambari Updated Branches: refs/heads/trunk ec196cff1 -> e92007d04 AMBARI-11701. Blueprint should not require single-node properties for NameNodes in an HDFS NameNode HA cluster. (rnettleton) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e92007d0 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e92007d0 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e92007d0 Branch: refs/heads/trunk Commit: e92007d0467e52e4137086864c2ca03d6a5ac4cb Parents: ec196cf Author: Bob Nettleton Authored: Thu Jun 4 18:27:36 2015 -0400 Committer: Bob Nettleton Committed: Thu Jun 4 18:27:55 2015 -0400 -- .../BlueprintConfigurationProcessor.java| 3 +- .../BlueprintConfigurationProcessorTest.java| 80 2 files changed, 82 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/e92007d0/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java index c6ff56c..ababc29 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java @@ -130,7 +130,8 @@ public class BlueprintConfigurationProcessor { */ private static final PropertyFilter[] clusterUpdatePropertyFilters = { new DependencyEqualsFilter("hbase.security.authorization", "hbase-site", "true"), - new DependencyNotEqualsFilter("hive.server2.authentication", "hive-site", "NONE") }; + new DependencyNotEqualsFilter("hive.server2.authentication", "hive-site", "NONE"), + new HDFSNameNodeHAFilter() }; /** * Configuration properties to be updated 
http://git-wip-us.apache.org/repos/asf/ambari/blob/e92007d0/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java -- diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java index d957836..83ed594 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java @@ -3954,6 +3954,13 @@ public class BlueprintConfigurationProcessorTest { hdfsSiteProperties.put("dfs.secondary.http.address", "localhost:8080"); hdfsSiteProperties.put("dfs.namenode.secondary.http-address", "localhost:8080"); + +// add properties that are used in non-HA HDFS NameNode settings +// to verify that these are eventually removed by the filter +hdfsSiteProperties.put("dfs.namenode.http-address", "localhost:8080"); +hdfsSiteProperties.put("dfs.namenode.https-address", "localhost:8081"); +hdfsSiteProperties.put("dfs.namenode.rpc-address", "localhost:8082"); + // configure the defaultFS to use the nameservice URL coreSiteProperties.put("fs.defaultFS", "hdfs://" + expectedNameService); @@ -4026,6 +4033,79 @@ public class BlueprintConfigurationProcessorTest { assertEquals("instance.volumes should not be modified by cluster update when NameNode HA is enabled.", "hdfs://" + expectedNameService + "/accumulo/test/instance/volumes", accumuloSiteProperties.get("instance.volumes")); +// verify that the non-HA properties are filtered out in HA mode +assertFalse("dfs.namenode.http-address should have been filtered out of this HA configuration", + hdfsSiteProperties.containsKey("dfs.namenode.http-address")); +assertFalse("dfs.namenode.https-address should have been filtered out of this HA 
configuration", + hdfsSiteProperties.containsKey("dfs.namenode.https-address")); +assertFalse("dfs.namenode.rpc-address should have been filtered out of this HA configuration", + hdfsSiteProperties.containsKey("dfs.namenode.rpc-address")); + + } + + @Test + public void testDoUpdateForClusterWithNameNodeHANotEnabled() throws Exception { +final String expectedHostName = "c6401.apache.ambari.org"; +final String expectedHostNameTwo = "serverTwo"; +final String expectedPortNum = "808080"; +final String expectedHostGroupName = "host_group
ambari git commit: AMBARI-8504 Configuration parameter 'io.compression.codecs' missing in HDP 2.3.GlusterFS stack in ambari-2.1
Repository: ambari Updated Branches: refs/heads/trunk 0160dd62a -> ec196cff1 AMBARI-8504 Configuration parameter 'io.compression.codecs' missing in HDP 2.3.GlusterFS stack in ambari-2.1 Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ec196cff Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ec196cff Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ec196cff Branch: refs/heads/trunk Commit: ec196cff1a520a0f442f6bcae75db95a6c0440dd Parents: 0160dd6 Author: Scott Creeley Authored: Thu Jun 4 17:21:38 2015 -0400 Committer: Scott Creeley Committed: Thu Jun 4 17:21:52 2015 -0400 -- .../stacks/HDP/2.3.GlusterFS/repos/repoinfo.xml | 5 +- .../GLUSTERFS/configuration/core-site.xml | 154 +++ .../services/HBASE/configuration/hbase-site.xml | 11 ++ .../services/HIVE/configuration/hive-site.xml | 62 4 files changed, 230 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/ec196cff/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/repos/repoinfo.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/repos/repoinfo.xml b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/repos/repoinfo.xml index 646b2c0..24b05b0 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/repos/repoinfo.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/repos/repoinfo.xml @@ -16,10 +16,11 @@ limitations under the License. 
--> - http://public-repo-1.hortonworks.com/HDP/hdp_urlinfo.json + + http://s3.amazonaws.com/dev.hortonworks.com/HDP/hdp_urlinfo.json - http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0 + REPLACE_WITH_CENTOS6_URL HDP-2.3 HDP http://git-wip-us.apache.org/repos/asf/ambari/blob/ec196cff/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/GLUSTERFS/configuration/core-site.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/GLUSTERFS/configuration/core-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/GLUSTERFS/configuration/core-site.xml index a861b5f..8b382dd 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/GLUSTERFS/configuration/core-site.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.3.GlusterFS/services/GLUSTERFS/configuration/core-site.xml @@ -39,5 +39,159 @@ glusterfs:///localhost:8020 + + +ha.failover-controller.active-standby-elector.zk.op.retries +120 +ZooKeeper Failover Controller retries setting for your environment + + + + + +io.file.buffer.size +131072 +The size of buffer for use in sequence files. + The size of this buffer should probably be a multiple of hardware + page size (4096 on Intel x86), and it determines how much data is + buffered during read and write operations. + + + +io.serializations +org.apache.hadoop.io.serializer.WritableSerialization + A list of comma-delimited serialization classes that can be used for obtaining serializers and deserializers. + + + + +io.compression.codecs + org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec +A list of the compression codec classes that can be used + for compression/decompression. + + + +fs.trash.interval +360 +Number of minutes after which the checkpoint gets deleted. +If zero, the trash feature is disabled. +This option may be configured both on the server and the client. 
+If trash is disabled server side then the client side configuration is checked. +If trash is enabled on the server side then the value configured on the server is used and the client configuration value is ignored. + + + + + +ipc.client.idlethreshold +8000 +Defines the threshold number of connections after which + connections will be inspected for idleness. + + + + +ipc.client.connection.maxidletime +3 +The maximum time after which a client will bring down the + connection to the server. + + + + +ipc.client.connect.max.retries +50 +Defines the maximum number of retries for IPC connections. + + + +ipc.server.tcpnodelay +true +Turn on/off Nagle's algorithm for the TCP socket + connection on + the server. Setting to true disables the algorithm and may + decrease latency + with a cost of more/smaller packets. + + + + + +mapreduce
ambari git commit: AMBARI-11674. Configs: Invalid version marker placing.(xiwang)
Repository: ambari Updated Branches: refs/heads/trunk e5e2cfd41 -> 0160dd62a AMBARI-11674. Configs: Invalid version marker placing.(xiwang) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0160dd62 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0160dd62 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0160dd62 Branch: refs/heads/trunk Commit: 0160dd62a525c86262d803b6801d36f18b0d1562 Parents: e5e2cfd Author: Xi Wang Authored: Wed Jun 3 17:31:59 2015 -0700 Committer: Xi Wang Committed: Thu Jun 4 14:20:46 2015 -0700 -- ambari-web/app/styles/widgets.less | 11 --- 1 file changed, 8 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/0160dd62/ambari-web/app/styles/widgets.less -- diff --git a/ambari-web/app/styles/widgets.less b/ambari-web/app/styles/widgets.less index 13a543e..839fc9f 100644 --- a/ambari-web/app/styles/widgets.less +++ b/ambari-web/app/styles/widgets.less @@ -141,6 +141,14 @@ } } +.widget-config-comparison .widget-config&.slider-widget { + overflow-x: visible; +} + +.slider-widget .widget-config-comparison .version-labels { + top: 20px; +} + .directory-textarea-wrapper { float: left; width: 80%; @@ -471,7 +479,4 @@ .compare-mode { background-color: rgba(211, 237, 247, 0.39); padding: 10px 5px 0px 10px; - .version-labels { -min-width: 70px !important; - } }
ambari git commit: AMBARI-11703. HBASE configs validation error on 'hbase.bucketcache.ioengine' (srimanth)
Repository: ambari Updated Branches: refs/heads/trunk 332eca725 -> e5e2cfd41 AMBARI-11703. HBASE configs validation error on 'hbase.bucketcache.ioengine' (srimanth) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e5e2cfd4 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e5e2cfd4 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e5e2cfd4 Branch: refs/heads/trunk Commit: e5e2cfd417d2aae05a9d0f8e771a16acaa0ad1f0 Parents: 332eca7 Author: Srimanth Gunturi Authored: Thu Jun 4 13:55:00 2015 -0700 Committer: Srimanth Gunturi Committed: Thu Jun 4 14:18:02 2015 -0700 -- .../main/resources/stacks/HDP/2.2/services/stack_advisor.py | 8 .../resources/stacks/HDPWIN/2.2/services/stack_advisor.py| 8 2 files changed, 8 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/e5e2cfd4/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py index 93f5f19..ebbdef2 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py +++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py @@ -967,7 +967,7 @@ class HDP22StackAdvisor(HDP21StackAdvisor): # Validate bucket cache correct config prop_name = "hbase.bucketcache.ioengine" prop_val = "offheap" -if not (not hbase_site[prop_name] or hbase_site[prop_name] == prop_val): +if prop_name in hbase_site and not (not hbase_site[prop_name] or hbase_site[prop_name] == prop_val): validationItems.append({"config-name": prop_name, "item": self.getWarnItem( "Recommended values of " \ @@ -977,11 +977,11 @@ class HDP22StackAdvisor(HDP21StackAdvisor): prop_name2 = "hbase.bucketcache.size" prop_name3 = "hbase.bucketcache.percentage.in.combinedcache" -if hbase_site[prop_name1] and not hbase_site[prop_name2]: +if 
prop_name1 in hbase_site and prop_name2 in hbase_site and hbase_site[prop_name1] and not hbase_site[prop_name2]: validationItems.append({"config-name": prop_name2, "item": self.getWarnItem( "If bucketcache ioengine is enabled, {0} should be set".format(prop_name2))}) -if hbase_site[prop_name1] and not hbase_site[prop_name3]: +if prop_name1 in hbase_site and prop_name3 in hbase_site and hbase_site[prop_name1] and not hbase_site[prop_name3]: validationItems.append({"config-name": prop_name3, "item": self.getWarnItem( "If bucketcache ioengine is enabled, {0} should be set".format(prop_name3))}) @@ -1008,7 +1008,7 @@ class HDP22StackAdvisor(HDP21StackAdvisor): hbase_site_properties = getSiteProperties(configurations, "hbase-site") prop_name1 = "hbase.bucketcache.ioengine" -if hbase_site_properties[prop_name1] and hbase_site_properties[prop_name1] == "offheap" and not hbase_env[prop_name]: +if prop_name1 in hbase_site_properties and prop_name in hbase_env and hbase_site_properties[prop_name1] and hbase_site_properties[prop_name1] == "offheap" and not hbase_env[prop_name]: validationItems.append({"config-name": prop_name, "item": self.getWarnItem( "If bucketcache ioengine is enabled, {0} should be set".format(prop_name))}) http://git-wip-us.apache.org/repos/asf/ambari/blob/e5e2cfd4/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py -- diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py index a46d230..7a57759 100644 --- a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py +++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/stack_advisor.py @@ -949,7 +949,7 @@ class HDPWIN22StackAdvisor(HDPWIN21StackAdvisor): # Validate bucket cache correct config prop_name = "hbase.bucketcache.ioengine" prop_val = "offheap" -if not (not hbase_site[prop_name] or hbase_site[prop_name] == prop_val): +if 
prop_name in hbase_site and not (not hbase_site[prop_name] or hbase_site[prop_name] == prop_val): validationItems.append({"config-name": prop_name, "item": self.getWarnItem( "Recommended values of " \ @@ -959,11 +959,11 @@ class HDPWIN22
ambari git commit: AMBARI-11704. Mahout service check fails due to bad folder permission for /user/ambari-qa (afernandez via smohanty)
Repository: ambari Updated Branches: refs/heads/trunk 98640c636 -> 332eca725 AMBARI-11704. Mahout service check fails due to bad folder permission for /user/ambari-qa (afernandez via smohanty) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/332eca72 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/332eca72 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/332eca72 Branch: refs/heads/trunk Commit: 332eca7253e6fde33c4f7fbce07558752a8028e3 Parents: 98640c6 Author: Sumit Mohanty Authored: Thu Jun 4 14:05:59 2015 -0700 Committer: Sumit Mohanty Committed: Thu Jun 4 14:05:59 2015 -0700 -- .../HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py | 7 +-- 1 file changed, 5 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/332eca72/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py -- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py index 874cc6a..eec864c 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py @@ -139,9 +139,12 @@ def namenode(action=None, do_format=True, rolling_restart=False, env=None): path=[params.hadoop_bin_dir], user=params.hdfs_user ) +pass + pass + +# Always run this on non-HA, or active NameNode during HA. +create_hdfs_directories(is_active_namenode_cmd) - # Always run on this non-HA, or active NameNode during HA. - create_hdfs_directories(is_active_namenode_cmd) elif action == "stop": import params service(
ambari git commit: AMBARI-11702. Enhanced Dashboard: Server Error on toggling add widget.
Repository: ambari Updated Branches: refs/heads/trunk 6a0d1e0a2 -> 98640c636 AMBARI-11702. Enhanced Dashboard: Server Error on toggling add widget. Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/98640c63 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/98640c63 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/98640c63 Branch: refs/heads/trunk Commit: 98640c63692960188d54a981665a2468769751e2 Parents: 6a0d1e0 Author: Siddharth Wagle Authored: Thu Jun 4 13:54:39 2015 -0700 Committer: Siddharth Wagle Committed: Thu Jun 4 13:54:44 2015 -0700 -- .../internal/WidgetLayoutResourceProvider.java | 96 +++- .../ambari/server/orm/dao/WidgetLayoutDAO.java | 11 +++ 2 files changed, 63 insertions(+), 44 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/98640c63/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java index 174a106..7bee12c 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java @@ -50,6 +50,8 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; /** * Resource provider for widget layout resources. 
@@ -80,6 +82,9 @@ public class WidgetLayoutResourceProvider extends AbstractControllerResourceProv }; @SuppressWarnings("serial") + private static ReadWriteLock lock = new ReentrantReadWriteLock(); + + @SuppressWarnings("serial") public static Set propertyIds = new HashSet() { { add(WIDGETLAYOUT_ID_PROPERTY_ID); @@ -267,55 +272,58 @@ public class WidgetLayoutResourceProvider extends AbstractControllerResourceProv } catch (Exception ex) { throw new AmbariException("WidgetLayout should have numerical id"); } - final WidgetLayoutEntity entity = widgetLayoutDAO.findById(layoutId); - if (entity == null) { -throw new ObjectNotFoundException("There is no widget layout with id " + layoutId); - } - if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(WIDGETLAYOUT_LAYOUT_NAME_PROPERTY_ID { - entity.setLayoutName(propertyMap.get(WIDGETLAYOUT_LAYOUT_NAME_PROPERTY_ID).toString()); - } - if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(WIDGETLAYOUT_SECTION_NAME_PROPERTY_ID { - entity.setSectionName(propertyMap.get(WIDGETLAYOUT_SECTION_NAME_PROPERTY_ID).toString()); - } - if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(WIDGETLAYOUT_DISPLAY_NAME_PROPERTY_ID { - entity.setDisplayName(propertyMap.get(WIDGETLAYOUT_DISPLAY_NAME_PROPERTY_ID).toString()); - } - if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(WIDGETLAYOUT_SCOPE_PROPERTY_ID { - entity.setScope(propertyMap.get(WIDGETLAYOUT_SCOPE_PROPERTY_ID).toString()); - } - - Set widgetsSet = (LinkedHashSet) propertyMap.get(WIDGETLAYOUT_WIDGETS_PROPERTY_ID); + lock.writeLock().lock(); + try { +final WidgetLayoutEntity entity = widgetLayoutDAO.findById(layoutId); +if (entity == null) { + throw new ObjectNotFoundException("There is no widget layout with id " + layoutId); +} +if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(WIDGETLAYOUT_LAYOUT_NAME_PROPERTY_ID { + entity.setLayoutName(propertyMap.get(WIDGETLAYOUT_LAYOUT_NAME_PROPERTY_ID).toString()); +} +if 
(StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(WIDGETLAYOUT_SECTION_NAME_PROPERTY_ID { + entity.setSectionName(propertyMap.get(WIDGETLAYOUT_SECTION_NAME_PROPERTY_ID).toString()); +} +if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(WIDGETLAYOUT_DISPLAY_NAME_PROPERTY_ID { + entity.setDisplayName(propertyMap.get(WIDGETLAYOUT_DISPLAY_NAME_PROPERTY_ID).toString()); +} +if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(WIDGETLAYOUT_SCOPE_PROPERTY_ID { + entity.setScope(propertyMap.get(WIDGETLAY
ambari git commit: AMBARI-11678 Perf: Add GZIP support for Ambari Server API (dsen)
Repository: ambari Updated Branches: refs/heads/trunk a4f9081c7 -> 6a0d1e0a2 AMBARI-11678 Perf: Add GZIP support for Ambari Server API (dsen) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6a0d1e0a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6a0d1e0a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6a0d1e0a Branch: refs/heads/trunk Commit: 6a0d1e0a2dbf241fd729d4d50f4f09e45e17baf6 Parents: a4f9081 Author: Dmytro Sen Authored: Thu Jun 4 21:41:38 2015 +0300 Committer: Dmytro Sen Committed: Thu Jun 4 21:41:38 2015 +0300 -- .../src/main/python/ambari_agent/Controller.py | 3 +- .../src/main/python/ambari_agent/security.py| 72 ambari-project/pom.xml | 5 ++ ambari-server/pom.xml | 4 ++ .../server/configuration/Configuration.java | 39 ++- .../ambari/server/controller/AmbariServer.java | 29 +++- .../server/controller/AmbariServerTest.java | 30 +++- 7 files changed, 148 insertions(+), 34 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/6a0d1e0a/ambari-agent/src/main/python/ambari_agent/Controller.py -- diff --git a/ambari-agent/src/main/python/ambari_agent/Controller.py b/ambari-agent/src/main/python/ambari_agent/Controller.py index 94b574a..4e5de6c 100644 --- a/ambari-agent/src/main/python/ambari_agent/Controller.py +++ b/ambari-agent/src/main/python/ambari_agent/Controller.py @@ -393,7 +393,8 @@ class Controller(threading.Thread): try: if self.cachedconnect is None: # Lazy initialization self.cachedconnect = security.CachedHTTPSConnection(self.config) - req = urllib2.Request(url, data, {'Content-Type': 'application/json'}) + req = urllib2.Request(url, data, {'Content-Type': 'application/json', +'Accept-encoding': 'gzip'}) response = self.cachedconnect.request(req) return json.loads(response) except Exception, exception: http://git-wip-us.apache.org/repos/asf/ambari/blob/6a0d1e0a/ambari-agent/src/main/python/ambari_agent/security.py -- diff --git 
a/ambari-agent/src/main/python/ambari_agent/security.py b/ambari-agent/src/main/python/ambari_agent/security.py index a86d06b..bfaf134 100644 --- a/ambari-agent/src/main/python/ambari_agent/security.py +++ b/ambari-agent/src/main/python/ambari_agent/security.py @@ -15,7 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. - +from StringIO import StringIO +import gzip import httplib import urllib2 import socket @@ -31,8 +32,9 @@ import platform logger = logging.getLogger(__name__) -GEN_AGENT_KEY = 'openssl req -new -newkey rsa:1024 -nodes -keyout "%(keysdir)s'+os.sep+'%(hostname)s.key" '\ - '-subj /OU=%(hostname)s/ -out "%(keysdir)s'+os.sep+'%(hostname)s.csr"' +GEN_AGENT_KEY = 'openssl req -new -newkey rsa:1024 -nodes -keyout "%(keysdir)s' \ ++ os.sep + '%(hostname)s.key" -subj /OU=%(hostname)s/ ' \ +'-out "%(keysdir)s' + os.sep + '%(hostname)s.csr"' class VerifiedHTTPSConnection(httplib.HTTPSConnection): @@ -44,9 +46,11 @@ class VerifiedHTTPSConnection(httplib.HTTPSConnection): def connect(self): self.two_way_ssl_required = self.config.isTwoWaySSLConnection() -logger.debug("Server two-way SSL authentication required: %s" % str(self.two_way_ssl_required)) +logger.debug("Server two-way SSL authentication required: %s" % str( + self.two_way_ssl_required)) if self.two_way_ssl_required is True: - logger.info('Server require two-way SSL authentication. Use it instead of one-way...') + logger.info( +'Server require two-way SSL authentication. Use it instead of one-way...') if not self.two_way_ssl_required: try: @@ -56,8 +60,9 @@ class VerifiedHTTPSConnection(httplib.HTTPSConnection): 'turned off on the server.') except (ssl.SSLError, AttributeError): self.two_way_ssl_required = True -logger.info('Insecure connection to https://' + self.host + ':' + self.port + -'/ failed. Reconnecting using two-way SSL authentication..') +logger.info( + 'Insecure connection to https://' + self.host + ':' + self.port + + '/ failed. 
Reconnecting using two-way SSL authentication..') if self.two_way_ssl_required: self.certMan = CertificateManager(self.config) @@ -70,21 +75,21 @@ class VerifiedHTTPSConnection(httplib.HTTPSConnection): try: self.sock = ssl.wrap_socket(sock, -keyfile
ambari git commit: AMBARI-11698: SQL Server DB create script should use VARBINARY(max) as the bytea replacement (jluniya)
Repository: ambari Updated Branches: refs/heads/trunk c01865ade -> a4f9081c7 AMBARI-11698: SQL Server DB create script should use VARBINARY(max) as the bytea replacement (jluniya) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a4f9081c Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a4f9081c Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a4f9081c Branch: refs/heads/trunk Commit: a4f9081c7af18a5f08b4aec800e3f4f9b624d2fa Parents: c01865a Author: Jayush Luniya Authored: Thu Jun 4 11:35:29 2015 -0700 Committer: Jayush Luniya Committed: Thu Jun 4 11:35:29 2015 -0700 -- .../main/resources/Ambari-DDL-SQLServer-CREATE.sql| 14 +++--- 1 file changed, 7 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/a4f9081c/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql -- diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql index 07fe02e..20c75b3 100644 --- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql +++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql @@ -238,7 +238,7 @@ CREATE TABLE members ( ); CREATE TABLE execution_command ( - command VARBINARY(8000), + command VARBINARY(MAX), task_id BIGINT NOT NULL, PRIMARY KEY CLUSTERED (task_id) ); @@ -287,9 +287,9 @@ CREATE TABLE stage ( skippable SMALLINT DEFAULT 0 NOT NULL, log_info VARCHAR(255) NOT NULL, request_context VARCHAR(255), - cluster_host_info VARBINARY(8000) NOT NULL, - command_params VARBINARY(8000), - host_params VARBINARY(8000), + cluster_host_info VARBINARY(MAX) NOT NULL, + command_params VARBINARY(MAX), + host_params VARBINARY(MAX), PRIMARY KEY CLUSTERED ( stage_id, request_id @@ -303,7 +303,7 @@ CREATE TABLE request ( create_time BIGINT NOT NULL, end_time BIGINT NOT NULL, exclusive_execution BIT NOT NULL DEFAULT 0, - inputs VARBINARY(8000), + inputs 
VARBINARY(MAX), request_context VARCHAR(255), request_type VARCHAR(255), request_schedule_id BIGINT, @@ -317,7 +317,7 @@ CREATE TABLE requestresourcefilter ( request_id BIGINT NOT NULL, service_name VARCHAR(255), component_name VARCHAR(255), - hosts VARBINARY(8000), + hosts VARBINARY(MAX), PRIMARY KEY CLUSTERED (filter_id) ); @@ -434,7 +434,7 @@ CREATE TABLE requestschedulebatchrequest ( request_id BIGINT, request_type VARCHAR(255), request_uri VARCHAR(1024), - request_body VARBINARY(8000), + request_body VARBINARY(MAX), request_status VARCHAR(255), return_code SMALLINT, return_message TEXT,
ambari git commit: AMBARI-11672. Ambari 2.0.1 server doesn't start after upgrade from HDP 2.1.7 to 2.2.4. Fixed broken unit test. (swagle)
Repository: ambari Updated Branches: refs/heads/trunk e7982e319 -> c01865ade AMBARI-11672. Ambari 2.0.1 server doesn't start after upgrade from HDP 2.1.7 to 2.2.4. Fixed broken unit test. (swagle) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c01865ad Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c01865ad Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c01865ad Branch: refs/heads/trunk Commit: c01865ade6bf8819a23c951ac2fcf87d60a1a0ef Parents: e7982e3 Author: Siddharth Wagle Authored: Thu Jun 4 10:41:17 2015 -0700 Committer: Siddharth Wagle Committed: Thu Jun 4 10:41:17 2015 -0700 -- .../server/upgrade/UpgradeCatalog210.java | 72 +++ .../server/upgrade/UpgradeCatalog210Test.java | 92 +++- 2 files changed, 161 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/c01865ad/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java index 2cd4811..d83940e 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java @@ -31,14 +31,20 @@ import org.apache.ambari.server.controller.AmbariManagementController; import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo; import org.apache.ambari.server.orm.dao.AlertDefinitionDAO; import org.apache.ambari.server.orm.dao.DaoUtils; +import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO; import org.apache.ambari.server.orm.dao.StackDAO; import org.apache.ambari.server.orm.entities.AlertDefinitionEntity; +import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity; +import org.apache.ambari.server.orm.entities.HostComponentStateEntity; +import 
org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity; +import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK; import org.apache.ambari.server.orm.entities.StackEntity; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Service; import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.state.stack.OsFamily; +import org.apache.ambari.server.utils.VersionUtils; import org.apache.commons.lang.StringUtils; import org.eclipse.persistence.internal.databaseaccess.FieldTypeDefinition; import org.slf4j.Logger; @@ -46,6 +52,9 @@ import org.slf4j.LoggerFactory; import javax.persistence.EntityManager; import javax.persistence.Query; +import javax.persistence.criteria.CriteriaBuilder; +import javax.persistence.criteria.CriteriaDelete; +import javax.persistence.criteria.Root; import java.sql.ResultSet; import java.sql.SQLException; import java.text.MessageFormat; @@ -884,6 +893,69 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog { addMissingConfigs(); updateAlertDefinitions(); +removeStormRestApiServiceComponent(); + } + + /** + * Delete STORM_REST_API component if HDP is upgraded past 2.2 and the + * Component still exists. 
+ */ + protected void removeStormRestApiServiceComponent() { +AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class); +Clusters clusters = ambariManagementController.getClusters(); + +if (clusters != null) { + Map clusterMap = clusters.getClusters(); + for (final Cluster cluster : clusterMap.values()) { +StackId stackId = cluster.getCurrentStackVersion(); +if (stackId != null && stackId.getStackName().equals("HDP") && + VersionUtils.compareVersions(stackId.getStackVersion(), "2.2") >= 0) { + + executeInTransaction(new Runnable() { +@Override +public void run() { + ServiceComponentDesiredStateDAO dao = injector.getInstance(ServiceComponentDesiredStateDAO.class); + ServiceComponentDesiredStateEntityPK entityPK = new ServiceComponentDesiredStateEntityPK(); + entityPK.setClusterId(cluster.getClusterId()); + entityPK.setServiceName("STORM"); + entityPK.setComponentName("STORM_REST_API"); + ServiceComponentDesiredStateEntity entity = dao.findByPK(entityPK); + if (entity != null) { +EntityManager em = getEntityManagerProvider().get(); +CriteriaBuilder cb = em.getCriteriaBuilder(); + +try { +
ambari git commit: AMBARI-11696. HBase configuration property hbase_max_direct_memory_size not found (smohanty via srimanth)
Repository: ambari Updated Branches: refs/heads/trunk cddf20e33 -> e7982e319 AMBARI-11696. HBase configuration property hbase_max_direct_memory_size not found (smohanty via srimanth) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e7982e31 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e7982e31 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e7982e31 Branch: refs/heads/trunk Commit: e7982e319efdb7a57d2bb5bd0cdbe7f56a55dcba Parents: cddf20e Author: Srimanth Gunturi Authored: Thu Jun 4 10:28:01 2015 -0700 Committer: Srimanth Gunturi Committed: Thu Jun 4 10:28:01 2015 -0700 -- .../HBASE/0.96.0.2.0/package/scripts/params_linux.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/e7982e31/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py -- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py index da26979..e8585cd 100644 --- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py +++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py @@ -58,6 +58,7 @@ daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh" region_mover = "/usr/lib/hbase/bin/region_mover.rb" region_drainer = "/usr/lib/hbase/bin/draining_servers.rb" hbase_cmd = "/usr/lib/hbase/bin/hbase" +hbase_max_direct_memory_size = None # hadoop parameters for 2.2+ if Script.is_hdp_stack_greater_or_equal("2.2"): @@ -66,7 +67,7 @@ if Script.is_hdp_stack_greater_or_equal("2.2"): region_drainer = format('/usr/hdp/current/hbase-client/bin/draining_servers.rb') hbase_cmd = format('/usr/hdp/current/hbase-client/bin/hbase') - hbase_max_direct_memory_size = 
config['configurations']['hbase-env']['hbase_max_direct_memory_size'] + hbase_max_direct_memory_size = default('configurations/hbase-env/hbase_max_direct_memory_size', None) daemon_script=format("/usr/hdp/current/{component_directory}/bin/hbase-daemon.sh") region_mover = format("/usr/hdp/current/{component_directory}/bin/region_mover.rb")
[1/2] ambari git commit: AMBARI-11654. Fix idempotent issue for PostgreSQL (dlysnichenko)
Repository: ambari Updated Branches: refs/heads/trunk 2f1fd9103 -> cddf20e33 http://git-wip-us.apache.org/repos/asf/ambari/blob/cddf20e3/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeTest.java -- diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeTest.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeTest.java index 3af417f..9e516e7 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeTest.java @@ -62,6 +62,7 @@ import org.apache.ambari.server.orm.dao.ViewInstanceDAO; import org.apache.ambari.server.utils.VersionUtils; import org.apache.ambari.server.view.ViewRegistry; import org.easymock.EasyMock; +import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -103,6 +104,7 @@ public class UpgradeTest { } @Test + @Ignore public void testUpgrade() throws Exception { //not all tests close database properly, ensure it is empty try {
[2/2] ambari git commit: AMBARI-11654. Fix idempotent issue for PostgreSQL (dlysnichenko)
AMBARI-11654. Fix idempotent issue for PostgreSQL (dlysnichenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cddf20e3 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cddf20e3 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cddf20e3 Branch: refs/heads/trunk Commit: cddf20e330235a8c136c5f4fecb7a053ee53527e Parents: 2f1fd91 Author: Lisnichenko Dmitro Authored: Thu Jun 4 19:50:54 2015 +0300 Committer: Lisnichenko Dmitro Committed: Thu Jun 4 19:50:54 2015 +0300 -- .../apache/ambari/server/orm/DBAccessor.java| 203 ++--- .../ambari/server/orm/DBAccessorImpl.java | 203 +++-- .../server/orm/helpers/dbms/DbmsHelper.java | 29 ++- .../orm/helpers/dbms/GenericDbmsHelper.java | 80 ++- .../server/orm/helpers/dbms/MySqlHelper.java| 14 ++ .../server/orm/helpers/dbms/PostgresHelper.java | 25 ++ .../server/upgrade/AbstractUpgradeCatalog.java | 46 .../server/upgrade/UpgradeCatalog150.java | 28 +-- .../server/upgrade/UpgradeCatalog161.java | 12 +- .../server/upgrade/UpgradeCatalog170.java | 4 +- .../server/upgrade/UpgradeCatalog200.java | 17 +- .../server/upgrade/UpgradeCatalog210.java | 226 +-- .../ambari/server/utils/CustomStringUtils.java | 70 ++ .../ambari/server/orm/DBAccessorImplTest.java | 34 ++- .../ambari/server/upgrade/UpgradeTest.java | 2 + 15 files changed, 777 insertions(+), 216 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/cddf20e3/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java index 2c9277a..27dd320 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/DBAccessor.java @@ -51,7 +51,7 @@ public interface DBAccessor { * @param primaryKeyColumns * @throws SQLException */ - public 
void createTable(String tableName, List columnInfo, + void createTable(String tableName, List columnInfo, String... primaryKeyColumns) throws SQLException; /** @@ -61,7 +61,7 @@ public interface DBAccessor { * @param columnNames * @throws SQLException */ - public void createIndex(String indexName, String tableName, + void createIndex(String indexName, String tableName, String... columnNames) throws SQLException; /** @@ -72,7 +72,7 @@ public interface DBAccessor { * @param referenceColumn * @throws SQLException */ - public void addFKConstraint(String tableName, + void addFKConstraint(String tableName, String constraintName, String keyColumn, String referenceTableName, @@ -90,7 +90,7 @@ public interface DBAccessor { * @param ignoreFailure * @throws SQLException */ - public void addFKConstraint(String tableName, + void addFKConstraint(String tableName, String constraintName, String keyColumn, String referenceTableName, @@ -109,7 +109,7 @@ public interface DBAccessor { * @param ignoreFailure * @throws SQLException */ - public void addFKConstraint(String tableName, + void addFKConstraint(String tableName, String constraintName, String[] keyColumns, String referenceTableName, @@ -127,7 +127,7 @@ public interface DBAccessor { * @param ignoreFailure * @throws SQLException */ - public void addFKConstraint(String tableName, + void addFKConstraint(String tableName, String constraintName, String[] keyColumns, String referenceTableName, @@ -139,10 +139,39 @@ public interface DBAccessor { * @param columnInfo * @throws SQLException */ - public void addColumn(String tableName, + void addColumn(String tableName, DBColumnInfo columnInfo) throws SQLException; /** + * Add unique table constraint + * @param constraintName name of the constraint + * @param tableName name of the table + * @param columnNames list of columns + * @throws SQLException + */ + void addUniqueConstraint(String tableName, String constraintName, String... 
columnNames) +throws SQLException; + + /** + * + * @param tableName name of the table + * @param constraintName na
ambari git commit: AMBARI-11640. During initial Stack Install, tarballs will not be copied to HDFS because the current_version is not yet known (alejandro)
Repository: ambari Updated Branches: refs/heads/trunk ef601102c -> 2f1fd9103 AMBARI-11640. During initial Stack Install, tarballs will not be copied to HDFS because the current_version is not yet known (alejandro) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2f1fd910 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2f1fd910 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2f1fd910 Branch: refs/heads/trunk Commit: 2f1fd9103abac21aaed8174ed66bb31229734118 Parents: ef60110 Author: Alejandro Fernandez Authored: Thu Jun 4 09:42:31 2015 -0700 Committer: Alejandro Fernandez Committed: Thu Jun 4 09:42:31 2015 -0700 -- .../python/resource_management/libraries/functions/copy_tarball.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/2f1fd910/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py index 7b4fb07..de05da2 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py @@ -74,7 +74,7 @@ def _get_single_version_from_hdp_select(): with open(tmp_file, 'r+') as file: out = file.read() except: -pass +Logger.error("Could not parse output of {0}".format(str(tmp_file))) finally: try: if os.path.exists(tmp_file):
ambari git commit: AMBARI-11640. During initial Stack Install, tarballs will not be copied to HDFS because the current_version is not yet known (alejandro)
Repository: ambari Updated Branches: refs/heads/trunk 0bc81fd9d -> ef601102c AMBARI-11640. During initial Stack Install, tarballs will not be copied to HDFS because the current_version is not yet known (alejandro) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ef601102 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ef601102 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ef601102 Branch: refs/heads/trunk Commit: ef601102c0f5469439654d7f28229df7065dd291 Parents: 0bc81fd Author: Alejandro Fernandez Authored: Tue Jun 2 15:35:24 2015 -0700 Committer: Alejandro Fernandez Committed: Thu Jun 4 09:32:22 2015 -0700 -- .../libraries/functions/copy_tarball.py | 60 ++-- .../package/scripts/atlas_plugin_utils.py | 4 +- .../HIVE/0.12.0.2.0/package/scripts/hive.py | 1 + .../stacks/2.0.6/HIVE/test_hive_server.py | 22 --- 4 files changed, 71 insertions(+), 16 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/ef601102/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py index f8a2570..7b4fb07 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/copy_tarball.py @@ -22,9 +22,13 @@ __all__ = ["copy_to_hdfs", ] import os import uuid +import tempfile +import re +from resource_management.libraries.script.script import Script from resource_management.libraries.resources.hdfs_resource import HdfsResource from resource_management.libraries.functions.default import default +from resource_management.core import shell from resource_management.core.logger import Logger STACK_VERSION_PATTERN = "{{ stack_version }}" @@ -51,6 +55,45 @@ TARBALL_MAP = { 
} } + +def _get_single_version_from_hdp_select(): + """ + Call "hdp-select versions" and return the version string if only one version is available. + :return: Returns a version string if successful, and None otherwise. + """ + # Ubuntu returns: "stdin: is not a tty", as subprocess output, so must use a temporary file to store the output. + tmpfile = tempfile.NamedTemporaryFile() + tmp_dir = Script.get_tmp_dir() + tmp_file = os.path.join(tmp_dir, "copy_tarball_out.txt") + hdp_version = None + + out = None + get_hdp_versions_cmd = "/usr/bin/hdp-select versions > {0}".format(tmp_file) + try: +code, stdoutdata = shell.call(get_hdp_versions_cmd, logoutput=True) +with open(tmp_file, 'r+') as file: + out = file.read() + except: +pass + finally: +try: + if os.path.exists(tmp_file): +os.remove(tmp_file) +except: + pass + if code != 0 or out is None or out == "": +Logger.error("Could not verify HDP version by calling '{0}'. Return Code: {1}, Output: {2}.".format(get_hdp_versions_cmd, str(code), str(out))) +return None + + matches = re.findall(r"([\d\.]+\-\d+)", out) + + if matches and len(matches) == 1: +hdp_version = matches[0] + elif matches and len(matches) > 1: +Logger.error("Found multiple matches for HDP version, cannot identify the correct one from: {0}".format(", ".join(matches))) + + return hdp_version + def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=None, custom_dest_file=None, force_execute=False): """ :param name: Tarball name, e.g., tez, hive, pig, sqoop. @@ -65,11 +108,11 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non import params if params.stack_name is None or params.stack_name.upper() not in TARBALL_MAP: -Logger.error("Cannot copy %s tarball to HDFS because stack %s does not support this operation." 
% (str(name), str(params.stack_name))) +Logger.error("Cannot copy {0} tarball to HDFS because stack {1} does not support this operation.".format(str(name), str(params.stack_name))) return -1 if name is None or name.lower() not in TARBALL_MAP[params.stack_name.upper()]: -Logger.warning("Cannot copy tarball to HDFS because %s is not supported in stack for this operation." % (str(name), str(params.stack_name))) +Logger.warning("Cannot copy tarball to HDFS because {0} is not supported in stack {1} for this operation.".format(str(name), str(params.stack_name))) return -1 (source_file, dest_file) = TARBALL_MAP[params.stack_name.upper()][name.lower()] @@ -86,17 +129,24 @@ def copy_to_hdfs(name, user_group, owner, file_mode=0444, custom_source_file=Non if i
ambari git commit: AMBARI-11694. After enabling Kerberos via the REST API, the Ambari UI behaves as if the cluster is not Kerberized (alexantonenko)
Repository: ambari Updated Branches: refs/heads/trunk f9ed7a3a7 -> 0bc81fd9d AMBARI-11694. After enabling Kerberos via the REST API, the Ambari UI behaves as if the cluster is not Kerberized (alexantonenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0bc81fd9 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0bc81fd9 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0bc81fd9 Branch: refs/heads/trunk Commit: 0bc81fd9df4ac8a0ec5139a56a6940d4ca710bfb Parents: f9ed7a3 Author: Alex Antonenko Authored: Thu Jun 4 18:46:34 2015 +0300 Committer: Alex Antonenko Committed: Thu Jun 4 18:46:34 2015 +0300 -- ambari-web/app/controllers/main/admin/kerberos.js | 13 + ambari-web/app/mixins/wizard/addSecurityConfigs.js | 2 +- 2 files changed, 10 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/0bc81fd9/ambari-web/app/controllers/main/admin/kerberos.js -- diff --git a/ambari-web/app/controllers/main/admin/kerberos.js b/ambari-web/app/controllers/main/admin/kerberos.js index a4ca131..ecd79a6 100644 --- a/ambari-web/app/controllers/main/admin/kerberos.js +++ b/ambari-web/app/controllers/main/admin/kerberos.js @@ -23,6 +23,7 @@ require('controllers/main/admin/kerberos/step4_controller'); App.MainAdminKerberosController = App.KerberosWizardStep4Controller.extend({ name: 'mainAdminKerberosController', securityEnabled: false, + defaultKerberosLoaded: false, dataIsLoaded: false, isRecommendedLoaded: true, kdc_type: '', @@ -269,10 +270,14 @@ App.MainAdminKerberosController = App.KerberosWizardStep4Controller.extend({ self.loadClusterDescriptorConfigs().then(function() { dfd.resolve(); }, function() { -// if kerberos descriptor doesn't exist in cluster artifacts we have to kerberize cluster. -// Show `Enable kerberos` button and set unsecure status. 
-self.set('securityEnabled', false); -dfd.resolve(); +// if kerberos descriptor doesn't exist in cluster artifacts get the default descriptor +self.loadStackDescriptorConfigs().then(function() { + self.set('defaultKerberosLoaded', true); + dfd.resolve(); +}, function() { + self.set('securityEnabled', false); + dfd.resolve(); +}); }); } else { dfd.resolve(); http://git-wip-us.apache.org/repos/asf/ambari/blob/0bc81fd9/ambari-web/app/mixins/wizard/addSecurityConfigs.js -- diff --git a/ambari-web/app/mixins/wizard/addSecurityConfigs.js b/ambari-web/app/mixins/wizard/addSecurityConfigs.js index 43cf9e2..7e436ae 100644 --- a/ambari-web/app/mixins/wizard/addSecurityConfigs.js +++ b/ambari-web/app/mixins/wizard/addSecurityConfigs.js @@ -664,7 +664,7 @@ App.AddSecurityConfigs = Em.Mixin.create({ * @method loadStackDescriptorConfigs */ loadDescriptorConfigs: function() { -if (App.router.get('mainAdminKerberosController.securityEnabled')) { +if (App.router.get('mainAdminKerberosController.securityEnabled') && !App.router.get('mainAdminKerberosController.defaultKerberosLoaded')) { return this.loadClusterDescriptorConfigs(); } else { return this.loadStackDescriptorConfigs();
[2/2] ambari git commit: AMBARI-11686. NameNode HA: going back and forth between Select Hosts and Review page adds additional NameNodes (alexantonenko)
AMBARI-11686. NameNode HA: going back and forth between Select Hosts and Review page adds additional NameNodes (alexantonenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/75bce8ae Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/75bce8ae Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/75bce8ae Branch: refs/heads/trunk Commit: 75bce8aee6fa8b4f68fca1a5aba483553401468a Parents: 36a1c66 Author: Alex Antonenko Authored: Thu Jun 4 16:35:06 2015 +0300 Committer: Alex Antonenko Committed: Thu Jun 4 18:38:26 2015 +0300 -- .../admin/highAvailability/nameNode/wizard_controller.js | 5 + ambari-web/app/routes/high_availability_routes.js| 8 ++-- 2 files changed, 11 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/75bce8ae/ambari-web/app/controllers/main/admin/highAvailability/nameNode/wizard_controller.js -- diff --git a/ambari-web/app/controllers/main/admin/highAvailability/nameNode/wizard_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/nameNode/wizard_controller.js index d725bde..ae2fa60 100644 --- a/ambari-web/app/controllers/main/admin/highAvailability/nameNode/wizard_controller.js +++ b/ambari-web/app/controllers/main/admin/highAvailability/nameNode/wizard_controller.js @@ -101,6 +101,11 @@ App.HighAvailabilityWizardController = App.WizardController.extend({ this.set('content.masterComponentHosts', masterComponentHosts); }, + clearMasterComponentHosts: function() { +this.set('content.masterComponentHosts', null); +this.setDBProperty('masterComponentHosts', null); + }, + saveHdfsUser: function () { App.db.setHighAvailabilityWizardHdfsUser(this.get('content.hdfsUser')); }, http://git-wip-us.apache.org/repos/asf/ambari/blob/75bce8ae/ambari-web/app/routes/high_availability_routes.js -- diff --git a/ambari-web/app/routes/high_availability_routes.js b/ambari-web/app/routes/high_availability_routes.js index c91929a..1e25ab3 100644 
--- a/ambari-web/app/routes/high_availability_routes.js +++ b/ambari-web/app/routes/high_availability_routes.js @@ -130,6 +130,10 @@ module.exports = App.WizardRoute.extend({ step2: Em.Route.extend({ route: '/step2', +enter: function(router) { + var controller = router.get('highAvailabilityWizardController'); + controller.clearMasterComponentHosts(); +}, connectOutlets: function (router) { var controller = router.get('highAvailabilityWizardController'); controller.dataLoading().done(function () { @@ -137,7 +141,7 @@ module.exports = App.WizardRoute.extend({ controller.loadAllPriorSteps().done(function () { controller.connectOutlet('highAvailabilityWizardStep2', controller.get('content')); }); - }) + }); }, unroutePath: function () { return false; @@ -173,7 +177,7 @@ module.exports = App.WizardRoute.extend({ controller.loadAllPriorSteps().done(function () { controller.connectOutlet('highAvailabilityWizardStep3', controller.get('content')); }); - }) + }); }, unroutePath: function () { return false;
[1/2] ambari git commit: AMBARI-11693. Service account issues (alexantonenko)
Repository: ambari Updated Branches: refs/heads/trunk 36a1c6699 -> f9ed7a3a7 AMBARI-11693. Service account issues (alexantonenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f9ed7a3a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f9ed7a3a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f9ed7a3a Branch: refs/heads/trunk Commit: f9ed7a3a7e285386769486ce704a06628c428048 Parents: 75bce8a Author: Alex Antonenko Authored: Thu Jun 4 18:08:47 2015 +0300 Committer: Alex Antonenko Committed: Thu Jun 4 18:38:26 2015 +0300 -- ambari-web/app/controllers/wizard.js | 3 +++ ambari-web/app/utils/config.js | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/f9ed7a3a/ambari-web/app/controllers/wizard.js -- diff --git a/ambari-web/app/controllers/wizard.js b/ambari-web/app/controllers/wizard.js index 1177272..6693ff7 100644 --- a/ambari-web/app/controllers/wizard.js +++ b/ambari-web/app/controllers/wizard.js @@ -844,6 +844,9 @@ App.WizardController = Em.Controller.extend(App.LocalStorage, App.ThemesMappingM property.supportsFinal = Boolean(supportsFinal.find(function (configType) { return property.filename.startsWith(configType); })); +if (property.serviceName == 'MISC' && property.name == 'yarn_user') { + property.supportsFinal = false; +} }); loadAdvancedConfigResult.pushObjects(properties); }); http://git-wip-us.apache.org/repos/asf/ambari/blob/f9ed7a3a/ambari-web/app/utils/config.js -- diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js index 15522ac..4b0631b 100644 --- a/ambari-web/app/utils/config.js +++ b/ambari-web/app/utils/config.js @@ -1059,6 +1059,7 @@ App.config = Em.Object.create({ 'user_group': 'Hadoop Group', 'mapred_user': 'MapReduce User', 'zk_user': 'ZooKeeper User', + 'metadata_user': 'Atlas User', 'ignore_groupsusers_create': 'Skip group modifications during install', 'override_hbase_uid': 
'Have Ambari manage UIDs' }; @@ -1082,7 +1083,6 @@ App.config = Em.Object.create({ } if (config.property_name == 'proxyuser_group') propertyData.belongsToService = proxyUserGroupServices; } - if (config.property_type.contains('PASSWORD')) { propertyData.displayType = "password"; }
ambari git commit: AMBARI-11675 - Hive Upgrade Fails Because Of Missing Database Library (jonathanhurley)
Repository: ambari Updated Branches: refs/heads/trunk cc6f4a682 -> 36a1c6699 AMBARI-11675 - Hive Upgrade Fails Because Of Missing Database Library (jonathanhurley) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/36a1c669 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/36a1c669 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/36a1c669 Branch: refs/heads/trunk Commit: 36a1c669970bf219c01157b40385218608685a38 Parents: cc6f4a6 Author: Jonathan Hurley Authored: Wed Jun 3 21:50:00 2015 -0400 Committer: Jonathan Hurley Committed: Thu Jun 4 11:20:33 2015 -0400 -- .../package/scripts/hive_metastore.py | 20 .../stacks/2.1/HIVE/test_hive_metastore.py | 52 2 files changed, 72 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/36a1c669/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py index 1813ee3..e6f3e97 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py @@ -17,6 +17,8 @@ See the License for the specific language governing permissions and limitations under the License. 
""" +import os + from resource_management.core.logger import Logger from resource_management.core.resources.system import Execute from resource_management.libraries.script import Script @@ -32,6 +34,7 @@ from resource_management.libraries.functions.security_commons import validate_se from resource_management.libraries.functions.security_commons import FILE_TYPE_XML from hive import hive +from hive import jdbc_connector from hive_service import hive_service from ambari_commons.os_family_impl import OsFamilyImpl from ambari_commons import OSConst @@ -149,11 +152,16 @@ class HiveMetastoreDefault(HiveMetastore): else: self.put_structured_out({"securityState": "UNSECURED"}) + def upgrade_schema(self, env): """ Executes the schema upgrade binary. This is its own function because it could be called as a standalone task from the upgrade pack, but is safe to run it for each metastore instance. + +The metastore schema upgrade requires a database driver library for most +databases. During an upgrade, it's possible that the library is not present, +so this will also attempt to copy/download the appropriate driver. 
""" Logger.info("Upgrading Hive Metastore") import params @@ -163,6 +171,18 @@ class HiveMetastoreDefault(HiveMetastore): kinit_command=format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal}; ") Execute(kinit_command,user=params.smokeuser) +# ensure that the JDBC drive is present for the schema tool; if it's not +# present, then download it first +if params.hive_jdbc_driver in params.hive_jdbc_drivers_list and params.hive_use_existing_db: + target_directory = format("/usr/hdp/{version}/hive/lib") + if not os.path.exists(params.target): +# download it +jdbc_connector() + + Execute(('cp', params.target, target_directory), +path=["/bin", "/usr/bin/"], sudo = True) + +# build the schema tool command binary = format("/usr/hdp/{version}/hive/bin/schematool") # the conf.server directory changed locations between HDP 2.2 and 2.3 http://git-wip-us.apache.org/repos/asf/ambari/blob/36a1c669/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py -- diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py index 246f206..5b924ae 100644 --- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py +++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py @@ -22,6 +22,7 @@ import os from mock.mock import MagicMock, call, patch from stacks.utils.RMFTestCase import * +@patch("platform.linux_distribution", new = MagicMock(return_value="Linux")) class TestHiveMetastore(RMFTestCase): COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package" STACK_VERSION = "2.0.6" @@ -525,3 +526,54 @@ class TestHiveMetastore(RMFTestCase): self.assertEquals( "conf-select set-conf-dir --package hive --stack-version 2.3.0.0-1234 --conf-version 0", mocks_dict['call'].call_args_list
ambari git commit: AMBARI-11691. Hive Start with non-root ambari-agent fail (aonishuk)
Repository: ambari Updated Branches: refs/heads/trunk 09f6e93f5 -> cc6f4a682 AMBARI-11691. Hive Start with non-root ambari-agent fail (aonishuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cc6f4a68 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cc6f4a68 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cc6f4a68 Branch: refs/heads/trunk Commit: cc6f4a68209fedc97bcc2fb15697c1f176d6b183 Parents: 09f6e93 Author: Andrew Onishuk Authored: Thu Jun 4 17:51:35 2015 +0300 Committer: Andrew Onishuk Committed: Thu Jun 4 17:51:35 2015 +0300 -- .../common-services/HIVE/0.12.0.2.0/package/scripts/hive.py| 1 + .../src/test/python/stacks/2.0.6/HIVE/test_hive_client.py | 2 ++ .../src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py | 6 +- .../src/test/python/stacks/2.0.6/HIVE/test_hive_server.py | 2 ++ .../src/test/python/stacks/2.1/HIVE/test_hive_metastore.py | 2 ++ 5 files changed, 12 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/cc6f4a68/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py index 32edf71..888a4c1 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py @@ -253,6 +253,7 @@ def hive(name=None): File(format("/usr/lib/ambari-agent/{check_db_connection_jar_name}"), content = DownloadSource(format("{jdk_location}{check_db_connection_jar_name}")), + mode = 0644, ) if name == 'metastore': http://git-wip-us.apache.org/repos/asf/ambari/blob/cc6f4a68/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py -- diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py index 11e1d16..7e1fcca 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py @@ -95,6 +95,7 @@ class TestHiveClient(RMFTestCase): ) self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar', content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'), +mode = 0644, ) self.assertNoMoreResources() @@ -170,6 +171,7 @@ class TestHiveClient(RMFTestCase): ) self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar', content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'), +mode = 0644, ) self.assertNoMoreResources() http://git-wip-us.apache.org/repos/asf/ambari/blob/cc6f4a68/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py index e46311c..2d049a9 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py @@ -220,6 +220,7 @@ class TestHiveMetastore(RMFTestCase): ) self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar', content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'), +mode = 0644, ) self.assertResourceCalled('File', '/tmp/start_metastore_script', content = StaticFile('startMetastore.sh'), @@ -323,6 +324,7 @@ class TestHiveMetastore(RMFTestCase): ) self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar', content = 
DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'), +mode = 0644, ) self.assertResourceCalled('File', '/tmp/start_metastore_script', content = StaticFile('startMetastore.sh'), @@ -449,7 +451,9 @@ class TestHiveMetastore(RMFTestCase): mode = 0644) self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar', -content = DownloadSource('http://c6
ambari git commit: AMBARI-11690. Move Namenode hung at Configure Component step (onechiporenko)
Repository: ambari Updated Branches: refs/heads/trunk 4117e4bee -> 09f6e93f5 AMBARI-11690. Move Namenode hung at Configure Component step (onechiporenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/09f6e93f Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/09f6e93f Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/09f6e93f Branch: refs/heads/trunk Commit: 09f6e93f59847af3db424ea07eedaba2c98b0bc1 Parents: 4117e4b Author: Oleg Nechiporenko Authored: Thu Jun 4 17:42:09 2015 +0300 Committer: Oleg Nechiporenko Committed: Thu Jun 4 17:42:09 2015 +0300 -- .../main/service/reassign/step4_controller.js | 3 +++ .../main/service/reassign/step4_controller_test.js | 17 - 2 files changed, 19 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/09f6e93f/ambari-web/app/controllers/main/service/reassign/step4_controller.js -- diff --git a/ambari-web/app/controllers/main/service/reassign/step4_controller.js b/ambari-web/app/controllers/main/service/reassign/step4_controller.js index fe547c3..edc9b8f 100644 --- a/ambari-web/app/controllers/main/service/reassign/step4_controller.js +++ b/ambari-web/app/controllers/main/service/reassign/step4_controller.js @@ -506,6 +506,9 @@ App.ReassignMasterWizardStep4Controller = App.HighAvailabilityProgressPageContro if (App.Service.find().someProperty('serviceName', 'HBASE')) { urlParams.push('(type=hbase-site&tag=' + data.Clusters.desired_configs['hbase-site'].tag + ')'); } +if (App.Service.find().someProperty('serviceName', 'ACCUMULO')) { + urlParams.push('(type=accumulo-site&tag=' + data.Clusters.desired_configs['accumulo-site'].tag + ')'); +} break; case 'SECONDARY_NAMENODE': urlParams.push('(type=hdfs-site&tag=' + data.Clusters.desired_configs['hdfs-site'].tag + ')'); http://git-wip-us.apache.org/repos/asf/ambari/blob/09f6e93f/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js -- diff --git 
a/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js b/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js index 8e4e0d9..0713abf 100644 --- a/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js +++ b/ambari-web/test/controllers/main/service/reassign/step4_controller_test.js @@ -511,7 +511,8 @@ describe('App.ReassignMasterWizardStep4Controller', function () { 'yarn-site': {tag: 5}, 'oozie-site': {tag: 6}, 'webhcat-site': {tag: 7}, - 'yarn-env': {tag: 8} + 'yarn-env': {tag: 8}, + 'accumulo-site': {tag: 9} } } }; @@ -544,6 +545,20 @@ describe('App.ReassignMasterWizardStep4Controller', function () { "(type=hbase-site&tag=3)" ]); }); + +it('get config of NAMENODE when ACCUMULO installed', function () { + services = [ +{ + serviceName: 'ACCUMULO' +} + ]; + expect(controller.getConfigUrlParams('NAMENODE', data)).to.eql([ +"(type=hdfs-site&tag=1)", +"(type=core-site&tag=2)", +"(type=accumulo-site&tag=9)" + ]); +}); + }); describe('#onLoadConfigsTags()', function () {
ambari git commit: AMBARI-11689. All status commands fail in non-root agent mode (aonishuk)
Repository: ambari Updated Branches: refs/heads/trunk b3ed80611 -> 4117e4bee AMBARI-11689. All status commands fail in non-root agent mode (aonishuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4117e4be Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4117e4be Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4117e4be Branch: refs/heads/trunk Commit: 4117e4beeabe83ab7b9657e8870ed52e43b5ecfd Parents: b3ed806 Author: Andrew Onishuk Authored: Thu Jun 4 17:34:44 2015 +0300 Committer: Andrew Onishuk Committed: Thu Jun 4 17:34:44 2015 +0300 -- .../python/resource_management/core/sudo.py | 38 .../libraries/functions/check_process_status.py | 2 +- .../libraries/functions/flume_agent_helper.py | 4 ++- 3 files changed, 27 insertions(+), 17 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/4117e4be/ambari-common/src/main/python/resource_management/core/sudo.py -- diff --git a/ambari-common/src/main/python/resource_management/core/sudo.py b/ambari-common/src/main/python/resource_management/core/sudo.py index 4ceaaed..f864d64 100644 --- a/ambari-common/src/main/python/resource_management/core/sudo.py +++ b/ambari-common/src/main/python/resource_management/core/sudo.py @@ -19,12 +19,14 @@ Ambari Agent """ +import time import os import tempfile import shutil import stat from resource_management.core import shell from resource_management.core.logger import Logger +from resource_management.core.exceptions import Fail from ambari_commons.os_check import OSCheck if os.geteuid() == 0: @@ -101,6 +103,10 @@ if os.geteuid() == 0: self.st_uid, self.st_gid, self.st_mode = stat_val.st_uid, stat_val.st_gid, stat_val.st_mode & 0 return Stat(path) + def kill(pid, signal): +os.kill(pid, signal) + + else: # os.chown replacement @@ -143,28 +149,23 @@ else: def rmtree(path): shell.checked_call(["rm","-rf", path], sudo=True) - def stat(path): -class Stat: - def __init__(self, path): 
-stat_val = os.stat(path) -self.st_uid, self.st_gid, self.st_mode = stat_val.st_uid, stat_val.st_gid, stat_val.st_mode & 0 - -return Stat(path) - # fp.write replacement def create_file(filename, content, encoding=None): """ if content is None, create empty file """ -tmpf = tempfile.NamedTemporaryFile() +content = content if content else "" +content = content.encode(encoding) if encoding else content -if content: - content = content.encode(encoding) if encoding else content - with open(tmpf.name, "wb") as fp: -fp.write(content) +tmpf_name = tempfile.gettempdir() + os.sep + tempfile.template + str(time.time()) -with tmpf: - shell.checked_call(["cp", "-f", tmpf.name, filename], sudo=True) +try: + with open(tmpf_name, "wb") as fp: +fp.write(content) + + shell.checked_call(["cp", "-f", tmpf_name, filename], sudo=True) +finally: + os.unlink(tmpf_name) # fp.read replacement def read_file(filename, encoding=None): @@ -217,3 +218,10 @@ else: stat_val = os.stat(path) self.st_uid, self.st_gid, self.st_mode = stat_val.st_uid, stat_val.st_gid, stat_val.st_mode & 0 return Stat(path) + + # os.kill replacement + def kill(pid, signal): +try: + shell.checked_call(["kill", "-"+str(signal), str(pid)], sudo=True) +except Fail as ex: + raise OSError(str(ex)) \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/4117e4be/ambari-common/src/main/python/resource_management/libraries/functions/check_process_status.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/check_process_status.py b/ambari-common/src/main/python/resource_management/libraries/functions/check_process_status.py index 6f39c28..7961f00 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/check_process_status.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/check_process_status.py @@ -54,7 +54,7 @@ def check_process_status(pid_file): # If sig is 0, then no signal is sent, but error checking is still # 
performed; this can be used to check for the existence of a # process ID or process group ID. -os.kill(pid, 0) +sudo.kill(pid, 0) except OSError: Logger.info("Process with pid {0} is not running. Stale pid file" " at {1}".format(pid, pid_file)) http://git-wip-us.apache.org/repos/asf/ambari/blob/4117e4be/ambari-common/src/main/p
ambari git commit: AMBARI-11683 RU: Configs merge check popup is displayed even if there are no conflicts. (ababiichuk)
Repository: ambari Updated Branches: refs/heads/trunk f1625d780 -> b3ed80611 AMBARI-11683 RU: Configs merge check popup is displayed even if there are no conflict. (ababiichuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b3ed8061 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b3ed8061 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b3ed8061 Branch: refs/heads/trunk Commit: b3ed8061144d6bf5c35b68e4ab9e6ccd141b7884 Parents: f1625d7 Author: aBabiichuk Authored: Thu Jun 4 15:53:42 2015 +0300 Committer: aBabiichuk Committed: Thu Jun 4 16:35:04 2015 +0300 -- .../main/admin/stack_and_upgrade_controller.js | 4 ++-- .../admin/stack_and_upgrade_controller_test.js | 17 - 2 files changed, 18 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/b3ed8061/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js -- diff --git a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js index 223685b..3bdd317 100644 --- a/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js +++ b/ambari-web/app/controllers/main/admin/stack_and_upgrade_controller.js @@ -459,9 +459,9 @@ App.MainAdminStackAndUpgradeController = Em.Controller.extend(App.LocalStorage, var title = Em.I18n.t('popup.clusterCheck.Upgrade.title'); var alert = Em.I18n.t('popup.clusterCheck.Upgrade.alert'); App.showClusterCheckPopup(data, header, title, alert); -} else if (data.items.someProperty('UpgradeChecks.status', "WARNING") && data.items.someProperty('UpgradeChecks.id', "CONFIG_MERGE")) { +} else if (data.items.someProperty('UpgradeChecks.id', "CONFIG_MERGE") && Em.get(data.items.findProperty('UpgradeChecks.id', "CONFIG_MERGE"), 'UpgradeChecks.status') == 'WARNING') { var self = this, -configsMergeCheckData = data.items.findProperty('UpgradeChecks.id', 
"CONFIG_MERGE").UpgradeChecks.failed_detail; +configsMergeCheckData = Em.get(data.items.findProperty('UpgradeChecks.id', "CONFIG_MERGE"), 'UpgradeChecks.failed_detail'); this.set('requestInProgress', false); App.showUpgradeConfigsMergePopup(configsMergeCheckData, params.label, function () { self.upgrade(params); http://git-wip-us.apache.org/repos/asf/ambari/blob/b3ed8061/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js -- diff --git a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js index 7041a38..b8b4fe3 100644 --- a/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js +++ b/ambari-web/test/controllers/main/admin/stack_and_upgrade_controller_test.js @@ -279,7 +279,22 @@ describe('App.MainAdminStackAndUpgradeController', function() { showClusterCheckPopupCalledCount: 0, showUpgradeConfigsMergePopupCalledCount: 1, upgradeCalledCount: 0, -title: 'warnings popup is displayed is configs merge conflicts are present' +title: 'warnings popup is displayed if configs merge conflicts are present' + }, + { +check: { + "check": "Configuration Merge Check", + "status": "PASS", + "reason": "", + "failed_on": [], + "failed_detail": [], + "check_type": "CLUSTER", + "id": "CONFIG_MERGE" +}, +showClusterCheckPopupCalledCount: 0, +showUpgradeConfigsMergePopupCalledCount: 0, +upgradeCalledCount: 1, +title: 'upgrade is started if configs merge conflicts are absent' }, { check: {
ambari git commit: Revert "AMBARI-11672. Ambari 2.0.1 server doesn't start after upgrade from HDP 2.1.7 to 2.2.4. (swagle)"
Repository: ambari Updated Branches: refs/heads/trunk d46949d91 -> f1625d780 Revert "AMBARI-11672. Ambari 2.0.1 server doesn't start after upgrade from HDP 2.1.7 to 2.2.4. (swagle)" This reverts commit 33f24d57537c5055b896d5b011e535be96d31c8f. Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f1625d78 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f1625d78 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f1625d78 Branch: refs/heads/trunk Commit: f1625d780b3b70a53b57da703c75f1716b15a3d9 Parents: d46949d Author: Vitaly Brodetskyi Authored: Thu Jun 4 07:47:52 2015 +0300 Committer: Vitaly Brodetskyi Committed: Thu Jun 4 07:49:20 2015 +0300 -- .../server/upgrade/UpgradeCatalog210.java | 94 +++- .../server/upgrade/UpgradeCatalog210Test.java | 81 - 2 files changed, 11 insertions(+), 164 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/f1625d78/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java index 2e40461..e331475 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java @@ -31,20 +31,14 @@ import org.apache.ambari.server.controller.AmbariManagementController; import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo; import org.apache.ambari.server.orm.dao.AlertDefinitionDAO; import org.apache.ambari.server.orm.dao.DaoUtils; -import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO; import org.apache.ambari.server.orm.dao.StackDAO; import org.apache.ambari.server.orm.entities.AlertDefinitionEntity; -import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity; -import 
org.apache.ambari.server.orm.entities.HostComponentStateEntity; -import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity; -import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK; import org.apache.ambari.server.orm.entities.StackEntity; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Service; import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.state.stack.OsFamily; -import org.apache.ambari.server.utils.VersionUtils; import org.apache.commons.lang.StringUtils; import org.eclipse.persistence.internal.databaseaccess.FieldTypeDefinition; import org.slf4j.Logger; @@ -52,9 +46,6 @@ import org.slf4j.LoggerFactory; import javax.persistence.EntityManager; import javax.persistence.Query; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaDelete; -import javax.persistence.criteria.Root; import java.sql.ResultSet; import java.sql.SQLException; import java.text.MessageFormat; @@ -480,21 +471,21 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog { dbAccessor.executeQuery("ALTER TABLE " + SERVICE_CONFIG_HOSTS_TABLE + " DROP CONSTRAINT serviceconfighosts_pkey"); } dbAccessor.executeQuery("ALTER TABLE " + CONFIG_GROUP_HOST_MAPPING_TABLE + - " ADD CONSTRAINT configgrouphostmapping_pkey PRIMARY KEY (config_group_id, host_id)"); +" ADD CONSTRAINT configgrouphostmapping_pkey PRIMARY KEY (config_group_id, host_id)"); dbAccessor.executeQuery("ALTER TABLE " + CLUSTER_HOST_MAPPING_TABLE + - " ADD CONSTRAINT clusterhostmapping_pkey PRIMARY KEY (cluster_id, host_id)"); +" ADD CONSTRAINT clusterhostmapping_pkey PRIMARY KEY (cluster_id, host_id)"); dbAccessor.executeQuery("ALTER TABLE " + HOST_CONFIG_MAPPING_TABLE + - " ADD CONSTRAINT hostconfigmapping_pkey PRIMARY KEY (cluster_id, host_id, type_name, create_timestamp)"); +" ADD CONSTRAINT hostconfigmapping_pkey PRIMARY KEY 
(cluster_id, host_id, type_name, create_timestamp)"); dbAccessor.executeQuery("ALTER TABLE " + HOST_COMPONENT_STATE_TABLE + - " ADD CONSTRAINT hostcomponentstate_pkey PRIMARY KEY (cluster_id, component_name, host_id, service_name)"); +" ADD CONSTRAINT hostcomponentstate_pkey PRIMARY KEY (cluster_id, component_name, host_id, service_name)"); dbAccessor.executeQuery("ALTER TABLE " + HOST_COMPONENT_DESIRED_STATE_TABLE + - " ADD CONSTRAINT hostcomponentdesiredstate_pkey PRIMARY KEY (cluster_id, component_name, host_id, service_name)"); +" ADD CONSTRAINT hostcomponentdesiredstate_pkey PRIMARY KEY (cluster_id, compone
ambari git commit: AMBARI-11682. Yarn "Capacity Scheduler"-config has Undo-button on the installer (onechiporenko)
Repository: ambari Updated Branches: refs/heads/trunk 2209b7203 -> d46949d91 AMBARI-11682. Yarn "Capacity Scheduler"-config has Undo-button on the installer (onechiporenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d46949d9 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d46949d9 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d46949d9 Branch: refs/heads/trunk Commit: d46949d9189021af6b4f1911c53fe7a53b4661e2 Parents: 2209b72 Author: Oleg Nechiporenko Authored: Thu Jun 4 15:25:48 2015 +0300 Committer: Oleg Nechiporenko Committed: Thu Jun 4 15:25:48 2015 +0300 -- ambari-web/app/utils/config.js | 8 ++-- ambari-web/test/utils/config_test.js | 3 ++- 2 files changed, 8 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/d46949d9/ambari-web/app/utils/config.js -- diff --git a/ambari-web/app/utils/config.js b/ambari-web/app/utils/config.js index 551dfa0..15522ac 100644 --- a/ambari-web/app/utils/config.js +++ b/ambari-web/app/utils/config.js @@ -1434,8 +1434,12 @@ App.config = Em.Object.create({ var savedIsFinal = fileConfigs.someProperty('savedIsFinal', true); var recommendedIsFinal = fileConfigs.someProperty('recommendedIsFinal', true); complexConfig.value = value; - complexConfig.savedValue = savedValue; - complexConfig.recommendedValue = recommendedValue; + if (savedValue) { +complexConfig.savedValue = savedValue; + } + if (recommendedValue) { +complexConfig.recommendedValue = recommendedValue; + } complexConfig.isFinal = isFinal; complexConfig.savedIsFinal = savedIsFinal; complexConfig.recommendedIsFinal = recommendedIsFinal; http://git-wip-us.apache.org/repos/asf/ambari/blob/d46949d9/ambari-web/test/utils/config_test.js -- diff --git a/ambari-web/test/utils/config_test.js b/ambari-web/test/utils/config_test.js index 48e27d1..b5f717f 100644 --- a/ambari-web/test/utils/config_test.js +++ b/ambari-web/test/utils/config_test.js @@ -122,7 +122,8 
@@ describe('App.config', function () { var result = App.config.fileConfigsIntoTextarea.call(App.config, configs, filename); expect(result.length).to.equal(1); expect(result[0].value).to.equal(''); - expect(result[0].recommendedValue).to.equal(''); + expect(Em.isNone(result[0].recommendedValue)).to.be.true; + expect(Em.isNone(result[0].savedValue)).to.be.true; }); it("filename has configs that shouldn't be included in textarea", function () { var configs = [
ambari git commit: AMBARI-11667. Start NN in HA, Mapreduce, Oozie and some others are failing with HTTP SPNEGO configured. (vbrodetskyi)
Repository: ambari Updated Branches: refs/heads/trunk 0dc911e3a -> 2209b7203 AMBARI-11667. Start NN in HA, Mapreduce, Oozie and some other are failing with HTTP SPNEGO configured.(vbrodetskyi) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2209b720 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2209b720 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2209b720 Branch: refs/heads/trunk Commit: 2209b7203d92e25efd0b12fd1eb1bde61a039c72 Parents: 0dc911e Author: Vitaly Brodetskyi Authored: Thu Jun 4 05:06:27 2015 +0300 Committer: Vitaly Brodetskyi Committed: Thu Jun 4 05:06:27 2015 +0300 -- .../libraries/functions/jmx.py | 19 --- .../libraries/functions/namenode_ha_utils.py | 14 +++--- .../libraries/providers/hdfs_resource.py | 6 -- .../package/scripts/namenode_ha_state.py | 4 ++-- .../2.1.0.2.0/package/scripts/params_linux.py| 4 .../python/stacks/2.0.6/HDFS/test_journalnode.py | 6 -- 6 files changed, 37 insertions(+), 16 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/2209b720/ambari-common/src/main/python/resource_management/libraries/functions/jmx.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/jmx.py b/ambari-common/src/main/python/resource_management/libraries/functions/jmx.py index a3cfb1b..be346e5 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/jmx.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/jmx.py @@ -19,13 +19,26 @@ limitations under the License. ''' import urllib2 import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set. 
+from resource_management.core import shell +from resource_management.core.logger import Logger -def get_value_from_jmx(qry, property): +def get_value_from_jmx(qry, property, security_enabled, run_user, is_https_enabled): try: -response = urllib2.urlopen(qry) -data = response.read() +if security_enabled: + cmd = ['curl', '--negotiate', '-u', ':'] +else: + cmd = ['curl'] + +if is_https_enabled: + cmd.append("-k") + +cmd.append(qry) + +_, data = shell.checked_call(cmd, user=run_user, quiet=False) + if data: data_dict = json.loads(data) return data_dict["beans"][0][property] except: +Logger.exception("Getting jmx metrics from NN failed. URL: " + str(qry)) return None \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/2209b720/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py -- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py b/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py index ab53ba7..0d9e28b 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/namenode_ha_utils.py @@ -31,7 +31,7 @@ NAMENODE_HTTP_FRAGMENT = 'dfs.namenode.http-address.{0}.{1}' NAMENODE_HTTPS_FRAGMENT = 'dfs.namenode.https-address.{0}.{1}' JMX_URI_FRAGMENT = "http://{0}/jmx?qry=Hadoop:service=NameNode,name=NameNodeStatus"; -def get_namenode_states(hdfs_site): +def get_namenode_states(hdfs_site, security_enabled, run_user): """ return format [('nn1', 'hdfs://hostname1:port1'), ('nn2', 'hdfs://hostname2:port2')] , [], [] """ @@ -59,7 +59,7 @@ def get_namenode_states(hdfs_site): value = str(hdfs_site[key]) jmx_uri = JMX_URI_FRAGMENT.format(value) - state = get_value_from_jmx(jmx_uri,'State') + state = get_value_from_jmx(jmx_uri, 'State', security_enabled, run_user, is_https_enabled) if state == 
HDFS_NN_STATE_ACTIVE: active_namenodes.append((nn_unique_id, value)) @@ -86,17 +86,17 @@ def is_ha_enabled(hdfs_site): return False -def get_active_namenode(hdfs_site): +def get_active_namenode(hdfs_site, security_enabled, run_user): """ return format is nn_unique_id and it's address ('nn1', 'hdfs://hostname1:port1') """ - active_namenodes = get_namenode_states(hdfs_site)[0] + active_namenodes = get_namenode_states(hdfs_site, security_enabled, run_user)[0] if active_namenodes: return active_namenodes[0] else: return UnknownConfiguration('fs_root') -def get_property_for_active_namenode(hdfs_site, property_name): +def get_property_for_active_namenode(hdfs_site, property_name,
ambari git commit: AMBARI-11642. Oozie RU prereq is not a default value, need to add -Doozie.connection.retry.count=5 (dlysnichenko)
Repository: ambari Updated Branches: refs/heads/trunk 705c35a8b -> 0dc911e3a AMBARI-11642. Oozie RU prereq is not a default value, need to add Doozie.connection.retry.count=5 (dlysnichenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0dc911e3 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0dc911e3 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0dc911e3 Branch: refs/heads/trunk Commit: 0dc911e3a83eb7352f2ea32d0381e1b23c2cdafc Parents: 705c35a Author: Lisnichenko Dmitro Authored: Thu Jun 4 12:14:03 2015 +0300 Committer: Lisnichenko Dmitro Committed: Thu Jun 4 12:14:03 2015 +0300 -- .../common-services/OOZIE/5.0.0.2.3/configuration/oozie-env.xml | 4 1 file changed, 4 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/0dc911e3/ambari-server/src/main/resources/common-services/OOZIE/5.0.0.2.3/configuration/oozie-env.xml -- diff --git a/ambari-server/src/main/resources/common-services/OOZIE/5.0.0.2.3/configuration/oozie-env.xml b/ambari-server/src/main/resources/common-services/OOZIE/5.0.0.2.3/configuration/oozie-env.xml index ac12d7c..fe80bf5 100644 --- a/ambari-server/src/main/resources/common-services/OOZIE/5.0.0.2.3/configuration/oozie-env.xml +++ b/ambari-server/src/main/resources/common-services/OOZIE/5.0.0.2.3/configuration/oozie-env.xml @@ -136,6 +136,10 @@ export OOZIE_ADMIN_PORT={{oozie_server_admin_port}} # # export OOZIE_BASE_URL="http://${OOZIE_HTTP_HOSTNAME}:${OOZIE_HTTP_PORT}/oozie"; export JAVA_LIBRARY_PATH={{hadoop_lib_home}}/native/Linux-amd64-64 + +# At least 1 minute of retry time to account for server downtime during +# upgrade/downgrade +export OOZIE_CLIENT_OPTS="${OOZIE_CLIENT_OPTS} -Doozie.connection.retry.count=5 "