ambari git commit: AMBARI-10480. BE: Extend stack-advisor to recommend property_value_attributes for Hive/Tez - fixes (srimanth)
Repository: ambari Updated Branches: refs/heads/trunk bcc80c684 -> 2a7ec815f AMBARI-10480. BE: Extend stack-advisor to recommend property_value_attributes for Hive/Tez - fixes (srimanth) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2a7ec815 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2a7ec815 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2a7ec815 Branch: refs/heads/trunk Commit: 2a7ec815fede26a3282983e6b61fc0cd3d487d96 Parents: bcc80c6 Author: Srimanth Gunturi Authored: Wed Apr 15 18:58:14 2015 -0700 Committer: Srimanth Gunturi Committed: Wed Apr 15 18:58:24 2015 -0700 -- .../stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml | 4 ++-- .../src/main/resources/stacks/HDP/2.2/services/stack_advisor.py | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/2a7ec815/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml index f849c87..1e0feb2 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml +++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-env.xml @@ -90,11 +90,11 @@ export METASTORE_PORT={{hive_metastore_port}} value-list - On + on On - Off + off Off http://git-wip-us.apache.org/repos/asf/ambari/blob/2a7ec815/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py -- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py index e5f8b5c..c3bf9ff 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py +++ 
b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py @@ -197,6 +197,9 @@ class HDP22StackAdvisor(HDP21StackAdvisor): container_size = "512" +if not "yarn-site" in configurations: + self.recommendYARNConfigurations(configurations, clusterData, services, hosts) + if "yarn-site" in configurations and \ "yarn.scheduler.minimum-allocation-mb" in configurations["yarn-site"]["properties"]: container_size = configurations["yarn-site"]["properties"]["yarn.scheduler.minimum-allocation-mb"]
ambari git commit: AMBARI-10509. Delete selected widgets from Widget Browser from the widget layout. (XIWANG)
Repository: ambari Updated Branches: refs/heads/trunk 546475471 -> bcc80c684 AMBARI-10509. Delete selected widgets from Widget Browser from the widget layout.(XIWANG) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bcc80c68 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bcc80c68 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bcc80c68 Branch: refs/heads/trunk Commit: bcc80c68424cb2b310a34b5ac32c2043858f55c4 Parents: 5464754 Author: Xi Wang Authored: Wed Apr 15 15:52:46 2015 -0700 Committer: Xi Wang Committed: Wed Apr 15 17:10:36 2015 -0700 -- .../controllers/main/service/info/summary.js| 105 ++- ambari-web/app/messages.js | 2 + .../app/styles/enhanced_service_dashboard.less | 3 +- .../modal_popups/widget_browser_popup.hbs | 14 ++- ambari-web/app/utils/ajax/ajax.js | 10 ++ 5 files changed, 103 insertions(+), 31 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/bcc80c68/ambari-web/app/controllers/main/service/info/summary.js -- diff --git a/ambari-web/app/controllers/main/service/info/summary.js b/ambari-web/app/controllers/main/service/info/summary.js index 878fb0c..5a37bc3 100644 --- a/ambari-web/app/controllers/main/service/info/summary.js +++ b/ambari-web/app/controllers/main/service/info/summary.js @@ -402,24 +402,26 @@ App.MainServiceInfoSummaryController = Em.Controller.extend({ var addedWidgetsNames = this.get('widgets').mapProperty('widgetName'); if (data.items[0] && data.items.length) { this.set("allSharedWidgets", -data.items.map(function (widget) { +data.items.filter(function (widget) { + return widget.Widgets.widget_type != "HEATMAP"; +}).map(function (widget) { var widgetType = widget.Widgets.widget_type; var widgetName = widget.Widgets.widget_name; - if (widgetType != "HEATMAP") { -return Em.Object.create({ - iconPath: "/img/widget-" + widgetType.toLowerCase() + ".png", - widgetName: widgetName, - displayName: widget.Widgets.display_name, - 
description: widget.Widgets.description, - widgetType: widgetType, - serviceName: widget.Widgets.metrics.mapProperty('service_name').uniq().join('-'), - added: addedWidgetsNames.contains(widgetName) -}); - } + return Em.Object.create({ +id: widget.Widgets.id, +widgetName: widgetName, +displayName: widget.Widgets.display_name, +description: widget.Widgets.description, +widgetType: widgetType, +iconPath: "/img/widget-" + widgetType.toLowerCase() + ".png", +serviceName: widget.Widgets.metrics.mapProperty('service_name').uniq().join('-'), +added: addedWidgetsNames.contains(widgetName), +isShared: true + }); }) ); - this.set('isAllSharedWidgetsLoaded', true); } +this.set('isAllSharedWidgetsLoaded', true); }, allSharedWidgets: [], @@ -449,24 +451,26 @@ App.MainServiceInfoSummaryController = Em.Controller.extend({ var addedWidgetsNames = this.get('widgets').mapProperty('widgetName'); if (data.items[0] && data.items.length) { this.set("mineWidgets", -data.items.map(function (widget) { +data.items.filter(function (widget) { + return widget.Widgets.widget_type != "HEATMAP"; +}).map(function (widget) { var widgetType = widget.Widgets.widget_type; var widgetName = widget.Widgets.widget_name; - if (widgetType != "HEATMAP") { -return Em.Object.create({ - iconPath: "/img/widget-" + widgetType.toLowerCase() + ".png", - widgetName: widgetName, - displayName: widget.Widgets.display_name, - description: widget.Widgets.description, - widgetType: widgetType, - serviceName: widget.Widgets.metrics.mapProperty('service_name').uniq().join('-'), - added: addedWidgetsNames.contains(widgetName) -}); - } + return Em.Object.create({ +id: widget.Widgets.id, +widgetName: widgetName, +displayName: widget.Widgets.display_name, +description: widget.Widgets.description, +widgetType: widgetType, +iconPath: "/img/widget-" + widgetType.toLowerCase() + ".png", +serviceName: widget.Widgets.metrics.mapProperty('service_name').uniq().join('-'), +added: addedWidgetsNames.contains(widgetName), +isShared: 
false + });
ambari git commit: AMBARI-10507. Local root user's group being assigned to hadoop. (Adam Westerman via yusaku)
Repository: ambari Updated Branches: refs/heads/branch-2.0.maint ee9fb1f4e -> bb58fe921 AMBARI-10507. Local root user's group being assigned to hadoop. (Adam Westerman via yusaku) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/bb58fe92 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/bb58fe92 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/bb58fe92 Branch: refs/heads/branch-2.0.maint Commit: bb58fe9214b310956a492c2cc8dc6ddd63de6f39 Parents: ee9fb1f Author: Yusaku Sako Authored: Wed Apr 15 14:15:39 2015 -0700 Committer: Yusaku Sako Committed: Wed Apr 15 16:52:51 2015 -0700 -- .../RANGER/0.4.0/configuration/admin-properties.xml | 5 + 1 file changed, 1 insertion(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/bb58fe92/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/admin-properties.xml -- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/admin-properties.xml b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/admin-properties.xml index ae03e2e..aefb200 100644 --- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/admin-properties.xml +++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/admin-properties.xml @@ -42,7 +42,6 @@ db_root_user root -USER Database admin user @@ -68,7 +67,6 @@ db_user rangeradmin -USER Database user-id used for the Ranger schema @@ -88,7 +86,6 @@ audit_db_user rangerlogger -USER Database user-id for storing auditlog information @@ -177,4 +174,4 @@ - \ No newline at end of file +
[2/8] ambari git commit: AMBARI-9993. Add support for management of Phoenix Query Server to HDP Stack (Nick Dimiduk via alejandro)
http://git-wip-us.apache.org/repos/asf/ambari/blob/54647547/ambari-server/src/test/python/stacks/2.3/configs/hbase_default.json -- diff --git a/ambari-server/src/test/python/stacks/2.3/configs/hbase_default.json b/ambari-server/src/test/python/stacks/2.3/configs/hbase_default.json new file mode 100644 index 000..ebf126b --- /dev/null +++ b/ambari-server/src/test/python/stacks/2.3/configs/hbase_default.json @@ -0,0 +1,417 @@ +{ +"configuration_attributes": { +"core-site": { +"final": { +"fs.defaultFS": "true" +} +}, +"hbase-policy": {}, +"hbase-log4j": {}, +"ranger-hdfs-plugin-properties": {}, +"hbase-env": {}, +"hdfs-site": { +"final": { +"dfs.support.append": "true", +"dfs.namenode.http-address": "true" +} +}, +"zoo.cfg": {}, +"hadoop-env": {}, +"hadoop-policy": {}, +"hdfs-log4j": {}, +"hbase-site": {}, +"ranger-hbase-plugin-properties": {}, +"zookeeper-env": {}, +"zookeeper-log4j": {}, +"cluster-env": {} +}, +"commandParams": { +"service_package_folder": "common-services/HBASE/0.96.0.2.0/package", +"script": "scripts/hbase_regionserver.py", +"hooks_folder": "HDP/2.0.6/hooks", +"version": "2.3.0.0-1606", +"excluded_hosts": "host1", +"command_timeout": "900", +"script_type": "PYTHON" +}, +"roleCommand": "CUSTOM_COMMAND", +"kerberosCommandParams": [], +"clusterName": "c1", +"hostname": "c6405.ambari.apache.org", +"hostLevelParams": { +"jdk_location": "http://c6405.ambari.apache.org:8080/resources/";, +"ambari_db_rca_password": "mapred", +"java_home": "/usr/jdk64/jdk1.8.0_40", +"ambari_db_rca_url": "jdbc:postgresql://c6405.ambari.apache.org/ambarirca", +"jce_name": "jce_policy-8.zip", +"custom_command": "RESTART", +"oracle_jdbc_url": "http://c6405.ambari.apache.org:8080/resources//ojdbc6.jar";, +"repo_info": 
"[{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.3.0.0-1606\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.3\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.3.0.0-1606\",\"baseSaved\":true},{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.20\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"baseSaved\":true}]";, +"group_list": "[\"hadoop\",\"users\"]", +"agentCacheDir": "/var/lib/ambari-agent/cache", +"stack_version": "2.3", +"stack_name": "HDP", +"db_name": "ambari", +"jdk_name": "jdk-8u40-linux-x64.tar.gz", +"ambari_db_rca_driver": "org.postgresql.Driver", +"java_version": "8", +"ambari_db_rca_username": "mapred", +"db_driver_filename": "mysql-connector-java.jar", +"user_list": "[\"zookeeper\",\"ambari-qa\",\"hdfs\",\"hbase\"]", +"mysql_jdbc_url": "http://c6405.ambari.apache.org:8080/resources//mysql-connector-java.jar"; +}, +"commandType": "EXECUTION_COMMAND", +"roleParams": { +"component_category": "SLAVE" +}, +"serviceName": "HBASE", +"role": "HBASE_REGIONSERVER", +"forceRefreshConfigTags": [], +"taskId": 54, +"public_hostname": "c6405.ambari.apache.org", +"configurations": { +"core-site": { +"io.serializations": "org.apache.hadoop.io.serializer.WritableSerialization", +"proxyuser_group": "users", +"fs.trash.interval": "360", +"ha.failover-controller.active-standby-elector.zk.op.retries": "120", +"hadoop.http.authentication.simple.anonymous.allowed": "true", +"hadoop.security.authentication": "simple", +"io.compression.codecs": 
"org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.SnappyCodec", +"ipc.client.connection.maxidletime": "3", +"mapreduce.jobtracker.webinterface.trusted": "false", +"hadoop.security.authorization": "false", +"net.topology.script.file.name": "/etc/hadoop/conf/topology_script.py", +"ipc.server.tcpnodelay": "true", +"ipc.client.connect.max.retries": "50", +"hadoop.security.auth_to_local": "\nDEFAULT", +"io.f
[1/8] ambari git commit: AMBARI-9993. Add support for management of Phoenix Query Server to HDP Stack (Nick Dimiduk via alejandro)
Repository: ambari Updated Branches: refs/heads/trunk b58969228 -> 546475471 http://git-wip-us.apache.org/repos/asf/ambari/blob/54647547/ambari-server/src/test/python/stacks/2.3/configs/hbase_secure.json -- diff --git a/ambari-server/src/test/python/stacks/2.3/configs/hbase_secure.json b/ambari-server/src/test/python/stacks/2.3/configs/hbase_secure.json new file mode 100644 index 000..ea57e66 --- /dev/null +++ b/ambari-server/src/test/python/stacks/2.3/configs/hbase_secure.json @@ -0,0 +1,738 @@ +{ +"configuration_attributes": { +"ranger-knox-plugin-properties": {}, +"gateway-log4j": {}, +"ranger-hdfs-plugin-properties": {}, +"hbase-policy": {}, +"kerberos-env": {}, +"storm-site": {}, +"hdfs-site": {}, +"storm-env": {}, +"hbase-site": {}, +"knox-env": {}, +"hadoop-policy": {}, +"hdfs-log4j": {}, +"ranger-hbase-plugin-properties": {}, +"krb5-conf": {}, +"ldap-log4j": {}, +"core-site": {}, +"hadoop-env": {}, +"zookeeper-log4j": {}, +"topology": {}, +"hbase-log4j": {}, +"oozie-site": {}, +"gateway-site": {}, +"hbase-env": {}, +"zookeeper-env": {}, +"zoo.cfg": {}, +"ranger-storm-plugin-properties": {}, +"webhcat-site": {}, +"users-ldif": {}, +"cluster-env": {} +}, +"commandParams": { +"service_package_folder": "common-services/HBASE/0.96.0.2.0/package", +"script": "scripts/hbase_regionserver.py", +"hooks_folder": "HDP/2.0.6/hooks", +"version": "2.3.0.0-1606", +"excluded_hosts": "host1", +"command_timeout": "900", +"script_type": "PYTHON" +}, +"roleCommand": "CUSTOM_COMMAND", +"kerberosCommandParams": [], +"clusterName": "c1", +"hostname": "c6405.ambari.apache.org", +"hostLevelParams": { +"jdk_location": "http://c6405.ambari.apache.org:8080/resources/";, +"ambari_db_rca_password": "mapred", +"java_home": "/usr/jdk64/jdk1.8.0_40", +"ambari_db_rca_url": "jdbc:postgresql://c6405.ambari.apache.org/ambarirca", +"jce_name": "jce_policy-8.zip", +"custom_command": "RESTART", +"oracle_jdbc_url": "http://c6405.ambari.apache.org:8080/resources//ojdbc6.jar";, +"repo_info": 
"[{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.3.0.0-1606\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.3\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.3.0.0\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.3.0.0-1606\",\"baseSaved\":true},{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.20\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6\",\"baseSaved\":true}]";, +"group_list": "[\"hadoop\",\"users\",\"knox\"]", +"agentCacheDir": "/var/lib/ambari-agent/cache", +"stack_version": "2.3", +"stack_name": "HDP", +"db_name": "ambari", +"jdk_name": "jdk-8u40-linux-x64.tar.gz", +"ambari_db_rca_driver": "org.postgresql.Driver", +"java_version": "8", +"ambari_db_rca_username": "mapred", +"db_driver_filename": "mysql-connector-java.jar", +"user_list": "[\"storm\",\"zookeeper\",\"ambari-qa\",\"hdfs\",\"hbase\",\"knox\"]", +"mysql_jdbc_url": "http://c6405.ambari.apache.org:8080/resources//mysql-connector-java.jar";, +"clientsToUpdateConfigs": "[\"*\"]" +}, +"commandType": "EXECUTION_COMMAND", +"roleParams": { +"component_category": "SLAVE" +}, +"serviceName": "HBASE", +"role": "HBASE_REGIONSERVER", +"forceRefreshConfigTags": [], +"taskId": 115, +"public_hostname": "c6405.ambari.apache.org", +"configurations": { +"ranger-knox-plugin-properties": { +"XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS": "900", +"KNOX_HOME": "/usr/hdp/current/knox-server", +"XAAUDIT.HDFS.DESTINATION_DIRECTORY": "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/%app-type%/%time:MMdd%", +"XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY": "__REPLACE__LOG_DIR/hadoop/%app-type%/audit", +"common.name.for.certificate": "-", 
+"XAAUDIT.HDFS.IS_ENABLED": "false", +"XAAUDIT.HDFS.LOCAL_BUFFER_FILE": "%time:MMdd-HHmm.ss%.log", +"SSL_KEYSTORE_PASSWORD": "myKeyFilePassword", +"XAAUDIT.DB.IS_ENABLED": "true", +
[7/8] ambari git commit: AMBARI-9993. Add support for management of Phoenix Query Server to HDP Stack (Nick Dimiduk via alejandro)
http://git-wip-us.apache.org/repos/asf/ambari/blob/54647547/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/metrics.json -- diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/metrics.json b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/metrics.json new file mode 100644 index 000..7bbf7be --- /dev/null +++ b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/metrics.json @@ -0,0 +1,13549 @@ +{ + "HBASE_REGIONSERVER": { +"Component": [ + { +"type": "ganglia", +"metrics": { + "default": { +"metrics/cpu/cpu_idle":{ + "metric":"cpu_idle", + "pointInTime":true, + "temporal":true +}, +"metrics/cpu/cpu_nice":{ + "metric":"cpu_nice", + "pointInTime":true, + "temporal":true +}, +"metrics/cpu/cpu_system":{ + "metric":"cpu_system", + "pointInTime":true, + "temporal":true +}, +"metrics/cpu/cpu_user":{ + "metric":"cpu_user", + "pointInTime":true, + "temporal":true +}, +"metrics/cpu/cpu_wio":{ + "metric":"cpu_wio", + "pointInTime":true, + "temporal":true +}, +"metrics/disk/disk_free":{ + "metric":"disk_free", + "pointInTime":true, + "temporal":true +}, +"metrics/disk/disk_total":{ + "metric":"disk_total", + "pointInTime":true, + "temporal":true +}, +"metrics/load/load_fifteen":{ + "metric":"load_fifteen", + "pointInTime":true, + "temporal":true +}, +"metrics/load/load_five":{ + "metric":"load_five", + "pointInTime":true, + "temporal":true +}, +"metrics/load/load_one":{ + "metric":"load_one", + "pointInTime":true, + "temporal":true +}, +"metrics/memory/mem_buffers":{ + "metric":"mem_buffers", + "pointInTime":true, + "temporal":true +}, +"metrics/memory/mem_cached":{ + "metric":"mem_cached", + "pointInTime":true, + "temporal":true +}, +"metrics/memory/mem_free":{ + "metric":"mem_free", + "pointInTime":true, + "temporal":true +}, +"metrics/memory/mem_shared":{ + "metric":"mem_shared", + "pointInTime":true, + "temporal":true +}, +"metrics/memory/mem_total":{ + "metric":"mem_total", + "pointInTime":true, + 
"temporal":true +}, +"metrics/memory/swap_free":{ + "metric":"swap_free", + "pointInTime":true, + "temporal":true +}, +"metrics/memory/swap_total":{ + "metric":"swap_total", + "pointInTime":true, + "temporal":true +}, +"metrics/network/bytes_in":{ + "metric":"bytes_in", + "pointInTime":true, + "temporal":true +}, +"metrics/network/bytes_out":{ + "metric":"bytes_out", + "pointInTime":true, + "temporal":true +}, +"metrics/network/pkts_in":{ + "metric":"pkts_in", + "pointInTime":true, + "temporal":true +}, +"metrics/network/pkts_out":{ + "metric":"pkts_out", + "pointInTime":true, + "temporal":true +}, +"metrics/process/proc_run":{ + "metric":"proc_run", + "pointInTime":true, + "temporal":true +}, +"metrics/process/proc_total":{ + "metric":"proc_total", + "pointInTime":true, + "temporal":true +}, +"metrics/disk/read_count":{ + "metric":"read_count", + "pointInTime":true, + "temporal":true +}, +"metrics/disk/write_count":{ + "metric":"write_count", + "pointInTime":true, + "temporal":true +}, +"metrics/disk/read_bytes":{ + "metric":"read_bytes", + "pointInTime":true, + "temporal":true +}, +"metrics/disk/write_bytes":{ + "metric":"write_bytes", + "pointInTime":true, + "temporal":true +
[8/8] ambari git commit: AMBARI-9993. Add support for management of Phoenix Query Server to HDP Stack (Nick Dimiduk via alejandro)
AMBARI-9993. Add support for management of Phoenix Query Server to HDP Stack (Nick Dimiduk via alejandro) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/54647547 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/54647547 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/54647547 Branch: refs/heads/trunk Commit: 5464754717b0be739df6171ee5b6119548ec7bd9 Parents: b589692 Author: Alejandro Fernandez Authored: Wed Apr 15 15:52:27 2015 -0700 Committer: Alejandro Fernandez Committed: Wed Apr 15 15:52:27 2015 -0700 -- .../common-services/HBASE/1.1.0.2.3/alerts.json | 157 + .../HBASE/1.1.0.2.3/configuration/hbase-env.xml | 155 + .../1.1.0.2.3/configuration/hbase-log4j.xml | 143 + .../1.1.0.2.3/configuration/hbase-policy.xml|53 + .../1.1.0.2.3/configuration/hbase-site.xml | 502 + .../HBASE/1.1.0.2.3/kerberos.json | 105 + .../HBASE/1.1.0.2.3/metainfo.xml| 158 + .../HBASE/1.1.0.2.3/metrics.json| 13549 + .../1.1.0.2.3/package/files/draining_servers.rb | 164 + .../1.1.0.2.3/package/files/hbaseSmokeVerify.sh |34 + .../HBASE/1.1.0.2.3/package/scripts/__init__.py |19 + .../1.1.0.2.3/package/scripts/functions.py |40 + .../HBASE/1.1.0.2.3/package/scripts/hbase.py| 176 + .../1.1.0.2.3/package/scripts/hbase_client.py |66 + .../package/scripts/hbase_decommission.py |93 + .../1.1.0.2.3/package/scripts/hbase_master.py | 148 + .../package/scripts/hbase_regionserver.py | 156 + .../1.1.0.2.3/package/scripts/hbase_service.py |51 + .../1.1.0.2.3/package/scripts/hbase_upgrade.py |37 + .../HBASE/1.1.0.2.3/package/scripts/params.py |25 + .../1.1.0.2.3/package/scripts/params_linux.py | 249 + .../1.1.0.2.3/package/scripts/params_windows.py |37 + .../package/scripts/phoenix_queryserver.py |55 + .../package/scripts/phoenix_service.py |44 + .../1.1.0.2.3/package/scripts/service_check.py |97 + .../package/scripts/setup_ranger_hbase.py | 202 + .../1.1.0.2.3/package/scripts/status_params.py |41 + 
.../HBASE/1.1.0.2.3/package/scripts/upgrade.py |49 + ...-metrics2-hbase.properties-GANGLIA-MASTER.j2 | 105 + ...doop-metrics2-hbase.properties-GANGLIA-RS.j2 | 104 + .../package/templates/hbase-smoke.sh.j2 |44 + .../package/templates/hbase_client_jaas.conf.j2 |23 + .../templates/hbase_grant_permissions.j2|39 + .../package/templates/hbase_master_jaas.conf.j2 |26 + .../templates/hbase_regionserver_jaas.conf.j2 |26 + .../package/templates/regionservers.j2 |20 + .../HBASE/1.1.0.2.3/widgets.json| 192 + .../stacks/2.3/HBASE/test_hbase_client.py | 213 + .../stacks/2.3/HBASE/test_hbase_master.py | 678 + .../stacks/2.3/HBASE/test_hbase_regionserver.py | 601 + .../2.3/HBASE/test_hbase_service_check.py | 131 + .../2.3/HBASE/test_phoenix_queryserver.py | 160 + .../stacks/2.3/configs/default.hbasedecom.json | 713 + .../stacks/2.3/configs/hbase_default.json | 417 + .../python/stacks/2.3/configs/hbase_secure.json | 738 + 45 files changed, 20835 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/54647547/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/alerts.json -- diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/alerts.json b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/alerts.json new file mode 100644 index 000..d10897a --- /dev/null +++ b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/alerts.json @@ -0,0 +1,157 @@ +{ + "HBASE": { +"service": [ + { +"name": "hbase_regionserver_process_percent", +"label": "Percent RegionServers Available", +"description": "This service-level alert is triggered if the configured percentage of RegionServer processes cannot be determined to be up and listening on the network for the configured warning and critical thresholds. 
It aggregates the results of RegionServer process down checks.", +"interval": 1, +"scope": "SERVICE", +"enabled": true, +"source": { + "type": "AGGREGATE", + "alert_name": "hbase_regionserver_process", + "reporting": { +"ok": { + "text": "affected: [{1}], total: [{0}]" +}, +"warning": { + "text": "affected: [{1}], total: [{0}]", + "value": 0.1 +}, +"critical": { + "text": "affected: [{1}], total: [{0}]", + "value": 0.3 +
[6/8] ambari git commit: AMBARI-9993. Add support for management of Phoenix Query Server to HDP Stack (Nick Dimiduk via alejandro)
http://git-wip-us.apache.org/repos/asf/ambari/blob/54647547/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/draining_servers.rb -- diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/draining_servers.rb b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/draining_servers.rb new file mode 100644 index 000..5bcb5b6 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/draining_servers.rb @@ -0,0 +1,164 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# Add or remove servers from draining mode via zookeeper + +require 'optparse' +include Java + +import org.apache.hadoop.hbase.HBaseConfiguration +import org.apache.hadoop.hbase.client.HBaseAdmin +import org.apache.hadoop.hbase.zookeeper.ZKUtil +import org.apache.commons.logging.Log +import org.apache.commons.logging.LogFactory + +# Name of this script +NAME = "draining_servers" + +# Do command-line parsing +options = {} +optparse = OptionParser.new do |opts| + opts.banner = "Usage: ./hbase org.jruby.Main #{NAME}.rb [options] add|remove|list || ..." + opts.separator 'Add remove or list servers in draining mode. 
Can accept either hostname to drain all region servers' + + 'in that host, a host:port pair or a host,port,startCode triplet. More than one server can be given separated by space' + opts.on('-h', '--help', 'Display usage information') do +puts opts +exit + end + options[:debug] = false + opts.on('-d', '--debug', 'Display extra debug logging') do +options[:debug] = true + end +end +optparse.parse! + +# Return array of servernames where servername is hostname+port+startcode +# comma-delimited +def getServers(admin) + serverInfos = admin.getClusterStatus().getServerInfo() + servers = [] + for server in serverInfos +servers << server.getServerName() + end + return servers +end + +def getServerNames(hostOrServers, config) + ret = [] + + for hostOrServer in hostOrServers +# check whether it is already serverName. No need to connect to cluster +parts = hostOrServer.split(',') +if parts.size() == 3 + ret << hostOrServer +else + admin = HBaseAdmin.new(config) if not admin + servers = getServers(admin) + + hostOrServer = hostOrServer.gsub(/:/, ",") + for server in servers +ret << server if server.start_with?(hostOrServer) + end +end + end + + admin.close() if admin + return ret +end + +def addServers(options, hostOrServers) + config = HBaseConfiguration.create() + servers = getServerNames(hostOrServers, config) + + zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, "draining_servers", nil) + parentZnode = zkw.drainingZNode + + begin +for server in servers + node = ZKUtil.joinZNode(parentZnode, server) + ZKUtil.createAndFailSilent(zkw, node) +end + ensure +zkw.close() + end +end + +def removeServers(options, hostOrServers) + config = HBaseConfiguration.create() + servers = getServerNames(hostOrServers, config) + + zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, "draining_servers", nil) + parentZnode = zkw.drainingZNode + + begin +for server in servers + node = ZKUtil.joinZNode(parentZnode, server) + ZKUtil.deleteNodeFailSilent(zkw, node) 
+end + ensure +zkw.close() + end +end + +# list servers in draining mode +def listServers(options) + config = HBaseConfiguration.create() + + zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, "draining_servers", nil) + parentZnode = zkw.drainingZNode + + servers = ZKUtil.listChildrenNoWatch(zkw, parentZnode) + servers.each {|server| puts server} +end + +hostOrServers = ARGV[1..ARGV.size()] + +# Create a logger and disable the DEBUG-level annoying client logging +def configureLogging(options) + apacheLogger = LogFactory.getLog(NAME) + # Configure log4j to not spew so much + unless (options[:debug]) +logger = org.apache.log4j.Logger.getLogger("org.apache.hadoop.hbase") +logger.setLevel(org.apache.log4j.Level::WAR
[4/8] ambari git commit: AMBARI-9993. Add support for management of Phoenix Query Server to HDP Stack (Nick Dimiduk via alejandro)
http://git-wip-us.apache.org/repos/asf/ambari/blob/54647547/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_regionserver.py -- diff --git a/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_regionserver.py new file mode 100644 index 000..153555e --- /dev/null +++ b/ambari-server/src/test/python/stacks/2.3/HBASE/test_hbase_regionserver.py @@ -0,0 +1,601 @@ +#!/usr/bin/env python + +''' +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+''' +from mock.mock import MagicMock, patch +from stacks.utils.RMFTestCase import * +from unittest import skip + +@patch("platform.linux_distribution", new = MagicMock(return_value="Linux")) +@patch("os.path.exists", new = MagicMock(return_value=True)) +class TestHbaseRegionServer(RMFTestCase): + COMMON_SERVICES_PACKAGE_DIR = "HBASE/1.1.0.2.3/package" + STACK_VERSION = "2.3" + + def test_configure_default(self): +self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py", + classname = "HbaseRegionServer", + command = "configure", + config_file="hbase_default.json", + hdp_stack_version = self.STACK_VERSION, + target = RMFTestCase.TARGET_COMMON_SERVICES +) + +self.assert_configure_default() +self.assertNoMoreResources() + + def test_start_default(self): +self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py", + classname = "HbaseRegionServer", + command = "start", + config_file="hbase_default.json", + hdp_stack_version = self.STACK_VERSION, + target = RMFTestCase.TARGET_COMMON_SERVICES +) + +self.assert_configure_default() +self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf start regionserver', + not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1', + user = 'hbase' +) +self.assertNoMoreResources() + + def test_stop_default(self): +self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py", + classname = "HbaseRegionServer", + command = "stop", + config_file="hbase_default.json", + hdp_stack_version = self.STACK_VERSION, + target = RMFTestCase.TARGET_COMMON_SERVICES +) + +self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf stop regionserver', +on_timeout = '! 
( ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1 ) || ambari-sudo.sh -H -E kill -9 `cat /var/run/hbase/hbase-hbase-regionserver.pid`', +timeout = 30, +user = 'hbase', +) + +self.assertResourceCalled('Execute', 'rm -f /var/run/hbase/hbase-hbase-regionserver.pid', +) +self.assertNoMoreResources() + + def test_configure_secured(self): +self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py", + classname = "HbaseRegionServer", + command = "configure", + config_file="hbase_secure.json", + hdp_stack_version = self.STACK_VERSION, + target = RMFTestCase.TARGET_COMMON_SERVICES +) + +self.assert_configure_secured() +self.assertNoMoreResources() + + def test_start_secured(self): +self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py", + classname = "HbaseRegionServer", + command = "start", + config_file="hbase_secure.json", + hdp_stack_version = self.STACK_VERSION, + target = RMFTestCase.TARGET_COMMON_SERVICES +) + +self.assert_configure_secured() +self.assertResou
[5/8] ambari git commit: AMBARI-9993. Add support for management of Phoenix Query Server to HDP Stack (Nick Dimiduk via alejandro)
http://git-wip-us.apache.org/repos/asf/ambari/blob/54647547/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2 -- diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2 b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2 new file mode 100644 index 000..462bef4 --- /dev/null +++ b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-RS.j2 @@ -0,0 +1,104 @@ +{# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#} + +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# See http://wiki.apache.org/hadoop/GangliaMetrics +# +# Make sure you know whether you are using ganglia 3.0 or 3.1. +# If 3.1, you will have to patch your hadoop instance with HADOOP-4675 +# And, yes, this file is named hadoop-metrics.properties rather than +# hbase-metrics.properties because we're leveraging the hadoop metrics +# package and hadoop-metrics.properties is an hardcoded-name, at least +# for the moment. +# +# See also http://hadoop.apache.org/hbase/docs/current/metrics.html + +# HBase-specific configuration to reset long-running stats (e.g. compactions) +# If this variable is left out, then the default is no expiration. 
+hbase.extendedperiod = 3600 + +{% if has_metric_collector %} + +*.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar +*.sink.timeline.slave.host.name={{hostname}} +hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink +hbase.period=10 +hbase.collector={{metric_collector_host}}:{{metric_collector_port}} + +jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink +jvm.period=10 +jvm.collector={{metric_collector_host}}:{{metric_collector_port}} + +rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink +rpc.period=10 +rpc.collector={{metric_collector_host}}:{{metric_collector_port}} + +hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink +hbase.sink.timeline.period=10 +hbase.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}} + +{% else %} + +# Configuration of the "hbase" context for ganglia +# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter) +# hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext +hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext31 +hbase.period=10 +hbase.servers={{ganglia_server_host}}:8656 + +# Configuration of the "jvm" context for ganglia +# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter) +# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext +jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext31 +jvm.period=10 +jvm.servers={{ganglia_server_host}}:8656 + +# Configuration of the "rpc" context for ganglia +# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter) +# rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext +rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext31 +rpc.period=10 +rpc.servers={{ganglia_server_host}}:8656 + +#Ganglia following hadoop example +hbase.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31 +hbase.sink.ganglia.period=10 + +# default for supportsparse is false 
+*.sink.gang
[3/8] ambari git commit: AMBARI-9993. Add support for management of Phoenix Query Server to HDP Stack (Nick Dimiduk via alejandro)
http://git-wip-us.apache.org/repos/asf/ambari/blob/54647547/ambari-server/src/test/python/stacks/2.3/configs/default.hbasedecom.json -- diff --git a/ambari-server/src/test/python/stacks/2.3/configs/default.hbasedecom.json b/ambari-server/src/test/python/stacks/2.3/configs/default.hbasedecom.json new file mode 100644 index 000..e40c3c4 --- /dev/null +++ b/ambari-server/src/test/python/stacks/2.3/configs/default.hbasedecom.json @@ -0,0 +1,713 @@ +{ +"roleCommand": "SERVICE_CHECK", +"clusterName": "c1", +"hostname": "c6401.ambari.apache.org", +"hostLevelParams": { +"jdk_location": "http://c6401.ambari.apache.org:8080/resources/";, +"ambari_db_rca_password": "mapred", +"ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca", +"jce_name": "UnlimitedJCEPolicyJDK7.zip", +"stack_version": "2.0", +"stack_name": "HDP", +"ambari_db_rca_driver": "org.postgresql.Driver", +"jdk_name": "jdk-7u67-linux-x64.tar.gz", +"ambari_db_rca_username": "mapred", +"java_home": "/usr/jdk64/jdk1.7.0_45", +"java_version": "8", +"db_name": "ambari" +}, +"commandType": "EXECUTION_COMMAND", +"roleParams": {}, +"serviceName": "OOZIE", +"role": "OOZIE_SERVICE_CHECK", +"commandParams": { +"command_timeout": "300", +"service_package_folder": "OOZIE", +"script_type": "PYTHON", +"script": "scripts/service_check.py", +"excluded_hosts": "host1", +"mark_draining_only": "true" +}, +"taskId": 152, +"public_hostname": "c6401.ambari.apache.org", +"configurations": { +"mapred-site": { +"mapreduce.jobhistory.address": "c6402.ambari.apache.org:10020", +"mapreduce.cluster.administrators": " hadoop", +"mapreduce.reduce.input.buffer.percent": "0.0", +"mapreduce.output.fileoutputformat.compress": "false", +"mapreduce.framework.name": "yarn", +"mapreduce.map.speculative": "false", +"mapreduce.reduce.shuffle.merge.percent": "0.66", +"yarn.app.mapreduce.am.resource.mb": "683", +"mapreduce.map.java.opts": "-Xmx273m", +"mapreduce.application.classpath": 
"$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*,$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*", +"mapreduce.job.reduce.slowstart.completedmaps": "0.05", +"mapreduce.output.fileoutputformat.compress.type": "BLOCK", +"mapreduce.reduce.speculative": "false", +"mapreduce.reduce.java.opts": "-Xmx546m", +"mapreduce.am.max-attempts": "2", +"yarn.app.mapreduce.am.admin-command-opts": "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN", +"mapreduce.reduce.log.level": "INFO", +"mapreduce.map.sort.spill.percent": "0.7", +"mapreduce.task.timeout": "30", +"mapreduce.map.memory.mb": "341", +"mapreduce.task.io.sort.factor": "100", +"mapreduce.jobhistory.intermediate-done-dir": "/mr-history/tmp", +"mapreduce.reduce.memory.mb": "683", +"yarn.app.mapreduce.am.log.level": "INFO", +"mapreduce.map.log.level": "INFO", +"mapreduce.shuffle.port": "13562", +"mapreduce.admin.user.env": "LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:/usr/lib/hadoop/lib/native/`$JAVA_HOME/bin/java -d32 -version &> /dev/null;if [ $? 
-eq 0 ]; then echo Linux-i386-32; else echo Linux-amd64-64;fi`", +"mapreduce.map.output.compress": "false", +"yarn.app.mapreduce.am.staging-dir": "/user", +"mapreduce.reduce.shuffle.parallelcopies": "30", +"mapreduce.reduce.shuffle.input.buffer.percent": "0.7", +"mapreduce.jobhistory.webapp.address": "c6402.ambari.apache.org:19888", +"mapreduce.jobhistory.done-dir": "/mr-history/done", +"mapreduce.admin.reduce.child.java.opts": "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN", +"mapreduce.task.io.sort.mb": "136", +"yarn.app.mapreduce.am.command-opts": "-Xmx546m", +"mapreduce.admin.map.child.java.opts": "-Djava.net.preferIPv4Stack=true -Dhadoop.metrics.log.level=WARN" +}, +"oozie-site": { +"oozie.service.PurgeService.purge.interval": "3600", +"oozie.service.CallableQueueService.queue.size": "1000", +"oozie.service.SchemaService.wf.ext.schemas": "shell-action-0.1.xsd,email-action-0.1.xsd,hive-action-0.2.xsd,sqoop-action-0.2.xsd,ssh-action-0.1.xsd,distcp-action-0.1.xsd,shell-action-0.2.xsd,oozie-sla-0.1.xsd,oozie-sla-0.2.xsd,hive-action-0.3.xsd", +"oozie.service.JPAService.jdbc.url": "jdbc:derby:${oozie.data.dir}/${oozie.db.schema.name}-db;c
ambari git commit: AMBARI-10507. Local root user's group being assigned to hadoop. (Adam Westerman via yusaku)
Repository: ambari Updated Branches: refs/heads/trunk f05543d39 -> b58969228 AMBARI-10507. Local root user's group being assigned to hadoop. (Adam Westerman via yusaku) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b5896922 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b5896922 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b5896922 Branch: refs/heads/trunk Commit: b58969228db8a34fb1c3c8a249b8691bf83bce21 Parents: f05543d Author: Yusaku Sako Authored: Wed Apr 15 14:15:39 2015 -0700 Committer: Yusaku Sako Committed: Wed Apr 15 14:15:39 2015 -0700 -- .../RANGER/0.4.0/configuration/admin-properties.xml | 5 + 1 file changed, 1 insertion(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/b5896922/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/admin-properties.xml -- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/admin-properties.xml b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/admin-properties.xml index ae03e2e..aefb200 100644 --- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/admin-properties.xml +++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/configuration/admin-properties.xml @@ -42,7 +42,6 @@ db_root_user root -USER Database admin user @@ -68,7 +67,6 @@ db_user rangeradmin -USER Database user-id used for the Ranger schema @@ -88,7 +86,6 @@ audit_db_user rangerlogger -USER Database user-id for storing auditlog information @@ -177,4 +174,4 @@ - \ No newline at end of file +
ambari git commit: AMBARI-10505. Equals predicate does not work for host component metrics. (swagle)
Repository: ambari Updated Branches: refs/heads/trunk e9617f7f6 -> f05543d39 AMBARI-10505. Equals predicate does not work for host component metrics. (swagle) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f05543d3 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f05543d3 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f05543d3 Branch: refs/heads/trunk Commit: f05543d393aed760f49f799a447366d45b3a128b Parents: e9617f7 Author: Siddharth Wagle Authored: Wed Apr 15 13:16:17 2015 -0700 Committer: Siddharth Wagle Committed: Wed Apr 15 13:58:56 2015 -0700 -- .../ambari/server/api/query/QueryImpl.java | 2 +- .../ambari/server/api/query/QueryImplTest.java | 73 2 files changed, 74 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/f05543d3/ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java index 0ab9d07..2319683 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/api/query/QueryImpl.java @@ -445,7 +445,7 @@ public class QueryImpl implements Query, ResourceInstance { new QueryResult(request, queryPredicate, subResourcePredicate, map, new QueryResponseImpl(resourceSet))); } } - clusterController.populateResources(resourceType, providerResourceSet, request, null); + clusterController.populateResources(resourceType, providerResourceSet, request, subResourcePredicate); subResource.queryForSubResources(); } } http://git-wip-us.apache.org/repos/asf/ambari/blob/f05543d3/ambari-server/src/test/java/org/apache/ambari/server/api/query/QueryImplTest.java -- diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/query/QueryImplTest.java 
b/ambari-server/src/test/java/org/apache/ambari/server/api/query/QueryImplTest.java index 5ac91fd..01361d2 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/api/query/QueryImplTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/api/query/QueryImplTest.java @@ -23,6 +23,7 @@ package org.apache.ambari.server.api.query; import static org.easymock.EasyMock.anyBoolean; import static org.easymock.EasyMock.anyObject; import static org.easymock.EasyMock.capture; +import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.createNiceMock; import static org.easymock.EasyMock.eq; import static org.easymock.EasyMock.expect; @@ -301,6 +302,78 @@ public class QueryImplTest { } @Test + public void testExecute_SubResourcePropertyPredicate() throws Exception { +ResourceDefinition resourceDefinition = new ClusterResourceDefinition(); + +Map mapIds = new HashMap(); +mapIds.put(Resource.Type.Cluster, "c1"); +mapIds.put(Resource.Type.Host, "h1"); + +ClusterController clusterController = createNiceMock(ClusterController.class); +QueryResponse clusterResponse = createNiceMock(QueryResponse.class); +QueryResponse hostResponse = createNiceMock(QueryResponse.class); +Schema clusterSchema = createNiceMock("ClusterSchema", Schema.class); +Schema hostSchema = createNiceMock("HostSchema", Schema.class); +Renderer renderer = createNiceMock(Renderer.class); +Resource clusterResource = createMock("ClusterResource", Resource.class); +Resource hostResource = createMock("HostResource", Resource.class); +Set clusterResources = Collections.singleton(clusterResource); +Set hostResources = Collections.singleton(hostResource); +Iterable iterable = createNiceMock(Iterable.class); +Iterator iterator = createNiceMock(Iterator.class); + + expect(clusterController.getSchema(Resource.Type.Cluster)).andReturn(clusterSchema).anyTimes(); + expect(clusterController.getSchema(Resource.Type.Host)).andReturn(hostSchema).anyTimes(); + 
+expect(clusterController.getResources(eq(Resource.Type.Cluster), + anyObject(org.apache.ambari.server.controller.spi.Request.class), anyObject(Predicate.class))). + andReturn(clusterResponse); + +// Expect this call with a predicate passed down +expect(clusterController.getResources(eq(Resource.Type.Host), + anyObject(org.apache.ambari.server.controller.spi.Request.class), anyObject(Predicate.class))). + andReturn(hostResponse); + +expect(iterable.iterator()).andReturn(iterator).anyTimes(); +expect(iterator.hasNext()).andReturn(fal
ambari git commit: AMBARI-10276. Install wizard (step 3): Removing hosts during bootstrap, will cause unexpected behaviour (alexantonenko)
Repository: ambari Updated Branches: refs/heads/branch-2.0.maint 1db1f436b -> ee9fb1f4e AMBARI-10276. Install wizard (step 3): Removing hosts during bootstrap, will case unexpected behaviour (alexantonenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ee9fb1f4 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ee9fb1f4 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ee9fb1f4 Branch: refs/heads/branch-2.0.maint Commit: ee9fb1f4e0ce31b5ce71a935806216ffb6059c55 Parents: 1db1f43 Author: Alex Antonenko Authored: Mon Mar 30 19:54:19 2015 +0300 Committer: Yusaku Sako Committed: Wed Apr 15 13:07:43 2015 -0700 -- ambari-web/app/controllers/wizard/step3_controller.js | 14 +++--- ambari-web/app/templates/wizard/step3.hbs | 12 +++- ambari-web/test/controllers/wizard/step3_test.js | 9 - 3 files changed, 18 insertions(+), 17 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/ee9fb1f4/ambari-web/app/controllers/wizard/step3_controller.js -- diff --git a/ambari-web/app/controllers/wizard/step3_controller.js b/ambari-web/app/controllers/wizard/step3_controller.js index 21f9e33..30f1bfe 100644 --- a/ambari-web/app/controllers/wizard/step3_controller.js +++ b/ambari-web/app/controllers/wizard/step3_controller.js @@ -68,7 +68,9 @@ App.WizardStep3Controller = Em.Controller.extend({ * is Retry button disabled * @type {bool} */ - isRetryDisabled: true, + isRetryDisabled: function() { +return (this.get('isBackDisabled')) ? 
this.get('isBackDisabled') : !this.get('bootHosts').filterProperty('bootStatus', 'FAILED').length; + }.property('bootHosts.@each.bootStatus', 'isBackDisabled'), /** * Is Back button disabled @@ -216,7 +218,6 @@ App.WizardStep3Controller = Em.Controller.extend({ this.set('registrationStartedAt', null); this.set('isLoaded', false); this.set('isSubmitDisabled', true); -this.set('isRetryDisabled', true); this.set('stopChecking', false); }, @@ -235,10 +236,10 @@ App.WizardStep3Controller = Em.Controller.extend({ }); App.router.get(this.get('content.controllerName')).launchBootstrap(bootStrapData, function (requestId) { if (requestId == '0') { -var controller = App.router.get(App.clusterStatus.wizardControllerName); -controller.registerErrPopup(Em.I18n.t('common.information'), Em.I18n.t('installer.step2.evaluateStep.hostRegInProgress')); +self.startBootstrap(); } else if (requestId) { self.set('content.installOptions.bootRequestId', requestId); + App.router.get(self.get('content.controllerName')).save('installOptions'); self.startBootstrap(); } }); @@ -324,7 +325,8 @@ App.WizardStep3Controller = Em.Controller.extend({ * @method removeHost */ removeHost: function (hostInfo) { -this.removeHosts([hostInfo]); +if (!this.get('isBackDisabled')) + this.removeHosts([hostInfo]); }, /** @@ -411,7 +413,6 @@ App.WizardStep3Controller = Em.Controller.extend({ */ retrySelectedHosts: function () { if (!this.get('isRetryDisabled')) { - this.set('isRetryDisabled', true); var selectedHosts = this.get('bootHosts').filterProperty('bootStatus', 'FAILED'); selectedHosts.forEach(function (_host) { _host.set('bootStatus', 'DONE'); @@ -1207,7 +1208,6 @@ App.WizardStep3Controller = Em.Controller.extend({ */ stopRegistration: function () { this.set('isSubmitDisabled', !this.get('bootHosts').someProperty('bootStatus', 'REGISTERED')); -this.set('isRetryDisabled', !this.get('bootHosts').someProperty('bootStatus', 'FAILED')); }, /** 
http://git-wip-us.apache.org/repos/asf/ambari/blob/ee9fb1f4/ambari-web/app/templates/wizard/step3.hbs -- diff --git a/ambari-web/app/templates/wizard/step3.hbs b/ambari-web/app/templates/wizard/step3.hbs index cb75709..b34c64d 100644 --- a/ambari-web/app/templates/wizard/step3.hbs +++ b/ambari-web/app/templates/wizard/step3.hbs @@ -24,10 +24,12 @@ - - {{t installer.step3.removeSelected}} - +{{#unless isBackDisabled}} + +{{t installer.step3.removeSelected}} + +{{/unless}} {{#unless isRetryDisabled}} @@ -91,7 +93,7 @@ data-toggle="modal" {{action hostLogPopup host target="controller"}}>{{host.bootStatusForDisplay}} - + {{t common.remove}} {{#if view.isRetryable}} http://git-wip-us.apache.org/repos/asf/ambari/blob/ee9
ambari git commit: AMBARI-10504 - Views : create example view for Cluster Association and Auto Create (tbeerbower)
Repository: ambari Updated Branches: refs/heads/trunk b92bfc352 -> e9617f7f6 AMBARI-10504 - Views : create example view for Cluster Association and Auto Create (tbeerbower) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e9617f7f Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e9617f7f Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e9617f7f Branch: refs/heads/trunk Commit: e9617f7f6bb5784da84113a691379f0316ec59d2 Parents: b92bfc3 Author: tbeerbower Authored: Wed Apr 15 15:38:01 2015 -0400 Committer: tbeerbower Committed: Wed Apr 15 15:38:14 2015 -0400 -- .../examples/cluster-view/docs/index.md | 175 +++ ambari-views/examples/cluster-view/pom.xml | 84 + .../view/cluster/ClusterConfigServlet.java | 65 +++ .../cluster/ClusterConfigurationService.java| 47 + .../ambari/view/cluster/PropertyService.java| 68 +++ .../ambari/view/cluster/SettingService.java | 47 + .../src/main/resources/WEB-INF/web.xml | 37 .../cluster-view/src/main/resources/view.xml| 90 ++ ambari-views/examples/pom.xml | 1 + 9 files changed, 614 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/e9617f7f/ambari-views/examples/cluster-view/docs/index.md -- diff --git a/ambari-views/examples/cluster-view/docs/index.md b/ambari-views/examples/cluster-view/docs/index.md new file mode 100644 index 000..2305609 --- /dev/null +++ b/ambari-views/examples/cluster-view/docs/index.md @@ -0,0 +1,175 @@ + + +Cluster View Example +== + +Description +- +The Cluster View is an example of a basic REST service that serves the configuration parameter values. +It demonstrates the basics of view / cluster association and cluster configuration for view instances. +It also shows how to auto-create an instance of a view and automatically associate it with a cluster. + +Package +- +All views are packaged as a view archive. The view archive contains the configuration +file and various optional components of the view. 
+ +###view.xml + +The view.xml file is the only required file for a view archive. The view.xml is the configuration that describes the view and view instances for Ambari. + +Note the following in the view.xml for the CLUSTER view: + +#cluster-config + +Some of the parameter elements in the view.xml are created with a cluster-config element. + + + hdfs_user + The hdfs_user value from the hadoop-env configuration. Requires cluster association. + HDFS User + not available + hadoop-env/hdfs_user + + +Including a cluster-config element means that the value for the parameter's property will be acquired from cluster configuration if the view instance is associated with a cluster. +In this example, if an instance of this view is associated with a cluster then the value returned for the 'hdfs_user' property will come from the cluster's 'hadoop-env/hdfs_user' configuration. + +#auto-instance + +The view.xml contains an auto-instance element. + + + AUTO_INSTANCE + Auto Create instance for the CLUSTER view + This view instance is auto created when the HDFS service is added to a cluster. + +setting1 +value1 + + +setting2 +value2 + + HDP-2.* + +HDFS + + + +The auto-instance element describes an instance of the view that will be automatically created when the matching services are added to a cluster with a matching stack id. +In this example, an instance of the cluster view will be created when the HDFS service is added to a HDP-2.* cluster. + +Build +- + +The view can be built as a maven project. + +cd ambari-views/examples/cluster-view +mvn clean package + +The build will produce the view archive. + +ambari-views/examples/cluster-view/target/cluster-view-???.jar + +Deploy +-- + +Place the view archive on the Ambari Server and start to deploy. + +cp cluster-view-???.jar /var/lib/ambari-server/resources/views/ +ambari-server start + + +View Instances +- + +When you first start Ambari, you should see an instance of the CLUSTER view named INSTANCE_1. 
The instance is defined in the view.xml and is created unconditionally when the view is first deployed. + +Access the view instance end point: + +api/v1/views/CLUSTER/versions/0.1.0/instances/INSTANCE_1 + +Access the view UI: + +/views/CLUSTER/0.1.0/INSTANCE_1 + +At this point, the instance is not associated with any cluster so accessing the view properties from within the view code should show the default value of 'not available' for the
ambari git commit: AMBARI-10484. Change response structure for widget layout API.
Repository: ambari Updated Branches: refs/heads/trunk 9420d3816 -> b92bfc352 AMBARI-10484. Change response structure for widget layout API. Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b92bfc35 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b92bfc35 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b92bfc35 Branch: refs/heads/trunk Commit: b92bfc352d41a84327824fcba34fa44c541db795 Parents: 9420d38 Author: Siddharth Wagle Authored: Wed Apr 15 12:20:45 2015 -0700 Committer: Siddharth Wagle Committed: Wed Apr 15 12:20:45 2015 -0700 -- .../WidgetLayoutResourceDefinition.java | 8 +++--- .../internal/WidgetLayoutResourceProvider.java | 28 ++-- .../WidgetLayoutResourceProviderTest.java | 12 - 3 files changed, 24 insertions(+), 24 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/b92bfc35/ambari-server/src/main/java/org/apache/ambari/server/api/resources/WidgetLayoutResourceDefinition.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/WidgetLayoutResourceDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/WidgetLayoutResourceDefinition.java index 2d2ca1e..a9bbd9c 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/api/resources/WidgetLayoutResourceDefinition.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/api/resources/WidgetLayoutResourceDefinition.java @@ -68,13 +68,13 @@ public class WidgetLayoutResourceDefinition extends BaseResourceDefinition { TreeNode parent = resultNode.getParent(); for (TreeNode node : parent.getChildren()) { -if (node.getObject().getPropertiesMap().get("WidgetLayouts") != null && - node.getObject().getPropertiesMap().get("WidgetLayouts").get("WidgetInfo") != null) { +if (node.getObject().getPropertiesMap().get("WidgetLayoutInfo") != null && + node.getObject().getPropertiesMap().get("WidgetLayoutInfo").get("widgets") != null) { - ArrayList 
widgetsList = (ArrayList) node.getObject().getPropertiesMap().get("WidgetLayouts").get("WidgetInfo"); + ArrayList widgetsList = (ArrayList) node.getObject().getPropertiesMap().get("WidgetLayoutInfo").get("widgets"); for (Object widgetObject : widgetsList) { HashMap widgetMap = (HashMap) widgetObject; -String widgetId = ((WidgetResponse) widgetMap.get("Widget")).getId().toString(); +String widgetId = ((WidgetResponse) widgetMap.get("WidgetInfo")).getId().toString(); String widgetHref = href.substring(0, href.indexOf("/widget_layouts") + 1) + "widgets/" + widgetId; widgetMap.put("href", widgetHref); http://git-wip-us.apache.org/repos/asf/ambari/blob/b92bfc35/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java -- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java index ba3bce9..8fa5bc6 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/WidgetLayoutResourceProvider.java @@ -59,14 +59,14 @@ public class WidgetLayoutResourceProvider extends AbstractControllerResourceProv // - Property ID constants - - public static final String WIDGETLAYOUT_ID_PROPERTY_ID = PropertyHelper.getPropertyId("WidgetLayouts", "id"); - public static final String WIDGETLAYOUT_CLUSTER_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("WidgetLayouts", "cluster_name"); - public static final String WIDGETLAYOUT_SECTION_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("WidgetLayouts", "section_name"); - public static final String WIDGETLAYOUT_LAYOUT_NAME_PROPERTY_ID = PropertyHelper.getPropertyId("WidgetLayouts", "layout_name"); - public static final String WIDGETLAYOUT_SCOPE_PROPERTY_ID = 
PropertyHelper.getPropertyId("WidgetLayouts", "scope"); - public static final String WIDGETLAYOUT_INFO_PROPERTY_ID = PropertyHelper.getPropertyId("WidgetLayouts", "WidgetInfo"); - public static final String WIDGETLAYOUT_USERNAME_PROPERTY_ID = PropertyHelper.getPropertyId("WidgetLayouts", "user_name"); - public static final Stri
ambari git commit: AMBARI-10444. RU Hacks and Technical Debt - unit tests for HBase, Hive, Falcon (dlysnichenko)
Repository: ambari Updated Branches: refs/heads/trunk 308633d21 -> 9420d3816 AMBARI-10444. RU Hacks and Technical Debt - unit tests for HBase, Hive, Falcon (dlysnichenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9420d381 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9420d381 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9420d381 Branch: refs/heads/trunk Commit: 9420d3816c6be50c677b6fa4165d9537a9ff2ca5 Parents: 308633d Author: Lisnichenko Dmitro Authored: Wed Apr 15 21:21:40 2015 +0300 Committer: Lisnichenko Dmitro Committed: Wed Apr 15 21:25:13 2015 +0300 -- .../HBASE/0.96.0.2.0/package/scripts/upgrade.py | 4 +-- .../stacks/2.0.6/HBASE/test_hbase_master.py | 17 + .../2.0.6/HBASE/test_hbase_regionserver.py | 36 .../stacks/2.0.6/HIVE/test_hive_client.py | 17 + .../stacks/2.0.6/HIVE/test_hive_server.py | 17 + .../stacks/2.0.6/HIVE/test_webhcat_server.py| 18 ++ .../stacks/2.1/FALCON/test_falcon_client.py | 19 ++- .../stacks/2.1/FALCON/test_falcon_server.py | 28 ++- .../stacks/2.1/HIVE/test_hive_metastore.py | 20 ++- 9 files changed, 171 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/9420d381/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py -- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py index 6f2e258..5a8caf8 100644 --- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py +++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py @@ -20,7 +20,7 @@ limitations under the License. 
""" from resource_management import * from resource_management.core.resources.system import Execute -from resource_management.core.shell import call +from resource_management.core import shell from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version from resource_management.libraries.functions.decorator import retry @@ -43,7 +43,7 @@ def post_regionserver(env): @retry(times=15, sleep_time=2, err_class=Fail) def call_and_match(cmd, user, regex): - code, out = call(cmd, user=user) + code, out = shell.call(cmd, user=user) if not (out and re.search(regex, out)): raise Fail("Could not verify RS available") http://git-wip-us.apache.org/repos/asf/ambari/blob/9420d381/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py index 48431cf..2a89a40 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py @@ -17,6 +17,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
''' +import json from mock.mock import MagicMock, patch from stacks.utils.RMFTestCase import * @@ -675,3 +676,19 @@ class TestHBaseMaster(RMFTestCase): user = 'hbase') self.assertNoMoreResources() + + def test_pre_rolling_restart(self): +config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json" +with open(config_file, "r") as f: + json_content = json.load(f) +version = '2.2.1.0-3242' +json_content['commandParams']['version'] = version +self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py", + classname = "HbaseMaster", + command = "pre_rolling_restart", + config_dict = json_content, + hdp_stack_version = self.STACK_VERSION, + target = RMFTestCase.TARGET_COMMON_SERVICES) +self.assertResourceCalled('Execute', + 'hdp-select set hbase-master %s' % version,) +self.assertNoMoreResources() \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/9420d381/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/2.0.6/H
svn commit: r1673907 - /ambari/site/index.html
Author: jluniya Date: Wed Apr 15 18:04:18 2015 New Revision: 1673907 URL: http://svn.apache.org/r1673907 Log: AMBARI-10492: Remove Nagios and Ganglia from ambari site (jluniya) Modified: ambari/site/index.html Modified: ambari/site/index.html URL: http://svn.apache.org/viewvc/ambari/site/index.html?rev=1673907&r1=1673906&r2=1673907&view=diff == --- ambari/site/index.html (original) +++ ambari/site/index.html Wed Apr 15 18:04:18 2015 @@ -1,13 +1,13 @@ http://www.w3.org/1999/xhtml"; xml:lang="en" lang="en"> - + @@ -269,7 +269,7 @@ - Last Published: 2015-04-07 | + Last Published: 2015-04-15 | Version: 2.0.0 @@ -625,7 +625,7 @@ -IntroductionThe Apache Ambari project is aimed at mak ing Hadoop management simpler by developing software for provisioning, managing, and monitoring Apache Hadoop clusters. Ambari provides an intuitive, easy-to-use Hadoop management web UI backed by its RESTful APIs.Ambari enables System Administrators to:Provision a Hadoop ClusterAmbari provides a step-by-step wizard for installing Hadoop services across any number of hosts.Ambari handles configuration of Hadoop services for the cluster.Manage a Hadoop ClusterAmbari provides central management for starting, stopping, and reconfiguring Hadoop services across the entire cluster.Monitor a Hadoop ClusterAmbari provides a dashboard for monitoring health and status of the Hadoop cluster.Ambari leverages http://ganglia.sourceforge.net/";> Ganglia for metrics collection.Ambari leverages http://www.nagios.o rg/"> Nagios for system alerting and will send emails when your attention is needed (e.g., a node goes down, remaining disk space is low, etc).Ambari enables Application Developers and System Integrators to:Easily integrate Hadoop provisioning, management, and monitoring capabilities to their own applications with the https://github.com/apache/ambari/blob/trunk/ambari-server/docs/api/v1/index.md";> Ambari REST APIs.Getting Started with AmbariFollow the 
https://cwiki.apache.org/confluence/display/AMBARI/Installation+Guide+for+Ambari+2.0.0";> installation guide for Ambari 2.0.0.Note: Ambari currently supports the 64-bit version of the following Operating Systems:RHEL (Redhat Enterprise Linux) 5 and 6CentOS 5 and 6OEL (Oracle Enterprise Linux) 5 and 6< li>SLES (SuSE Linux Enterprise Server) 11Ubuntu 12Get InvolvedVisit the https://cwiki.apache.org/confluence/display/AMBARI/Ambari";> Ambari Wiki for design documents, roadmap, development guidelines, etc.http://www.meetup.com/Apache-Ambari-User-Group";> Join the Ambari User Meetup Group. You can see the slides from http://www.meetup.com/Apache-Ambari-User-Group/events/109316812/";> April 2, 2013, http://www.meetup.com/Apache-Ambari-User-Group/events/119184782/";> June 25, 2013, and http://www.meetup.com/Apache-Ambari-User-Group/events/134373312/";> September 25, 2013 meetups.What's New?Check out the work going on for the upcoming release s. +IntroductionThe Apache Ambari project is aimed at mak ing Hadoop management simpler by developing software for provisioning, managing, and monitoring Apache Hadoop clusters. 
Ambari provides an intuitive, easy-to-use Hadoop management web UI backed by its RESTful APIs.Ambari enables System Administrators to:Provision a Hadoop ClusterAmbari provides a step-by-step wizard for installing Hadoop services across any number of hosts.Ambari handles configuration of Hadoop services for the cluster.Manage a Hadoop ClusterAmbari provides central management for starting, stopping, and reconfiguring Hadoop services across the entire cluster.Monitor a Hadoop ClusterAmbari provides a dashboard for monitoring health and status of the Hadoop cluster.Ambari leverages https://issues.apache.org/jira/browse/AMBARI-5707";> Ambari Metrics System for metrics collection.Ambari leverages https://issues.apache.org/jira/browse/AMBARI-6354";> Ambari Alert Framework for system alerting and will notify you when your attention is needed (e.g., a node goes down, remaining disk space is low, etc).Ambari enables Application Developers and System Integrators to:Easily integrate Hadoop provisioning, management, and monitoring capabilities to their own applications with the https://github.com/apache/ambari/blob/trunk/ambari-server/docs/api/v1/index.md";> Ambari REST APIs.Getting Started with AmbariFollow the https://cwiki.apache.org/confluence/display/AMBARI/Installation+Guide+for+Ambari+2.0.0";> installation guide for Ambari 2.0.0.Note: Ambari currently supports the 64-bit version of the following Operating Systems:RHEL (Redhat Enterprise Linux) 5 and 6< /li>CentOS 5 and
ambari git commit: AMBARI-10492: Remove Nagios and Ganglia from ambari site (jluniya)
Repository: ambari Updated Branches: refs/heads/trunk 4af2453a5 -> 308633d21 AMBARI-10492: Remove Nagios and Ganglia from ambari site (jluniya) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/308633d2 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/308633d2 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/308633d2 Branch: refs/heads/trunk Commit: 308633d2184786e93a289395ad4c490bf476fd22 Parents: 4af2453 Author: Jayush Luniya Authored: Wed Apr 15 10:59:21 2015 -0700 Committer: Jayush Luniya Committed: Wed Apr 15 10:59:21 2015 -0700 -- docs/src/site/apt/index.apt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/308633d2/docs/src/site/apt/index.apt -- diff --git a/docs/src/site/apt/index.apt b/docs/src/site/apt/index.apt index 34a95ff..b6866fe 100644 --- a/docs/src/site/apt/index.apt +++ b/docs/src/site/apt/index.apt @@ -40,9 +40,9 @@ Introduction * Ambari provides a dashboard for monitoring health and status of the Hadoop cluster. -* Ambari leverages {{{http://ganglia.sourceforge.net/} Ganglia}} for metrics collection. +* Ambari leverages {{{https://issues.apache.org/jira/browse/AMBARI-5707} Ambari Metrics System}} for metrics collection. -* Ambari leverages {{{http://www.nagios.org/} Nagios}} for system alerting and will send emails when your attention is needed (e.g., a node goes down, remaining disk space is low, etc). +* Ambari leverages {{{https://issues.apache.org/jira/browse/AMBARI-6354} Ambari Alert Framework}} for system alerting and will notify you when your attention is needed (e.g., a node goes down, remaining disk space is low, etc). []
ambari git commit: AMBARI-10458 [WinTP2] HBase: hbase.regionserver.global.memstore.size is not initialized
Repository: ambari Updated Branches: refs/heads/trunk cfdb955bd -> 4af2453a5 AMBARI-10458 [WinTP2] HBase: hbase.regionserver.global.memstore.size is not initialized Added the missing parameter Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4af2453a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4af2453a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4af2453a Branch: refs/heads/trunk Commit: 4af2453a52f0079ed779b9df16b4d01b7256df2d Parents: cfdb955 Author: Florian Barca Authored: Wed Apr 15 10:00:57 2015 -0700 Committer: Florian Barca Committed: Wed Apr 15 10:00:57 2015 -0700 -- .../HDPWIN/2.2/services/HBASE/configuration/hbase-site.xml | 9 + 1 file changed, 9 insertions(+) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/4af2453a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HBASE/configuration/hbase-site.xml -- diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HBASE/configuration/hbase-site.xml b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HBASE/configuration/hbase-site.xml index e5b893f..b1bbe51 100644 --- a/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HBASE/configuration/hbase-site.xml +++ b/ambari-server/src/main/resources/stacks/HDPWIN/2.2/services/HBASE/configuration/hbase-site.xml @@ -61,5 +61,14 @@ worse, we OOME. + +hbase.regionserver.global.memstore.size +0.4 +Maximum size of all memstores in a region server before new + updates are blocked and flushes are forced. Defaults to 40% of heap. + Updates are blocked and flushes are forced until size of all memstores + in a region server hits hbase.regionserver.global.memstore.size.lower.limit. + +
ambari git commit: AMBARI-10095. HiveServer2 does not install/start when running w/o sudo (aonishuk)
Repository: ambari Updated Branches: refs/heads/branch-2.0.maint 0ff32cd45 -> 1db1f436b AMBARI-10095. HiveServer2 does not install/start when running w/o sudo (aonishuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1db1f436 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1db1f436 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1db1f436 Branch: refs/heads/branch-2.0.maint Commit: 1db1f436bb75ae7c4ccd353bc48f3fbca6915c5c Parents: 0ff32cd Author: Andrew Onishuk Authored: Wed Apr 15 18:34:03 2015 +0300 Committer: Andrew Onishuk Committed: Wed Apr 15 18:34:03 2015 +0300 -- .../HIVE/0.12.0.2.0/package/scripts/hive_service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/1db1f436/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py index bbd9dee..55ff6c9 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_service.py @@ -129,8 +129,8 @@ def hive_service(name, action='start', rolling_restart=False): def check_fs_root(): import params metatool_cmd = format("hive --config {hive_server_conf_dir} --service metatool") - cmd = as_user(format("{metatool_cmd} -listFSRoot 2>/dev/null", env={'PATH': params.execute_path}), params.hive_user) \ -+ format(" | grep hdfs:// | cut -f1,2,3 -d '/' | grep -v '{fs_root}' | head -1") + cmd = as_user(format("{metatool_cmd} -listFSRoot", env={'PATH': params.execute_path}), params.hive_user) \ ++ format(" 2>/dev/null | grep hdfs:// | cut -f1,2,3 -d '/' | grep -v '{fs_root}' | head -1") code, 
out = shell.call(cmd) if code == 0 and out.strip() != "" and params.fs_root.strip() != out.strip():
ambari git commit: AMBARI-10502. Toggle widget with depended_by not making call to /recommendations (onechiporenko)
Repository: ambari Updated Branches: refs/heads/trunk ba25a16a3 -> cfdb955bd AMBARI-10502. Toggle widget with depended_by not making call to /recommendations (onechiporenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cfdb955b Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cfdb955b Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cfdb955b Branch: refs/heads/trunk Commit: cfdb955bd3e24212672066f3a179e39a1d35e8b4 Parents: ba25a16 Author: Oleg Nechiporenko Authored: Wed Apr 15 18:26:46 2015 +0300 Committer: Oleg Nechiporenko Committed: Wed Apr 15 18:26:46 2015 +0300 -- ambari-web/app/mixins/common/configs/enhanced_configs.js | 3 ++- .../common/configs/widgets/combo_config_widget_view.js| 3 +++ .../common/configs/widgets/list_config_widget_view.js | 2 ++ .../common/configs/widgets/slider_config_widget_view.js | 10 -- .../common/configs/widgets/time_interval_spinner_view.js | 2 ++ .../common/configs/widgets/toggle_config_widget_view.js | 2 ++ ambari-web/app/views/common/controls_view.js | 10 +- .../configs/widgets/time_interval_spinner_view_test.js| 3 +++ 8 files changed, 19 insertions(+), 16 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/cfdb955b/ambari-web/app/mixins/common/configs/enhanced_configs.js -- diff --git a/ambari-web/app/mixins/common/configs/enhanced_configs.js b/ambari-web/app/mixins/common/configs/enhanced_configs.js index d9caca0..ae6ceee 100644 --- a/ambari-web/app/mixins/common/configs/enhanced_configs.js +++ b/ambari-web/app/mixins/common/configs/enhanced_configs.js @@ -348,11 +348,12 @@ App.EnhancedConfigsMixin = Em.Mixin.create({ /** * get array of config objects for current service depends on config group * for default group - it will be current stepConfigs - * for not default group - overriden property in case there is such property in group + * for not default group - overridden property in case there is such property in group * 
otherwise - property from default group * @param stepConfigs * @returns {Object[]} * @private + * @method _getConfigsByGroup */ _getConfigsByGroup: function(stepConfigs) { if (this.get('selectedConfigGroup.isDefault') || this.get('controller.name') === 'wizardStep7Controller') { http://git-wip-us.apache.org/repos/asf/ambari/blob/cfdb955b/ambari-web/app/views/common/configs/widgets/combo_config_widget_view.js -- diff --git a/ambari-web/app/views/common/configs/widgets/combo_config_widget_view.js b/ambari-web/app/views/common/configs/widgets/combo_config_widget_view.js index 7e7c46f..e5aaadb 100644 --- a/ambari-web/app/views/common/configs/widgets/combo_config_widget_view.js +++ b/ambari-web/app/views/common/configs/widgets/combo_config_widget_view.js @@ -103,6 +103,8 @@ App.ComboConfigWidgetView = App.ConfigWidgetView.extend({ setConfigValue: function(e) { this.set('config.value', e.context); this.set('content.value', this.generateWidgetValue(e.context)); +this.get('controller').removeCurrentFromDependentList(this.get('config')); +this.sendRequestRorDependentConfigs(this.get('config')); }, /** @@ -110,6 +112,7 @@ App.ComboConfigWidgetView = App.ConfigWidgetView.extend({ * @method restoreValue */ restoreValue: function() { +this._super(); this.setConfigValue({ context: this.get('config.defaultValue') }); } http://git-wip-us.apache.org/repos/asf/ambari/blob/cfdb955b/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js -- diff --git a/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js b/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js index ab1db7a..18cd348 100644 --- a/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js +++ b/ambari-web/app/views/common/configs/widgets/list_config_widget_view.js @@ -219,6 +219,8 @@ App.ListConfigWidgetView = App.ConfigWidgetView.extend({ option.set('order', orderCounter); option.toggleProperty('isSelected'); this.incrementProperty('orderCounter'); 
+this.get('controller').removeCurrentFromDependentList(this.get('config')); +this.sendRequestRorDependentConfigs(this.get('config')); return false; }, http://git-wip-us.apache.org/repos/asf/ambari/blob/cfdb955b/ambari-web/app/views/common/configs/widgets/slider_config_widget_view.js -- diff --git a/ambari-web/app/views/common/configs/widgets/slider_config_widget_view.js b/ambari-web
ambari git commit: AMBARI-10499 Installer configs showing error even when values present. (ababiichuk)
Repository: ambari Updated Branches: refs/heads/trunk ecb141f9e -> ba25a16a3 AMBARI-10499 Installer configs showing error even when values present. (ababiichuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ba25a16a Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ba25a16a Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ba25a16a Branch: refs/heads/trunk Commit: ba25a16a3239148bc8158a1b95cb3be3e2a98dbb Parents: ecb141f Author: aBabiichuk Authored: Wed Apr 15 17:41:55 2015 +0300 Committer: aBabiichuk Committed: Wed Apr 15 18:03:03 2015 +0300 -- .../mixins/common/configs/enhanced_configs.js | 26 ++-- .../unit_convert/base_unit_convert_mixin.js | 2 +- .../convert_unit_widget_view_mixin.js | 3 +- .../widgets/slider_config_widget_view.js| 143 --- .../widgets/slider_config_widget_view_test.js | 28 5 files changed, 106 insertions(+), 96 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/ba25a16a/ambari-web/app/mixins/common/configs/enhanced_configs.js -- diff --git a/ambari-web/app/mixins/common/configs/enhanced_configs.js b/ambari-web/app/mixins/common/configs/enhanced_configs.js index 6fb63a7..d9caca0 100644 --- a/ambari-web/app/mixins/common/configs/enhanced_configs.js +++ b/ambari-web/app/mixins/common/configs/enhanced_configs.js @@ -177,8 +177,7 @@ App.EnhancedConfigsMixin = Em.Mixin.create({ getRecommendationsForDependencies: function(changedConfigs, initial, onComplete) { if (Em.isArray(changedConfigs) && changedConfigs.length > 0 || initial) { var recommendations = this.get('hostGroups'); - var configs = this._getConfigsByGroup(this.get('stepConfigs')); - recommendations.blueprint.configurations = blueprintUtils.buildConfigsJSON(this.get('services'), configs); + recommendations.blueprint.configurations = blueprintUtils.buildConfigsJSON(this.get('services'), this.get('stepConfigs')); var dataToSend = { recommend: 'configurations', @@ -298,7 +297,7 @@ 
App.EnhancedConfigsMixin = Em.Mixin.create({ var dependentConfig = dependentConfigs.filterProperty('propertyName', cp.get('name')).findProperty('fileName', App.config.getConfigTagFromFileName(cp.get('filename'))); if (dependentConfig) { var valueToSave = dependentConfig.saveRecommended ? dependentConfig.recommendedValue : dependentConfig.value; - if (selectedGroup.get('isDefault')) { + if (!selectedGroup || selectedGroup.get('isDefault')) { cp.set('value', valueToSave); } else { if (serviceConfigs.get('serviceName') !== self.get('content.serviceName')) { @@ -327,16 +326,21 @@ App.EnhancedConfigsMixin = Em.Mixin.create({ /** * get config group object for current service * @param serviceName - * @returns {*} + * @returns {App.ConfigGroup|null} */ getGroupForService: function(serviceName) { -if (this.get('content.serviceName') === serviceName) { - return this.get('selectedConfigGroup') +if (this.get('stepConfigs.length') === 0) return null; +if (this.get('name') === 'wizardStep7Controller') { + return this.get('stepConfigs').findProperty('serviceName', serviceName).get('selectedConfigGroup'); } else { - if (this.get('selectedConfigGroup.isDefault')) { -return this.get('dependentConfigGroups').filterProperty('service.serviceName', serviceName).findProperty('isDefault'); + if (this.get('content.serviceName') === serviceName) { +return this.get('selectedConfigGroup') } else { -return this.get('dependentConfigGroups').findProperty('name', this.get('groupsToSave')[serviceName]); +if (this.get('selectedConfigGroup.isDefault')) { + return this.get('dependentConfigGroups').filterProperty('service.serviceName', serviceName).findProperty('isDefault'); +} else { + return this.get('dependentConfigGroups').findProperty('name', this.get('groupsToSave')[serviceName]); +} } } }, @@ -442,10 +446,10 @@ App.EnhancedConfigsMixin = Em.Mixin.create({ var stackProperty = App.StackConfigProperty.find(propertyName + '_' + key); if (stackProperty && stackProperty.get('valueAttributes')) { -if 
(configs[key].property_attributes[propertyName].min) { +if (configs[key].property_attributes[propertyName].minimum) { stackProperty.set('valueAttributes.minimum', configs[key].property_attributes[propertyName].minimum); } -if (configs[key].property_attributes[propertyName].max) { +if (configs[key].property_attributes[propertyName].maxim
ambari git commit: AMBARI-10498 Create widget wizard: Implement step-3 "Name and Description". (atkach)
Repository: ambari Updated Branches: refs/heads/trunk 70fb033b7 -> ecb141f9e AMBARI-10498 Create widget wizard: Implement step-3 "Name and Description". (atkach) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ecb141f9 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ecb141f9 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ecb141f9 Branch: refs/heads/trunk Commit: ecb141f9e6b69180da8b3d6f5a922a1298c999ba Parents: 70fb033 Author: Andrii Tkach Authored: Wed Apr 15 15:46:58 2015 +0300 Committer: Andrii Tkach Committed: Wed Apr 15 17:45:47 2015 +0300 -- .../service/widgets/create/step1_controller.js | 6 +- .../service/widgets/create/step2_controller.js | 74 +++-- .../service/widgets/create/step3_controller.js | 68 +++- .../service/widgets/create/wizard_controller.js | 105 --- ambari-web/app/messages.js | 4 +- ambari-web/app/mixins/common/widget_mixin.js| 13 ++- ambari-web/app/models/widget_property.js| 18 +--- ambari-web/app/routes/add_widget.js | 3 +- ambari-web/app/styles/application.less | 2 +- .../app/styles/enhanced_service_dashboard.less | 21 .../main/service/widgets/create/step2.hbs | 2 +- .../service/widgets/create/step2_add_metric.hbs | 2 +- .../main/service/widgets/create/step3.hbs | 30 +- .../main/service/widgets/create/wizard.hbs | 10 +- .../service/widgets/create/expression_view.js | 29 ++--- .../main/service/widgets/create/step2_view.js | 3 +- .../main/service/widgets/create/step3_view.js | 13 +++ .../main/service/widgets/create/wizard_view.js | 10 +- 18 files changed, 294 insertions(+), 119 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/ecb141f9/ambari-web/app/controllers/main/service/widgets/create/step1_controller.js -- diff --git a/ambari-web/app/controllers/main/service/widgets/create/step1_controller.js b/ambari-web/app/controllers/main/service/widgets/create/step1_controller.js index 13e2719..8c9917f 100644 --- 
a/ambari-web/app/controllers/main/service/widgets/create/step1_controller.js +++ b/ambari-web/app/controllers/main/service/widgets/create/step1_controller.js @@ -34,7 +34,7 @@ App.WidgetWizardStep1Controller = Em.Controller.extend({ /** * @type {boolean} */ - isSubmitDisabled: function() { + isSubmitDisabled: function () { return !this.get('widgetType'); }.property('widgetType'), @@ -43,11 +43,11 @@ App.WidgetWizardStep1Controller = Em.Controller.extend({ */ options: App.WidgetType.find(), - loadStep: function() { + loadStep: function () { this.clearStep(); }, - clearStep: function() { + clearStep: function () { this.set('widgetType', ''); }, http://git-wip-us.apache.org/repos/asf/ambari/blob/ecb141f9/ambari-web/app/controllers/main/service/widgets/create/step2_controller.js -- diff --git a/ambari-web/app/controllers/main/service/widgets/create/step2_controller.js b/ambari-web/app/controllers/main/service/widgets/create/step2_controller.js index b707d13..cdf8dc8 100644 --- a/ambari-web/app/controllers/main/service/widgets/create/step2_controller.js +++ b/ambari-web/app/controllers/main/service/widgets/create/step2_controller.js @@ -22,16 +22,26 @@ App.WidgetWizardStep2Controller = Em.Controller.extend({ name: "widgetWizardStep2Controller", /** + * views of properties * @type {Array} */ - widgetProperties: [], - widgetValues: {}, + widgetPropertiesViews: [], - expressionData: { -values: [], -metrics: [] - }, - widgetPropertiesData: {}, + /** + * actual values of properties in API format + * @type {object} + */ + widgetProperties: {}, + + /** + * @type {Array} + */ + widgetValues: [], + + /** + * @type {Array} + */ + widgetMetrics: [], propertiesMap: { "warning_threshold": { @@ -62,14 +72,14 @@ App.WidgetWizardStep2Controller = Em.Controller.extend({ */ filteredMetrics: function () { var type = this.get('content.widgetType'); -return this.get('content.widgetMetrics').filter(function (metric) { +return this.get('content.allMetrics').filter(function (metric) { if (type 
=== 'GRAPH') { return metric.temporal; } else { return metric.point_in_time; } }, this); - }.property('content.widgetMetrics'), + }.property('content.allMetrics'), /** * update preview widget with latest expression data @@ -100,7 +110,8 @@ App.WidgetWizardStep2Controller = Em.Controller.extend({ break; } } -this.set('expressionData', expressionData)
ambari git commit: AMBARI-10501. shell.call and shell.checked_call should be always mocked in RMFTestCase (aonishuk)
Repository: ambari Updated Branches: refs/heads/trunk 80d8eb388 -> 70fb033b7 AMBARI-10501. shell.call and shell.checked_call should be always mocked in RMFTestCase (aonishuk) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/70fb033b Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/70fb033b Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/70fb033b Branch: refs/heads/trunk Commit: 70fb033b77cc1360eedeefcd5bf5bb571382ca1c Parents: 80d8eb3 Author: Andrew Onishuk Authored: Wed Apr 15 17:42:59 2015 +0300 Committer: Andrew Onishuk Committed: Wed Apr 15 17:42:59 2015 +0300 -- .../python/stacks/2.0.6/HDFS/test_datanode.py | 46 ++-- .../python/stacks/2.0.6/HDFS/test_namenode.py | 22 -- .../test/python/stacks/2.0.6/HDFS/test_zkfc.py | 2 - .../stacks/2.0.6/HIVE/test_hive_server.py | 26 +-- .../stacks/2.0.6/OOZIE/test_oozie_server.py | 22 -- .../stacks/2.0.6/YARN/test_nodemanager.py | 46 ++-- .../src/test/python/stacks/utils/RMFTestCase.py | 25 ++- 7 files changed, 91 insertions(+), 98 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/70fb033b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py -- diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py index 2c0c9f6..8a90ec8 100644 --- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py +++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py @@ -459,9 +459,8 @@ class TestDatanode(RMFTestCase): @patch('time.sleep') - @patch.object(shell, "call") - def test_post_rolling_restart(self, process_mock, time_mock): -process_output = """ + def test_post_rolling_restart(self, time_mock): +shell_call_output = """ Live datanodes (2): Name: 192.168.64.102:50010 (c6401.ambari.apache.org) @@ -481,57 +480,58 @@ class TestDatanode(RMFTestCase): Xceivers: 2 Last contact: Fri Dec 12 20:47:21 UTC 2014 """ - 
-process_mock.return_value = (0, process_output) - +mocks_dict = {} self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py", classname = "DataNode", command = "post_rolling_restart", config_file = "default.json", hdp_stack_version = self.STACK_VERSION, - target = RMFTestCase.TARGET_COMMON_SERVICES + target = RMFTestCase.TARGET_COMMON_SERVICES, + call_mocks = [(0, shell_call_output)], + mocks_dict = mocks_dict ) + +self.assertTrue(mocks_dict['call'].called) +self.assertEqual(mocks_dict['call'].call_count,1) -self.assertTrue(process_mock.called) -self.assertEqual(process_mock.call_count,1) @patch('time.sleep') - @patch.object(shell, "call") - def test_post_rolling_restart_datanode_not_ready(self, process_mock, time_mock): -process_mock.return_value = (0, 'There are no DataNodes here!') - + def test_post_rolling_restart_datanode_not_ready(self, time_mock): +mocks_dict = {} try: self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py", classname = "DataNode", command = "post_rolling_restart", config_file = "default.json", hdp_stack_version = self.STACK_VERSION, - target = RMFTestCase.TARGET_COMMON_SERVICES + target = RMFTestCase.TARGET_COMMON_SERVICES, + call_mocks = [(0, 'There are no DataNodes here!')], + mocks_dict = mocks_dict ) self.fail('Missing DataNode should have caused a failure') except Fail,fail: - self.assertTrue(process_mock.called) - self.assertEqual(process_mock.call_count,12) + self.assertTrue(mocks_dict['call'].called) + self.assertEqual(mocks_dict['call'].call_count,12) @patch('time.sleep') - @patch.object(shell, "call") - def test_post_rolling_restart_bad_returncode(self, process_mock, time_mock): -process_mock.return_value = (0, 'some') - + def test_post_rolling_restart_bad_returncode(self, time_mock): try: + mocks_dict = {} self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py", classname = "DataNode", command = "post_rolling_restart", config_file = "default.json", hdp_stack_version = 
self.STACK_
ambari git commit: AMBARI-10421 - [WinTP2] Merge HDPWIN HIVE package scripts to common services (init_metastore_schema)
Repository: ambari Updated Branches: refs/heads/trunk 7a68f8e49 -> 80d8eb388 AMBARI-10421 - [WinTP2] Merge HDPWIN HIVE package scripts to common services (init_metastore_schema) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/80d8eb38 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/80d8eb38 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/80d8eb38 Branch: refs/heads/trunk Commit: 80d8eb388258c16506498973872d412e7361f93d Parents: 7a68f8e Author: Artem Baranchuk Authored: Wed Apr 15 16:18:52 2015 +0300 Committer: Artem Baranchuk Committed: Wed Apr 15 16:18:52 2015 +0300 -- .../HIVE/0.12.0.2.0/package/scripts/params_windows.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/80d8eb38/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py index a9395a6..d84b226 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_windows.py @@ -51,4 +51,4 @@ hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.op hive_exclude_packages = [] Metastore Schema -init_metastore_schema = config['configurations']['hive-site']['datanucleus.autoCreateSchema'] \ No newline at end of file +init_metastore_schema = not config['configurations']['hive-site']['datanucleus.autoCreateSchema'] \ No newline at end of file
[1/3] ambari git commit: AMBARI-10421 - [WinTP2] Merge HDPWIN HIVE package scripts to common services
Repository: ambari Updated Branches: refs/heads/trunk 316021844 -> 7a68f8e49 http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.postgres.sql -- diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.postgres.sql b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.postgres.sql deleted file mode 100644 index 61769f6..000 --- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.postgres.sql +++ /dev/null @@ -1,1405 +0,0 @@ --- --- PostgreSQL database dump --- - -SET statement_timeout = 0; -SET client_encoding = 'UTF8'; -SET standard_conforming_strings = off; -SET check_function_bodies = false; -SET client_min_messages = warning; -SET escape_string_warning = off; - -SET search_path = public, pg_catalog; - -SET default_tablespace = ''; - -SET default_with_oids = false; - --- --- Name: BUCKETING_COLS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace: --- - -CREATE TABLE "BUCKETING_COLS" ( -"SD_ID" bigint NOT NULL, -"BUCKET_COL_NAME" character varying(256) DEFAULT NULL::character varying, -"INTEGER_IDX" bigint NOT NULL -); - - --- --- Name: CDS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace: --- - -CREATE TABLE "CDS" ( -"CD_ID" bigint NOT NULL -); - - --- --- Name: COLUMNS_OLD; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace: --- - -CREATE TABLE "COLUMNS_OLD" ( -"SD_ID" bigint NOT NULL, -"COMMENT" character varying(256) DEFAULT NULL::character varying, -"COLUMN_NAME" character varying(128) NOT NULL, -"TYPE_NAME" character varying(4000) NOT NULL, -"INTEGER_IDX" bigint NOT NULL -); - - --- --- Name: COLUMNS_V2; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace: --- - -CREATE TABLE "COLUMNS_V2" ( -"CD_ID" bigint NOT NULL, -"COMMENT" character varying(4000), -"COLUMN_NAME" character varying(128) NOT NULL, -"TYPE_NAME" character 
varying(4000), -"INTEGER_IDX" integer NOT NULL -); - - --- --- Name: DATABASE_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace: --- - -CREATE TABLE "DATABASE_PARAMS" ( -"DB_ID" bigint NOT NULL, -"PARAM_KEY" character varying(180) NOT NULL, -"PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying -); - - --- --- Name: DBS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace: --- - -CREATE TABLE "DBS" ( -"DB_ID" bigint NOT NULL, -"DESC" character varying(4000) DEFAULT NULL::character varying, -"DB_LOCATION_URI" character varying(4000) NOT NULL, -"NAME" character varying(128) DEFAULT NULL::character varying -); - - --- --- Name: DB_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace: --- - -CREATE TABLE "DB_PRIVS" ( -"DB_GRANT_ID" bigint NOT NULL, -"CREATE_TIME" bigint NOT NULL, -"DB_ID" bigint, -"GRANT_OPTION" smallint NOT NULL, -"GRANTOR" character varying(128) DEFAULT NULL::character varying, -"GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying, -"PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying, -"PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying, -"DB_PRIV" character varying(128) DEFAULT NULL::character varying -); - - --- --- Name: GLOBAL_PRIVS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace: --- - -CREATE TABLE "GLOBAL_PRIVS" ( -"USER_GRANT_ID" bigint NOT NULL, -"CREATE_TIME" bigint NOT NULL, -"GRANT_OPTION" smallint NOT NULL, -"GRANTOR" character varying(128) DEFAULT NULL::character varying, -"GRANTOR_TYPE" character varying(128) DEFAULT NULL::character varying, -"PRINCIPAL_NAME" character varying(128) DEFAULT NULL::character varying, -"PRINCIPAL_TYPE" character varying(128) DEFAULT NULL::character varying, -"USER_PRIV" character varying(128) DEFAULT NULL::character varying -); - - --- --- Name: IDXS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace: --- - -CREATE TABLE "IDXS" ( -"INDEX_ID" bigint NOT NULL, -"CREATE_TIME" bigint NOT 
NULL, -"DEFERRED_REBUILD" boolean NOT NULL, -"INDEX_HANDLER_CLASS" character varying(4000) DEFAULT NULL::character varying, -"INDEX_NAME" character varying(128) DEFAULT NULL::character varying, -"INDEX_TBL_ID" bigint, -"LAST_ACCESS_TIME" bigint NOT NULL, -"ORIG_TBL_ID" bigint, -"SD_ID" bigint -); - - --- --- Name: INDEX_PARAMS; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace: --- - -CREATE TABLE "INDEX_PARAMS" ( -"INDEX_ID" bigint NOT NULL, -"PARAM_KEY" character varying(256) NOT NULL, -"PARAM_VALUE" character varying(4000) DEFAULT NULL::character varying -); - - --- --- Name: NUCLEUS_TABLES; Type: TABLE; Schema: public; Owner: hiveuser; Tablespace: --- - -CREATE TABLE "NUCLEUS_TABLES" ( -"CLASS
[2/3] ambari git commit: AMBARI-10421 - [WinTP2] Merge HDPWIN HIVE package scripts to common services
http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.mysql.sql -- diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.mysql.sql b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.mysql.sql deleted file mode 100644 index bacee9e..000 --- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/HIVE/etc/hive-schema-0.12.0.mysql.sql +++ /dev/null @@ -1,777 +0,0 @@ --- MySQL dump 10.13 Distrib 5.5.25, for osx10.6 (i386) --- --- Host: localhostDatabase: test --- -- --- Server version 5.5.25 - -/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; -/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; -/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; -/*!40101 SET NAMES utf8 */; -/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; -/*!40103 SET TIME_ZONE='+00:00' */; -/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; -/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; -/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; -/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; - --- --- Table structure for table `BUCKETING_COLS` --- - -/*!40101 SET @saved_cs_client = @@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE IF NOT EXISTS `BUCKETING_COLS` ( - `SD_ID` bigint(20) NOT NULL, - `BUCKET_COL_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL, - `INTEGER_IDX` int(11) NOT NULL, - PRIMARY KEY (`SD_ID`,`INTEGER_IDX`), - KEY `BUCKETING_COLS_N49` (`SD_ID`), - CONSTRAINT `BUCKETING_COLS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; -/*!40101 SET character_set_client = @saved_cs_client */; - --- --- Table structure for table `CDS` --- - -/*!40101 SET @saved_cs_client = 
@@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE IF NOT EXISTS `CDS` ( - `CD_ID` bigint(20) NOT NULL, - PRIMARY KEY (`CD_ID`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; -/*!40101 SET character_set_client = @saved_cs_client */; - --- --- Table structure for table `COLUMNS_V2` --- - -/*!40101 SET @saved_cs_client = @@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE IF NOT EXISTS `COLUMNS_V2` ( - `CD_ID` bigint(20) NOT NULL, - `COMMENT` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL, - `COLUMN_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL, - `TYPE_NAME` varchar(4000) DEFAULT NULL, - `INTEGER_IDX` int(11) NOT NULL, - PRIMARY KEY (`CD_ID`,`COLUMN_NAME`), - KEY `COLUMNS_V2_N49` (`CD_ID`), - CONSTRAINT `COLUMNS_V2_FK1` FOREIGN KEY (`CD_ID`) REFERENCES `CDS` (`CD_ID`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; -/*!40101 SET character_set_client = @saved_cs_client */; - --- --- Table structure for table `DATABASE_PARAMS` --- - -/*!40101 SET @saved_cs_client = @@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE IF NOT EXISTS `DATABASE_PARAMS` ( - `DB_ID` bigint(20) NOT NULL, - `PARAM_KEY` varchar(180) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL, - `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL, - PRIMARY KEY (`DB_ID`,`PARAM_KEY`), - KEY `DATABASE_PARAMS_N49` (`DB_ID`), - CONSTRAINT `DATABASE_PARAMS_FK1` FOREIGN KEY (`DB_ID`) REFERENCES `DBS` (`DB_ID`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; -/*!40101 SET character_set_client = @saved_cs_client */; - --- --- Table structure for table `DBS` --- - -/*!40101 SET @saved_cs_client = @@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE IF NOT EXISTS `DBS` ( - `DB_ID` bigint(20) NOT NULL, - `DESC` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL, - `DB_LOCATION_URI` varchar(4000) CHARACTER SET 
latin1 COLLATE latin1_bin NOT NULL, - `NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL, - PRIMARY KEY (`DB_ID`), - UNIQUE KEY `UNIQUE_DATABASE` (`NAME`) -) ENGINE=InnoDB DEFAULT CHARSET=latin1; -/*!40101 SET character_set_client = @saved_cs_client */; - --- --- Table structure for table `DB_PRIVS` --- - -/*!40101 SET @saved_cs_client = @@character_set_client */; -/*!40101 SET character_set_client = utf8 */; -CREATE TABLE IF NOT EXISTS `DB_PRIVS` ( - `DB_GRANT_ID` bigint(20) NOT NULL, - `CREATE_TIME` int(11) NOT NULL, - `DB_ID` bigint(20) DEFAULT NULL, - `GRANT_OPTION` smallint(6) NOT NULL, - `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL, - `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL, - `PRINCIPAL_NAME` varchar(1
[3/3] ambari git commit: AMBARI-10421 - [WinTP2] Merge HDPWIN HIVE package scripts to common services
AMBARI-10421 - [WinTP2] Merge HDPWIN HIVE package scripts to common services Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7a68f8e4 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7a68f8e4 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7a68f8e4 Branch: refs/heads/trunk Commit: 7a68f8e49bafbc34a316c0a9fe9046ab3e5c2ffd Parents: 3160218 Author: Artem Baranchuk Authored: Wed Apr 15 14:56:34 2015 +0300 Committer: Artem Baranchuk Committed: Wed Apr 15 15:43:11 2015 +0300 -- .../HIVE/0.12.0.2.0/package/scripts/hcat.py | 15 + .../0.12.0.2.0/package/scripts/hcat_client.py | 18 +- .../package/scripts/hcat_service_check.py | 11 + .../HIVE/0.12.0.2.0/package/scripts/hive.py | 47 +- .../0.12.0.2.0/package/scripts/hive_client.py | 38 +- .../package/scripts/hive_metastore.py | 41 +- .../0.12.0.2.0/package/scripts/hive_server.py | 35 +- .../0.12.0.2.0/package/scripts/hive_service.py | 16 + .../HIVE/0.12.0.2.0/package/scripts/params.py | 397 + .../0.12.0.2.0/package/scripts/params_linux.py | 414 ++ .../package/scripts/params_windows.py | 54 + .../0.12.0.2.0/package/scripts/service_check.py | 22 +- .../0.12.0.2.0/package/scripts/status_params.py | 55 +- .../HIVE/0.12.0.2.0/package/scripts/webhcat.py | 12 +- .../package/scripts/webhcat_server.py | 37 +- .../package/scripts/webhcat_service.py | 10 + .../package/scripts/webhcat_service_check.py| 11 + .../HIVE/etc/hive-schema-0.12.0.mysql.sql | 777 -- .../HIVE/etc/hive-schema-0.12.0.oracle.sql | 717 - .../HIVE/etc/hive-schema-0.12.0.postgres.sql| 1405 -- .../HDPWIN/2.1/services/HIVE/metainfo.xml |4 + .../HIVE/package/scripts/hcat_client.py | 40 - .../HIVE/package/scripts/hcat_service_check.py | 25 - .../2.1/services/HIVE/package/scripts/hive.py | 61 - .../HIVE/package/scripts/hive_client.py | 41 - .../HIVE/package/scripts/hive_metastore.py | 53 - .../HIVE/package/scripts/hive_server.py | 52 - .../HIVE/package/scripts/mysql_server.py| 46 - 
.../2.1/services/HIVE/package/scripts/params.py | 55 - .../HIVE/package/scripts/service_check.py | 39 - .../HIVE/package/scripts/service_mapping.py | 23 - .../services/HIVE/package/scripts/webhcat.py| 30 - .../HIVE/package/scripts/webhcat_server.py | 48 - .../package/scripts/webhcat_service_check.py| 27 - 34 files changed, 750 insertions(+), 3926 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py index 31c1673..1f7893d 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py @@ -20,8 +20,23 @@ limitations under the License. from resource_management import * import sys +from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl +from ambari_commons import OSConst +@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY) +def hcat(): + import params + + XmlConfig("hive-site.xml", +conf_dir = params.hive_conf_dir, +configurations = params.config['configurations']['hive-site'], +owner=params.hive_user, + configuration_attributes=params.config['configuration_attributes']['hive-site'] + ) + + +@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT) def hcat(): import params http://git-wip-us.apache.org/repos/asf/ambari/blob/7a68f8e4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py -- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py index 811cac6..79096e4 100644 --- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py @@ -21,12 +21,11 @@ limitations under the License. import sys from resource_management import * from hcat import hcat +from ambari_commons import OSConst +from ambari_commons.os_family_impl import OsFamilyImpl -class
[2/2] ambari git commit: AMBARI-10439 - [WinTP2] Merge HDPWIN YARN package scripts into common services
AMBARI-10439 - [WinTP2] Merge HDPWIN YARN package scripts into common services Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/31602184 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/31602184 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/31602184 Branch: refs/heads/trunk Commit: 316021844bb7ef6f8b362b91c76579309d5df02c Parents: 899fe99 Author: Artem Baranchuk Authored: Wed Apr 15 14:15:29 2015 +0300 Committer: Artem Baranchuk Committed: Wed Apr 15 14:47:11 2015 +0300 -- .../scripts/application_timeline_server.py | 43 ++-- .../2.1.0.2.0/package/scripts/historyserver.py | 38 ++- .../package/scripts/mapred_service_check.py | 87 +++ .../package/scripts/mapreduce2_client.py| 34 ++- .../2.1.0.2.0/package/scripts/nodemanager.py| 44 ++-- .../YARN/2.1.0.2.0/package/scripts/params.py| 212 + .../2.1.0.2.0/package/scripts/params_linux.py | 229 +++ .../2.1.0.2.0/package/scripts/params_windows.py | 58 + .../package/scripts/resourcemanager.py | 66 -- .../YARN/2.1.0.2.0/package/scripts/service.py | 13 ++ .../2.1.0.2.0/package/scripts/service_check.py | 52 + .../2.1.0.2.0/package/scripts/status_params.py | 40 ++-- .../YARN/2.1.0.2.0/package/scripts/yarn.py | 25 ++ .../2.1.0.2.0/package/scripts/yarn_client.py| 34 ++- .../scripts/application_timeline_server.py | 54 - .../YARN/package/scripts/historyserver.py | 53 - .../package/scripts/mapred_service_check.py | 105 - .../YARN/package/scripts/mapreduce2_client.py | 43 .../YARN/package/scripts/nodemanager.py | 53 - .../2.1/services/YARN/package/scripts/params.py | 57 - .../YARN/package/scripts/resourcemanager.py | 77 --- .../YARN/package/scripts/service_check.py | 68 -- .../YARN/package/scripts/service_mapping.py | 26 --- .../2.1/services/YARN/package/scripts/yarn.py | 45 .../YARN/package/scripts/yarn_client.py | 44 .../package/templates/container-executor.cfg.j2 | 40 .../package/templates/exclude_hosts_list.j2 | 21 -- 
.../YARN/package/templates/mapreduce.conf.j2| 35 --- .../package/templates/taskcontroller.cfg.j2 | 38 --- .../YARN/package/templates/yarn.conf.j2 | 35 --- 30 files changed, 664 insertions(+), 1105 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/31602184/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py -- diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py index 184596c..8cb5a39 100644 --- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py +++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py @@ -25,24 +25,44 @@ from resource_management.libraries.functions.security_commons import build_expec cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties,\ FILE_TYPE_XML from resource_management.libraries.functions.format import format - from yarn import yarn from service import service +from ambari_commons import OSConst +from ambari_commons.os_family_impl import OsFamilyImpl -class ApplicationTimelineServer(Script): - - def get_stack_to_component(self): -return {"HDP": "hadoop-yarn-timelineserver"} +class ApplicationTimelineServer(Script): def install(self, env): self.install_packages(env) -#self.configure(env) + + def start(self, env, rolling_restart=False): +import params +env.set_params(params) +self.configure(env) # FOR SECURITY +service('timelineserver', action='start') + + def stop(self, env, rolling_restart=False): +import params +env.set_params(params) +service('timelineserver', action='stop') def configure(self, env): import params env.set_params(params) yarn(name='apptimelineserver') + +@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY) +class 
ApplicationTimelineServerWindows(ApplicationTimelineServer): + def status(self, env): +service('timelineserver', action='status') + + +@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT) +class ApplicationTimelineServerDefault(ApplicationTimelineServer): + def get_stack_to_component(self): +return {"HDP": "hadoop-yarn-timelineserver"} + def pre_rolling_restart(self, env): Logger.info("Executing Rolling Upgrade pre-restart") import param
[1/2] ambari git commit: AMBARI-10439 - [WinTP2] Merge HDPWIN YARN package scripts into common services
Repository: ambari Updated Branches: refs/heads/trunk 899fe99f1 -> 316021844 http://git-wip-us.apache.org/repos/asf/ambari/blob/31602184/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/container-executor.cfg.j2 -- diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/container-executor.cfg.j2 b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/container-executor.cfg.j2 deleted file mode 100644 index 90b12e6..000 --- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/container-executor.cfg.j2 +++ /dev/null @@ -1,40 +0,0 @@ -{# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#} - -#/* -# * Licensed to the Apache Software Foundation (ASF) under one -# * or more contributor license agreements. See the NOTICE file -# * distributed with this work for additional information -# * regarding copyright ownership. The ASF licenses this file -# * to you under the Apache License, Version 2.0 (the -# * "License"); you may not use this file except in compliance -# * with the License. 
You may obtain a copy of the License at -# * -# * http://www.apache.org/licenses/LICENSE-2.0 -# * -# * Unless required by applicable law or agreed to in writing, software -# * distributed under the License is distributed on an "AS IS" BASIS, -# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# * See the License for the specific language governing permissions and -# * limitations under the License. -# */ -yarn.nodemanager.local-dirs={{nm_local_dirs}} -yarn.nodemanager.log-dirs={{nm_log_dirs}} -yarn.nodemanager.linux-container-executor.group={{yarn_executor_container_group}} -banned.users=hdfs,yarn,mapred,bin -min.user.id=1000 http://git-wip-us.apache.org/repos/asf/ambari/blob/31602184/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/exclude_hosts_list.j2 -- diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/exclude_hosts_list.j2 b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/exclude_hosts_list.j2 deleted file mode 100644 index c7ce416..000 --- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/exclude_hosts_list.j2 +++ /dev/null @@ -1,21 +0,0 @@ -{# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -#} - -{% for host in exclude_hosts %} -{{host}} -{% endfor %} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/31602184/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/mapreduce.conf.j2 -- diff --git a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/mapreduce.conf.j2 b/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/mapreduce.conf.j2 deleted file mode 100644 index b996645..000 --- a/ambari-server/src/main/resources/stacks/HDPWIN/2.1/services/YARN/package/templates/mapreduce.conf.j2 +++ /dev/null @@ -1,35 +0,0 @@ -{# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information
ambari git commit: AMBARI-10474. NFSGateways have incorrect displaying at UI. (akovalenko)
Repository: ambari Updated Branches: refs/heads/trunk 7262f985d -> 899fe99f1 AMBARI-10474. NFSGateways have incorrect displaying at UI. (akovalenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/899fe99f Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/899fe99f Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/899fe99f Branch: refs/heads/trunk Commit: 899fe99f140b21ffb527b59571763898713e13cc Parents: 7262f98 Author: Aleksandr Kovalenko Authored: Tue Apr 14 20:19:34 2015 +0300 Committer: Aleksandr Kovalenko Committed: Wed Apr 15 14:20:20 2015 +0300 -- .../app/mappers/components_state_mapper.js | 5 .../app/mappers/service_metrics_mapper.js | 5 +++- .../templates/main/service/services/hdfs.hbs| 26 ++-- .../app/views/main/service/services/hdfs.js | 7 -- 4 files changed, 22 insertions(+), 21 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/899fe99f/ambari-web/app/mappers/components_state_mapper.js -- diff --git a/ambari-web/app/mappers/components_state_mapper.js b/ambari-web/app/mappers/components_state_mapper.js index 37128fe..ee3bccf 100644 --- a/ambari-web/app/mappers/components_state_mapper.js +++ b/ambari-web/app/mappers/components_state_mapper.js @@ -48,6 +48,11 @@ App.componentsStateMapper = App.QuickDataMapper.create({ data_nodes_installed: 'INSTALLED_PATH', data_nodes_total: 'TOTAL_PATH' }, +'NFS_GATEWAY': { + nfs_gateways_started: 'STARTED_PATH', + nfs_gateways_installed: 'INSTALLED_PATH', + nfs_gateways_total: 'TOTAL_PATH' +}, 'NODEMANAGER': { node_managers_started: 'STARTED_PATH', node_managers_installed: 'INSTALLED_PATH', http://git-wip-us.apache.org/repos/asf/ambari/blob/899fe99f/ambari-web/app/mappers/service_metrics_mapper.js -- diff --git a/ambari-web/app/mappers/service_metrics_mapper.js b/ambari-web/app/mappers/service_metrics_mapper.js index a2189bb..f6e0756 100644 --- a/ambari-web/app/mappers/service_metrics_mapper.js +++ 
b/ambari-web/app/mappers/service_metrics_mapper.js @@ -65,7 +65,10 @@ App.serviceMetricsMapper = App.QuickDataMapper.create({ name_node_rpc: 'nameNodeComponent.host_components[0].metrics.rpc.RpcQueueTime_avg_time', data_nodes_started: 'data_nodes_started', data_nodes_installed: 'data_nodes_installed', -data_nodes_total: 'data_nodes_total' +data_nodes_total: 'data_nodes_total', +nfs_gateways_started: 'nfs_gateways_started', +nfs_gateways_installed: 'nfs_gateways_installed', +nfs_gateways_total: 'nfs_gateways_total' }, yarnConfig: { version: 'resourceManagerComponent.ServiceComponentInfo.Version', http://git-wip-us.apache.org/repos/asf/ambari/blob/899fe99f/ambari-web/app/templates/main/service/services/hdfs.hbs -- diff --git a/ambari-web/app/templates/main/service/services/hdfs.hbs b/ambari-web/app/templates/main/service/services/hdfs.hbs index 5c93dfe..9319490 100644 --- a/ambari-web/app/templates/main/service/services/hdfs.hbs +++ b/ambari-web/app/templates/main/service/services/hdfs.hbs @@ -29,6 +29,18 @@ {{t common.started}} + + + {{t dashboard.services.hdfs.nfsgateways}} + + + {{#view App.ComponentLiveTextView liveComponentsBinding="view.service.nfsGatewaysStarted" totalComponentsBinding="view.service.nfsGatewaysTotal"}} +{{view.liveComponents}}/{{view.totalComponents}} + {{/view}} + +{{t common.started}} + + {{t dashboard.services.hdfs.datanodecounts}} @@ -115,16 +127,4 @@ {{view.safeModeStatus}} - - - - {{t dashboard.services.hdfs.nfsgateways}} - - + \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/899fe99f/ambari-web/app/views/main/service/services/hdfs.js -- diff --git a/ambari-web/app/views/main/service/services/hdfs.js b/ambari-web/app/views/main/service/services/hdfs.js index 518c493..01f1ee8 100644 --- a/ambari-web/app/views/main/service/services/hdfs.js +++ b/ambari-web/app/views/main/service/services/hdfs.js @@ -70,13 +70,6 @@ App.MainDashboardServiceHdfsView = App.MainDashboardServiceView.extend({ return 
this.get('service.dataNodesInstalled'); }.property('service.dataNodesInstalled'), - nfsGatewaysLive: function () { -return this.get('service.nfsGatewaysStarted'); - }.property('service.nfsGatewaysStarted'), - nfsGatewaysDead: function () { -return this.get('service.nfsGatewaysInstalled'); - }.property('service.nfsGatewaysInstalled'), - showJournalNo
ambari git commit: AMBARI-10496. Slider control when being moved should not jump to nearest major tick (onechiporenko)
Repository: ambari Updated Branches: refs/heads/trunk 64850bb77 -> 7262f985d AMBARI-10496. Slider control when being moved should not jump to nearest major tick (onechiporenko) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7262f985 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7262f985 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7262f985 Branch: refs/heads/trunk Commit: 7262f985d6995378df7862718b7fcd5170df3c67 Parents: 64850bb Author: Oleg Nechiporenko Authored: Wed Apr 15 13:37:01 2015 +0300 Committer: Oleg Nechiporenko Committed: Wed Apr 15 13:37:01 2015 +0300 -- .../configs/widgets/slider_config_widget_view.js | 1 - .../configs/widgets/time_interval_spinner_view.js | 14 ++ 2 files changed, 6 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/ambari/blob/7262f985/ambari-web/app/views/common/configs/widgets/slider_config_widget_view.js -- diff --git a/ambari-web/app/views/common/configs/widgets/slider_config_widget_view.js b/ambari-web/app/views/common/configs/widgets/slider_config_widget_view.js index 678f0cd..31e1c94 100644 --- a/ambari-web/app/views/common/configs/widgets/slider_config_widget_view.js +++ b/ambari-web/app/views/common/configs/widgets/slider_config_widget_view.js @@ -239,7 +239,6 @@ App.SliderConfigWidgetView = App.ConfigWidgetView.extend({ ticks: ticks, tooltip: 'always', ticks_labels: ticksLabels, - ticks_snap_bounds: Em.get(valueAttributes, 'type') === 'int' ? 1 : 0.1, step: increment_step ? this.widgetValueByConfigAttributes(increment_step) : (Em.get(valueAttributes, 'type') === 'int' ? 
1 : 0.1) }); http://git-wip-us.apache.org/repos/asf/ambari/blob/7262f985/ambari-web/app/views/common/configs/widgets/time_interval_spinner_view.js -- diff --git a/ambari-web/app/views/common/configs/widgets/time_interval_spinner_view.js b/ambari-web/app/views/common/configs/widgets/time_interval_spinner_view.js index 3e8375e..99906d1 100644 --- a/ambari-web/app/views/common/configs/widgets/time_interval_spinner_view.js +++ b/ambari-web/app/views/common/configs/widgets/time_interval_spinner_view.js @@ -99,12 +99,10 @@ App.TimeIntervalSpinnerView = App.ConfigWidgetView.extend({ prepareContent: function() { var property = this.get('config'); -this.setProperties({ - propertyUnit: property.get('stackConfigProperty.valueAttributes.unit'), - minValue: this.generateWidgetValue(property.get('stackConfigProperty.valueAttributes.minimum')), - maxValue: this.generateWidgetValue(property.get('stackConfigProperty.valueAttributes.maximum')), - content: this.generateWidgetValue(property.get('value')) -}); +this.set('propertyUnit', property.get('stackConfigProperty.valueAttributes.unit')); +this.set('minValue', this.generateWidgetValue(property.get('stackConfigProperty.valueAttributes.minimum'))); +this.set('maxValue', this.generateWidgetValue(property.get('stackConfigProperty.valueAttributes.maximum'))); +this.set('content', this.generateWidgetValue(property.get('value'))); this.parseIncrement(); }, @@ -115,12 +113,12 @@ App.TimeIntervalSpinnerView = App.ConfigWidgetView.extend({ */ parseIncrement: function () { var property = this.get('config'); -var type = this.get('content.lastObject.type'); var step = property.get('stackConfigProperty.valueAttributes.increment_step'); if (step) { + var type = this.get('content.lastObject.type'); step = this.convertValue(step, property.get('stackConfigProperty.valueAttributes.unit'), type); this.set('content.lastObject.incrementStep', step); - if (step > Em.get(this, 'timeMaxValueOverflow.' 
+ type)) { + if (step > Em.get(this, 'timeMaxValueOverflow.' + type)) { this.set('content.lastObject.enabled', false); } }