http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
index 7c2f2df..65487b7 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/metrics_grafana_util.py
@@ -18,19 +18,24 @@ limitations under the License.
 """
 
 import httplib
+
+from ambari_commons.parallel_processing import PrallelProcessResult, execute_in_parallel, SUCCESS
+from service_check import post_metrics_to_collector
 from resource_management.core.logger import Logger
 from resource_management.core.base import Fail
 from resource_management import Template
 from collections import namedtuple
 from urlparse import urlparse
 from base64 import b64encode
+import random
 import time
 import socket
 import ambari_simplejson as json
 import network
+import os
 
-GRAFANA_CONNECT_TRIES = 5
-GRAFANA_CONNECT_TIMEOUT = 10
+GRAFANA_CONNECT_TRIES = 15
+GRAFANA_CONNECT_TIMEOUT = 20
 GRAFANA_SEARCH_BUILTIN_DASHBOARDS = "/api/search?tag=builtin"
 GRAFANA_DATASOURCE_URL = "/api/datasources"
 GRAFANA_DASHBOARDS_URL = "/api/dashboards/db"
@@ -171,20 +176,32 @@ def perform_grafana_delete_call(url, server):
 
   return response
 
-def is_unchanged_datasource_url(datasource_url):
+def is_unchanged_datasource_url(grafana_datasource_url, new_datasource_host):
   import params
-  parsed_url = urlparse(datasource_url)
+  parsed_url = urlparse(grafana_datasource_url)
   Logger.debug("parsed url: scheme = %s, host = %s, port = %s" % (
     parsed_url.scheme, parsed_url.hostname, parsed_url.port))
   Logger.debug("collector: scheme = %s, host = %s, port = %s" %
-               (params.metric_collector_protocol, params.metric_collector_host,
+               (params.metric_collector_protocol, new_datasource_host,
                 params.metric_collector_port))
 
   return parsed_url.scheme.strip() == params.metric_collector_protocol.strip() and \
-         parsed_url.hostname.strip() == params.metric_collector_host.strip() and \
+         parsed_url.hostname.strip() == new_datasource_host.strip() and \
          str(parsed_url.port) == params.metric_collector_port
 
+def do_ams_collector_post(metric_collector_host, params):
+  ams_metrics_post_url = "/ws/v1/timeline/metrics/"
+  random_value1 = random.random()
+  headers = {"Content-type": "application/json"}
+  ca_certs = os.path.join(params.ams_collector_conf_dir,
+                          params.metric_truststore_ca_certs)
+
+  current_time = int(time.time()) * 1000
+  metric_json = Template('smoketest_metrics.json.j2', hostname=params.hostname, random1=random_value1,
+                         current_time=current_time).get_content()
+  post_metrics_to_collector(ams_metrics_post_url, metric_collector_host, params.metric_collector_port, params.metric_collector_https_enabled,
+                            metric_json, headers, ca_certs)
 
 def create_ams_datasource():
   import params
   server = Server(protocol = params.ams_grafana_protocol.strip(),
@@ -196,11 +213,28 @@ def create_ams_datasource():
   """
   Create AMS datasource in Grafana, if exsists make sure the collector url is accurate
   """
-  ams_datasource_json = Template('metrics_grafana_datasource.json.j2',
-    ams_datasource_name=METRICS_GRAFANA_DATASOURCE_NAME).get_content()
+
+  Logger.info("Trying to find working metric collector")
+  results = execute_in_parallel(do_ams_collector_post, params.ams_collector_hosts.split(','), params)
+  new_datasource_host = ""
+
+  for host in params.ams_collector_hosts:
+    if host in results:
+      if results[host].status == SUCCESS:
+        new_datasource_host = host
+        Logger.info("Found working collector on host %s" % new_datasource_host)
+        break
+      else:
+        Logger.warning(results[host].result)
 
-  Logger.info("Checking if AMS Grafana datasource already exists")
+  if new_datasource_host == "":
+    Logger.warning("All metric collectors are unavailable. Will use random collector as datasource host.")
+    new_datasource_host = params.random_metric_collector_host
+  Logger.info("New datasource host will be %s" % new_datasource_host)
+
+  ams_datasource_json = Template('metrics_grafana_datasource.json.j2',
+    ams_datasource_name=METRICS_GRAFANA_DATASOURCE_NAME, ams_datasource_host=new_datasource_host).get_content()
+  Logger.info("Checking if AMS Grafana datasource already exists")
 
   response = perform_grafana_get_call(GRAFANA_DATASOURCE_URL, server)
   create_datasource = True
 
@@ -215,7 +249,7 @@
       Logger.info("Ambari Metrics Grafana datasource already present. Checking Metrics Collector URL")
       datasource_url = datasources_json[i]["url"]
 
-      if is_unchanged_datasource_url(datasource_url):
+      if is_unchanged_datasource_url(datasource_url, new_datasource_host):
         Logger.info("Metrics Collector URL validation succeeded.")
         return
       else: # Metrics datasource present, but collector host is wrong.
@@ -359,4 +393,3 @@ def create_ams_dashboards():
         pass
 
 
-
http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py index e278d0d..ad66ffe 100644 --- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py @@ -50,10 +50,16 @@ pass #AMBARI_METRICS data ams_pid_dir = status_params.ams_collector_pid_dir - +is_ams_distributed = config['configurations']['ams-site']['timeline.metrics.service.operation.mode'] == 'distributed' ams_collector_script = "/usr/sbin/ambari-metrics-collector" ams_collector_pid_dir = status_params.ams_collector_pid_dir -ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", []) +ams_collector_hosts = ",".join(default("/clusterHostInfo/metrics_collector_hosts", [])) +ams_collector_list = default("/clusterHostInfo/metrics_collector_hosts", []) +embedded_mode_multiple_instances = False + +if not is_ams_distributed and len(ams_collector_list) > 1: + embedded_mode_multiple_instances = True + failover_strategy_blacklisted_interval_seconds = default("/configurations/ams-env/failover_strategy_blacklisted_interval", "600") failover_strategy = default("/configurations/ams-site/failover.strategy", "round-robin") if default("/configurations/ams-site/timeline.metrics.service.http.policy", "HTTP_ONLY") == "HTTPS_ONLY": @@ -118,6 +124,9 @@ if 'cluster-env' in config['configurations'] and \ metric_collector_host = config['configurations']['cluster-env']['metrics_collector_vip_host'] else: metric_collector_host = select_metric_collector_hosts_from_hostnames(ams_collector_hosts) + +random_metric_collector_host = select_metric_collector_hosts_from_hostnames(ams_collector_hosts) + if 'cluster-env' in config['configurations'] and \ 'metrics_collector_vip_port' in config['configurations']['cluster-env']: metric_collector_port = config['configurations']['cluster-env']['metrics_collector_vip_port'] @@ -172,7 +181,6 @@ hbase_pid_dir = status_params.hbase_pid_dir is_hbase_distributed = config['configurations']['ams-hbase-site']['hbase.cluster.distributed'] is_local_fs_rootdir = hbase_root_dir.startswith('file://') -is_ams_distributed = config['configurations']['ams-site']['timeline.metrics.service.operation.mode'] == 'distributed' # security is disabled for embedded mode, when HBase is backed by file security_enabled = False if not is_hbase_distributed else config['configurations']['cluster-env']['security_enabled'] @@ -227,15 +235,18 @@ else: max_open_files_limit = default("/configurations/ams-hbase-env/max_open_files_limit", "32768") hostname = config["hostname"] +cluster_zookeeper_quorum_hosts = ",".join(config['clusterHostInfo']['zookeeper_hosts']) +if 'zoo.cfg' in config['configurations'] and 'clientPort' in config['configurations']['zoo.cfg']: + cluster_zookeeper_clientPort = config['configurations']['zoo.cfg']['clientPort'] +else: + cluster_zookeeper_clientPort = '2181' + if not is_hbase_distributed: zookeeper_quorum_hosts = hostname zookeeper_clientPort = '61181' else: - zookeeper_quorum_hosts = ",".join(config['clusterHostInfo']['zookeeper_hosts']) - if 'zoo.cfg' in config['configurations'] and 'clientPort' in 
config['configurations']['zoo.cfg']: - zookeeper_clientPort = config['configurations']['zoo.cfg']['clientPort'] - else: - zookeeper_clientPort = '2181' + zookeeper_quorum_hosts = cluster_zookeeper_quorum_hosts + zookeeper_clientPort = cluster_zookeeper_clientPort ams_checkpoint_dir = config['configurations']['ams-site']['timeline.metrics.aggregator.checkpoint.dir'] _hbase_tmp_dir = config['configurations']['ams-hbase-site']['hbase.tmp.dir'] @@ -337,4 +348,3 @@ HdfsResource = functools.partial( ) - http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py index ddd3e42..1417f4a 100644 --- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py +++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/service_check.py @@ -25,6 +25,7 @@ from resource_management import Template from ambari_commons import OSConst from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl +from ambari_commons.parallel_processing import PrallelProcessResult, execute_in_parallel, SUCCESS import httplib import network @@ -39,10 +40,10 @@ import socket class AMSServiceCheck(Script): AMS_METRICS_POST_URL = "/ws/v1/timeline/metrics/" AMS_METRICS_GET_URL = "/ws/v1/timeline/metrics?%s" - AMS_CONNECT_TRIES = 30 - AMS_CONNECT_TIMEOUT = 15 - AMS_READ_TRIES = 10 - AMS_READ_TIMEOUT = 5 + AMS_CONNECT_TRIES = 10 + AMS_CONNECT_TIMEOUT = 10 + AMS_READ_TRIES = 5 + AMS_READ_TIMEOUT = 10 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY) def service_check(self, env): @@ -62,124 +63,139 @@ class AMSServiceCheck(Script): if not check_windows_service_exists(params.ams_collector_win_service_name): raise Fail("Metrics Collector service was not properly installed. 
Check the logs and retry the installation.") - @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT) - def service_check(self, env): - import params - - Logger.info("Ambari Metrics service check was started.") - env.set_params(params) - + def service_check_for_single_host(self, metric_collector_host, params): random_value1 = random.random() headers = {"Content-type": "application/json"} ca_certs = os.path.join(params.ams_collector_conf_dir, params.metric_truststore_ca_certs) - for i in xrange(0, self.AMS_CONNECT_TRIES): - try: - current_time = int(time.time()) * 1000 - metric_json = Template('smoketest_metrics.json.j2', hostname=params.hostname, random1=random_value1, + current_time = int(time.time()) * 1000 + metric_json = Template('smoketest_metrics.json.j2', hostname=params.hostname, random1=random_value1, current_time=current_time).get_content() - Logger.info("Generated metrics:\n%s" % metric_json) - - Logger.info("Connecting (POST) to %s:%s%s" % (params.metric_collector_host, - params.metric_collector_port, - self.AMS_METRICS_POST_URL)) - conn = network.get_http_connection(params.metric_collector_host, + try: + post_metrics_to_collector(self.AMS_METRICS_POST_URL, metric_collector_host, params.metric_collector_port, params.metric_collector_https_enabled, + metric_json, headers, ca_certs, self.AMS_CONNECT_TRIES, self.AMS_CONNECT_TIMEOUT) + + get_metrics_parameters = { + "metricNames": "AMBARI_METRICS.SmokeTest.FakeMetric", + "appId": "amssmoketestfake", + "hostname": params.hostname, + "startTime": current_time - 60000, + "endTime": current_time + 61000, + "precision": "seconds", + "grouped": "false", + } + encoded_get_metrics_parameters = urllib.urlencode(get_metrics_parameters) + + Logger.info("Connecting (GET) to %s:%s%s" % (metric_collector_host, + params.metric_collector_port, + self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters)) + for i in xrange(0, self.AMS_READ_TRIES): + conn = network.get_http_connection(metric_collector_host, int(params.metric_collector_port), params.metric_collector_https_enabled, ca_certs) - conn.request("POST", self.AMS_METRICS_POST_URL, metric_json, headers) - + conn.request("GET", self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters) response = conn.getresponse() - Logger.info("Http response: %s %s" % (response.status, response.reason)) - except (httplib.HTTPException, socket.error) as ex: - if i < self.AMS_CONNECT_TRIES - 1: #range/xrange returns items from start to end-1 - time.sleep(self.AMS_CONNECT_TIMEOUT) - Logger.info("Connection failed. Next retry in %s seconds." - % (self.AMS_CONNECT_TIMEOUT)) - continue - else: - raise Fail("Metrics were not saved. Service check has failed. " - "\nConnection failed.") + Logger.info("Http response for host %s : %s %s" % (metric_collector_host, response.status, response.reason)) - data = response.read() - Logger.info("Http data: %s" % data) - conn.close() + data = response.read() + Logger.info("Http data: %s" % data) + conn.close() - if response.status == 200: - Logger.info("Metrics were saved.") - break - else: - Logger.info("Metrics were not saved. Service check has failed.") - if i < self.AMS_CONNECT_TRIES - 1: #range/xrange returns items from start to end-1 - time.sleep(self.AMS_CONNECT_TIMEOUT) - Logger.info("Next retry in %s seconds." - % (self.AMS_CONNECT_TIMEOUT)) + if response.status == 200: + Logger.info("Metrics were retrieved from host %s" % metric_collector_host) + else: + raise Fail("Metrics were not retrieved from host %s. 
GET request status: %s %s \n%s" % + (metric_collector_host, response.status, response.reason, data)) + data_json = json.loads(data) + + def floats_eq(f1, f2, delta): + return abs(f1-f2) < delta + + values_are_present = False + for metrics_data in data_json["metrics"]: + if (str(current_time) in metrics_data["metrics"] and str(current_time + 1000) in metrics_data["metrics"] + and floats_eq(metrics_data["metrics"][str(current_time)], random_value1, 0.0000001) + and floats_eq(metrics_data["metrics"][str(current_time + 1000)], current_time, 1)): + Logger.info("Values %s and %s were found in the response from host %s." % (metric_collector_host, random_value1, current_time)) + values_are_present = True + break + pass + + if not values_are_present: + if i < self.AMS_READ_TRIES - 1: #range/xrange returns items from start to end-1 + Logger.info("Values weren't stored yet. Retrying in %s seconds." + % (self.AMS_READ_TIMEOUT)) + time.sleep(self.AMS_READ_TIMEOUT) + else: + raise Fail("Values %s and %s were not found in the response." % (random_value1, current_time)) else: - raise Fail("Metrics were not saved. Service check has failed. POST request status: %s %s \n%s" % - (response.status, response.reason, data)) - - get_metrics_parameters = { - "metricNames": "AMBARI_METRICS.SmokeTest.FakeMetric", - "appId": "amssmoketestfake", - "hostname": params.hostname, - "startTime": current_time - 60000, - "endTime": current_time + 61000, - "precision": "seconds", - "grouped": "false", - } - encoded_get_metrics_parameters = urllib.urlencode(get_metrics_parameters) - - Logger.info("Connecting (GET) to %s:%s%s" % (params.metric_collector_host, - params.metric_collector_port, - self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters)) - for i in xrange(0, self.AMS_READ_TRIES): - conn = network.get_http_connection(params.metric_collector_host, - int(params.metric_collector_port), - params.metric_collector_https_enabled, - ca_certs) - conn.request("GET", self.AMS_METRICS_GET_URL % encoded_get_metrics_parameters) - response = conn.getresponse() - Logger.info("Http response: %s %s" % (response.status, response.reason)) - - data = response.read() - Logger.info("Http data: %s" % data) - conn.close() - - if response.status == 200: - Logger.info("Metrics were retrieved.") - else: - Logger.info("Metrics were not retrieved. Service check has failed.") - raise Fail("Metrics were not retrieved. Service check has failed. GET request status: %s %s \n%s" % - (response.status, response.reason, data)) - data_json = json.loads(data) - - def floats_eq(f1, f2, delta): - return abs(f1-f2) < delta - - values_are_present = False - for metrics_data in data_json["metrics"]: - if (str(current_time) in metrics_data["metrics"] and str(current_time + 1000) in metrics_data["metrics"] - and floats_eq(metrics_data["metrics"][str(current_time)], random_value1, 0.0000001) - and floats_eq(metrics_data["metrics"][str(current_time + 1000)], current_time, 1)): - Logger.info("Values %s and %s were found in the response." % (random_value1, current_time)) - values_are_present = True break pass + except Fail as ex: + Logger.warning("Ambari Metrics service check failed on collector host %s. Reason : %s" % (metric_collector_host, str(ex))) + raise Fail("Ambari Metrics service check failed on collector host %s. 
Reason : %s" % (metric_collector_host, str(ex))) + + @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT) + def service_check(self, env): + import params + + Logger.info("Ambari Metrics service check was started.") + env.set_params(params) - if not values_are_present: - if i < self.AMS_READ_TRIES - 1: #range/xrange returns items from start to end-1 - Logger.info("Values weren't stored yet. Retrying in %s seconds." - % (self.AMS_READ_TIMEOUT)) - time.sleep(self.AMS_READ_TIMEOUT) + results = execute_in_parallel(self.service_check_for_single_host, params.ams_collector_hosts.split(','), params) + + for host in str(params.ams_collector_hosts).split(","): + if host in results: + if results[host].status == SUCCESS: + Logger.info("Ambari Metrics service check passed on host " + host) + return else: - Logger.info("Values %s and %s were not found in the response." % (random_value1, current_time)) - raise Fail("Values %s and %s were not found in the response." % (random_value1, current_time)) - else: - break - pass - Logger.info("Ambari Metrics service check is finished.") + Logger.warning(results[host].result) + raise Fail("All metrics collectors are unavailable.") + +def post_metrics_to_collector(ams_metrics_post_url, metric_collector_host, metric_collector_port, metric_collector_https_enabled, + metric_json, headers, ca_certs, tries = 1, connect_timeout = 10): + for i in xrange(0, tries): + try: + Logger.info("Generated metrics for host %s :\n%s" % (metric_collector_host, metric_json)) + + Logger.info("Connecting (POST) to %s:%s%s" % (metric_collector_host, + metric_collector_port, + ams_metrics_post_url)) + conn = network.get_http_connection(metric_collector_host, + int(metric_collector_port), + metric_collector_https_enabled, + ca_certs) + conn.request("POST", ams_metrics_post_url, metric_json, headers) + response = conn.getresponse() + Logger.info("Http response for host %s: %s %s" % (metric_collector_host, response.status, response.reason)) + except (httplib.HTTPException, socket.error) as ex: + if i < tries - 1: #range/xrange returns items from start to end-1 + time.sleep(connect_timeout) + Logger.info("Connection failed for host %s. Next retry in %s seconds." + % (metric_collector_host, connect_timeout)) + continue + else: + raise Fail("Metrics were not saved. Connection failed.") + + data = response.read() + Logger.info("Http data: %s" % data) + conn.close() + + if response.status == 200: + Logger.info("Metrics were saved.") + break + else: + Logger.info("Metrics were not saved.") + if i < tries - 1: #range/xrange returns items from start to end-1 + time.sleep(tries) + Logger.info("Next retry in %s seconds." + % (tries)) + else: + raise Fail("Metrics were not saved. 
POST request status: %s %s \n%s" % + (response.status, response.reason, data)) if __name__ == "__main__": AMSServiceCheck().execute() - http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2 index 678d769..05d1ae5 100644 --- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2 +++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metrics_grafana_datasource.json.j2 @@ -20,7 +20,7 @@ "name": "{{ams_datasource_name}}", "type": "ambarimetrics", "access": "proxy", - "url": "{{metric_collector_protocol}}://{{metric_collector_host}}:{{metric_collector_port}}", + "url": "{{metric_collector_protocol}}://{{ams_datasource_host}}:{{metric_collector_port}}", "password": "", "user": "", "database": "", @@ -30,4 +30,4 @@ "withCredentials": false, "isDefault": true, "jsonData": {} -} +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-env.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-env.xml b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-env.xml index 09cced6..5663f57 100644 --- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-env.xml +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/configuration/falcon-env.xml @@ -21,6 +21,23 @@ --> <configuration supports_adding_forbidden="true"> <property> + <name>falcon.atlas.hook</name> + <value>false</value> + <display-name>Enable Atlas Hook</display-name> + <description>Enable Atlas Hook</description> + <value-attributes> + <type>boolean</type> + <overridable>false</overridable> + </value-attributes> + <on-ambari-upgrade add="true"/> + <depends-on> + <property> + <type>application-properties</type> + <name>atlas.rest.address</name> + </property> + </depends-on> + </property> + <property> <name>falcon_user</name> <display-name>Falcon User</display-name> <value>falcon</value> http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/metainfo.xml b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/metainfo.xml index 3ddcc41..91d469b 100644 --- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/metainfo.xml +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/metainfo.xml @@ -131,6 +131,13 @@ <excluded-config-types> <config-type>oozie-site</config-type> </excluded-config-types> + + <quickLinksConfigurations> + <quickLinksConfiguration> + <fileName>quicklinks.json</fileName> + <default>true</default> + </quickLinksConfiguration> + </quickLinksConfigurations> </service> 
</services> </metainfo> http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py index 4429253..4a12b9f 100644 --- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py @@ -118,7 +118,7 @@ def falcon(type, action = None, upgrade_type=None): cd_access = "a") # Generate atlas-application.properties.xml file - if params.falcon_atlas_support: + if params.falcon_atlas_support and params.enable_atlas_hook: # If Atlas is added later than Falcon, this package will be absent. install_atlas_hook_packages(params.atlas_plugin_package, params.atlas_ubuntu_plugin_package, params.host_sys_prepped, params.agent_stack_retry_on_unavailability, params.agent_stack_retry_count) http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py index 0fb21d0..6d1ae03 100644 --- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py @@ -138,14 +138,15 @@ dfs_data_mirroring_dir = "/apps/data-mirroring" ######################################################## #region Atlas Hooks falcon_atlas_application_properties = default('/configurations/falcon-atlas-application.properties', {}) +atlas_hook_filename = default('/configurations/atlas-env/metadata_conf_file', 'atlas-application.properties') +enable_atlas_hook = default('/configurations/falcon-env/falcon.atlas.hook', False) # Calculate atlas_hook_cp to add to FALCON_EXTRA_CLASS_PATH falcon_atlas_support = False # Path to add to environment variable atlas_hook_cp = "" -if has_atlas_in_cluster(): - atlas_hook_filename = default('/configurations/atlas-env/metadata_conf_file', 'atlas-application.properties') +if enable_atlas_hook: # stack_version doesn't contain a minor number of the stack (only first two numbers: 2.3). 
Get it from current_version_formatted falcon_atlas_support = current_version_formatted and check_stack_feature(StackFeature.FALCON_ATLAS_SUPPORT_2_3, current_version_formatted) \ http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/quicklinks/quicklinks.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/quicklinks/quicklinks.json b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/quicklinks/quicklinks.json new file mode 100644 index 0000000..cc81fca --- /dev/null +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/quicklinks/quicklinks.json @@ -0,0 +1,35 @@ +{ + "name": "default", + "description": "default quick links configuration", + "configuration": { + "protocol": + { + "type":"https", + "checks":[ + { + "property":"falcon.enableTLS", + "desired":"true", + "site":"falcon-startup-properties" + } + ] + }, + + "links": [ + { + "name": "falcon_dashboard", + "label": "Falcon Dashboard", + "requires_user_name": "true", + "component_name": "FALCON_SERVER", + "url": "%@://%@:%@/", + "port":{ + "http_property": "falcon_port", + "http_default_port": "15000", + "https_property": "falcon_port", + "https_default_port": "15443", + "regex": "^(\\d+)$", + "site": "falcon-env" + } + } + ] + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py index 008e9d6..a44b461 100644 --- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py @@ -85,7 +85,7 @@ hostname = None if config.has_key('hostname'): hostname = config['hostname'] -ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", []) +ams_collector_hosts = ",".join(default("/clusterHostInfo/metrics_collector_hosts", [])) has_metric_collector = not len(ams_collector_hosts) == 0 metric_collector_port = None if has_metric_collector: http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py index 1f32c41..f47dc8f 100644 --- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py +++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py @@ -151,7 +151,7 @@ has_ganglia_server = not len(ganglia_server_hosts) == 0 if has_ganglia_server: ganglia_server_host = ganglia_server_hosts[0] -ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", []) +ams_collector_hosts = ",".join(default("/clusterHostInfo/metrics_collector_hosts", [])) has_metric_collector = not len(ams_collector_hosts) == 0 if has_metric_collector: if 'cluster-env' in 
config['configurations'] and \ http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml index 22ab02a..aad2db0 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml @@ -395,7 +395,7 @@ <property> <name>dfs.cluster.administrators</name> <value> hdfs</value> - <description>ACL for who all can view the default servlets in the HDFS</description> + <description>ACL for the admins, this configuration is used to control who can access the default servlets in the namenode, etc. The value should be a comma separated list of users and groups. The user list comes first and is separated by a space followed by the group list, e.g. "user1,user2 group1,group2". Both users and groups are optional, so "user1", " group1", "", "user1 group1", "user1,user2 group1,group2" are all valid (note the leading space in " group1"). '*' grants access to all users and groups, e.g. '*', '* ' and ' *' are all valid.</description> <on-ambari-upgrade add="true"/> </property> <property> http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/alerts/alert_metrics_deviation.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/alerts/alert_metrics_deviation.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/alerts/alert_metrics_deviation.py index 4efdae5..8a06f56 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/alerts/alert_metrics_deviation.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/alerts/alert_metrics_deviation.py @@ -178,7 +178,7 @@ def execute(configurations={}, parameters={}, host_name=None): else: collector_webapp_address = configurations[METRICS_COLLECTOR_WEBAPP_ADDRESS_KEY].split(":") if valid_collector_webapp_address(collector_webapp_address): - collector_host = select_metric_collector_for_sink(app_id.lower()).split(":")[0] + collector_host = select_metric_collector_for_sink(app_id.lower()) collector_port = int(collector_webapp_address[1]) else: return (RESULT_STATE_UNKNOWN, ['{0} value should be set as "fqdn_hostname:port", but set to {1}'.format( http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml index 150f629..1213580 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/configuration/hive-env.xml @@ -21,6 +21,23 @@ --> <configuration supports_adding_forbidden="true"> <property> + <name>hive.atlas.hook</name> + <value>false</value> + 
<display-name>Enable Atlas Hook</display-name> + <description>Enable Atlas Hook</description> + <value-attributes> + <type>boolean</type> + <overridable>false</overridable> + </value-attributes> + <on-ambari-upgrade add="true"/> + <depends-on> + <property> + <type>application-properties</type> + <name>atlas.rest.address</name> + </property> + </depends-on> + </property> + <property> <name>hive.client.heapsize</name> <value>512</value> <description>Hive Client Java heap size</description> http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py index 5e2c709..c047114 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat.py @@ -76,6 +76,6 @@ def hcat(): ) # Generate atlas-application.properties.xml file - if has_atlas_in_cluster(): + if params.enable_atlas_hook: atlas_hook_filepath = os.path.join(params.hive_config_dir, params.atlas_hook_filename) setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group) http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py index bcc598a..792aac3 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py @@ -229,7 +229,7 @@ def hive(name=None): mode=0644) # Generate atlas-application.properties.xml file - if has_atlas_in_cluster(): + if params.enable_atlas_hook: atlas_hook_filepath = os.path.join(params.hive_config_dir, params.atlas_hook_filename) setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group) http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py index 6c44d4b..e9436c1 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py @@ -525,9 +525,8 @@ metrics_collection_period = default("/configurations/ams-site/timeline.metrics.s ######################################################## #region Atlas Hooks hive_atlas_application_properties = default('/configurations/hive-atlas-application.properties', {}) - -if has_atlas_in_cluster(): - atlas_hook_filename = 
default('/configurations/atlas-env/metadata_conf_file', 'atlas-application.properties') +enable_atlas_hook = default('/configurations/hive-env/hive.atlas.hook', False) +atlas_hook_filename = default('/configurations/atlas-env/metadata_conf_file', 'atlas-application.properties') #endregion ######################################################## http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py index fe3f34a..aa116d6 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py @@ -139,7 +139,7 @@ def webhcat(): ) # Generate atlas-application.properties.xml file - if has_atlas_in_cluster(): + if params.enable_atlas_hook: # WebHCat uses a different config dir than the rest of the daemons in Hive. atlas_hook_filepath = os.path.join(params.config_dir, params.atlas_hook_filename) setup_atlas_hook(SERVICE.HIVE, params.hive_atlas_application_properties, atlas_hook_filepath, params.hive_user, params.user_group) http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py index 0cb88fe..5635fe3 100644 --- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py @@ -120,7 +120,7 @@ metric_truststore_path= default("/configurations/ams-ssl-client/ssl.client.trust metric_truststore_type= default("/configurations/ams-ssl-client/ssl.client.truststore.type", "") metric_truststore_password= default("/configurations/ams-ssl-client/ssl.client.truststore.password", "") -ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", []) +ams_collector_hosts = ",".join(default("/clusterHostInfo/metrics_collector_hosts", [])) has_metric_collector = not len(ams_collector_hosts) == 0 if has_metric_collector: http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml index ee885e3..aba638a 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml @@ -119,6 +119,20 @@ <on-ambari-upgrade add="true"/> </property> <property> + <name>logfeeder_external_solr_kerberos_keytab</name> + <value></value> + <display-name>Logfeeder External Solr keytab</display-name> + <description>The path to the Kerberos Keytab file containing 
service principal of Logfeeder e.g. /etc/security/keytabs/logfeeder.service.keytab</description> + <on-ambari-upgrade add="true"/> + </property> + <property> + <name>logfeeder_external_solr_kerberos_principal</name> + <value></value> + <display-name>Logfeeder External Solr principal</display-name> + <description>The service principal for Logfeeder which will be used to access SolrClient e.g. logfeeder/_HOST@REALM</description> + <on-ambari-upgrade add="true"/> + </property> + <property> <name>logfeeder_kerberos_keytab</name> <value>/etc/security/keytabs/logfeeder.service.keytab</value> <display-name>Logfeeder Solr keytab</display-name> http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml index c5b9b4e..2f13710 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml @@ -99,20 +99,100 @@ <on-ambari-upgrade add="true"/> </property> <property> - <name>logsearch_solr_audit_logs_zk_node</name> - <value>{infra_solr_znode}</value> - <display-name>Solr Audit Logs Znode</display-name> - <description>Only needed if using custom solr cloud. E.g. /audit_logs</description> + <name>logsearch_use_external_solr</name> + <value>false</value> + <display-name>Use External Solr</display-name> + <description>Use External Solr to store logs</description> + <value-attributes> + <type>value-list</type> + <overridable>false</overridable> + <entries> + <entry> + <value>true</value> + <label>ON</label> + </entry> + <entry> + <value>false</value> + <label>OFF</label> + </entry> + </entries> + <selection-cardinality>1</selection-cardinality> + </value-attributes> + <on-ambari-upgrade add="true"/> + </property> + <property> + <name>logsearch_external_solr_zk_znode</name> + <value></value> + <display-name>External Solr Logs Znode</display-name> + <description>Only needed if using custom solr cloud. E.g. /logsearch_solr</description> <on-ambari-upgrade add="true"/> </property> <property> - <name>logsearch_solr_audit_logs_zk_quorum</name> - <value>{zookeeper_quorum}</value> - <display-name>Solr Audit Logs ZK Quorum</display-name> + <name>logsearch_external_solr_zk_quorum</name> + <value></value> + <display-name>External Solr Logs ZK Quorum</display-name> <description>Only needed if using custom solr cloud. E.g. 
zk1:2181,zk2:2182</description> <on-ambari-upgrade add="true"/> </property> <property> + <name>logsearch_external_solr_ssl_enabled</name> + <value>false</value> + <display-name>Log Search External Solr SSL Enabled</display-name> + <description>Use SSL to connect to the external solr</description> + <value-attributes> + <type>value-list</type> + <overridable>false</overridable> + <entries> + <entry> + <value>true</value> + <label>ON</label> + </entry> + <entry> + <value>false</value> + <label>OFF</label> + </entry> + </entries> + <selection-cardinality>1</selection-cardinality> + </value-attributes> + <on-ambari-upgrade add="true"/> + </property> + <property> + <name>logsearch_external_solr_kerberos_enabled</name> + <value>false</value> + <display-name>Log Search External Solr Kerberos Enabled</display-name> + <description>Use Kerberos to connect to the external solr</description> + <value-attributes> + <type>value-list</type> + <overridable>false</overridable> + <entries> + <entry> + <value>true</value> + <label>ON</label> + </entry> + <entry> + <value>false</value> + <label>OFF</label> + </entry> + </entries> + <selection-cardinality>1</selection-cardinality> + </value-attributes> + <on-ambari-upgrade add="true"/> + </property> + <property> + <name>logsearch_external_solr_kerberos_keytab</name> + <value></value> + <display-name>Log Search External Solr keytab</display-name> + <description>The path to the Kerberos Keytab file containing service principal of Log Search e.g. /etc/security/keytabs/logsearch.service.keytab</description> + <on-ambari-upgrade add="true"/> + </property> + <property> + <name>logsearch_external_solr_kerberos_principal</name> + <value></value> + <display-name>Log Search External Solr principal</display-name> + <description>The service principal for Log Search which will be used to access SolrClient e.g. 
logsearch/_HOST@REALM</description> + <on-ambari-upgrade add="true"/> + </property> + <property> <name>logsearch_truststore_location</name> <value>/etc/security/serverKeys/logsearch.trustStore.jks</value> <display-name>Log Search trust store location</display-name> http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml index 10ca9ae..b474c5d 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-properties.xml @@ -238,7 +238,7 @@ </property> <property> <name>logsearch.spnego.kerberos.enabled</name> - <value/> + <value>false</value> <display-name>Http Spnego enabled</display-name> <description>Enable SPNEGO based authentication on the Log Search UI</description> <value-attributes> http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml index f69bd3c..6921a8e 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml @@ -36,6 +36,7 @@ <commandScript> <script>scripts/logsearch.py</script> <scriptType>PYTHON</scriptType> + <timeout>1800</timeout> </commandScript> <logs> <log> @@ -54,15 +55,6 @@ <enabled>true</enabled> </auto-deploy> </dependency> - - <dependency> - <name>ZOOKEEPER/ZOOKEEPER_SERVER</name> - <scope>cluster</scope> - <auto-deploy> - <enabled>true</enabled> - </auto-deploy> - </dependency> - </dependencies> <configuration-dependencies> <config-type>infra-solr-env</config-type> @@ -94,20 +86,9 @@ </log> </logs> - <dependencies> - - <dependency> - <name>ZOOKEEPER/ZOOKEEPER_SERVER</name> - <scope>cluster</scope> - <auto-deploy> - <enabled>true</enabled> - </auto-deploy> - </dependency> - - </dependencies> - <configuration-dependencies> <config-type>infra-solr-env</config-type> + <config-type>logsearch-env</config-type> <config-type>logfeeder-properties</config-type> <config-type>logfeeder-env</config-type> <config-type>logfeeder-grok</config-type> @@ -134,6 +115,10 @@ <skipUpgrade>true</skipUpgrade> <condition>should_install_logsearch_portal</condition> </package> + <package> + <name>ambari-infra-solr-client</name> + <condition>should_install_infra_solr_client</condition> + </package> </packages> </osSpecific> <osSpecific> @@ -148,6 +133,10 @@ <skipUpgrade>true</skipUpgrade> <condition>should_install_logsearch_portal</condition> </package> + <package> + <name>ambari-infra-solr-client</name> + <condition>should_install_infra_solr_client</condition> + </package> </packages> </osSpecific> </osSpecifics> @@ -158,10 +147,6 @@ <timeout>300</timeout> </commandScript> - <requiredServices> - <service>AMBARI_INFRA</service> - </requiredServices> - <quickLinksConfigurations> <quickLinksConfiguration> 
<fileName>quicklinks.json</fileName> http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py index d086f9f..c20d92d 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py @@ -38,7 +38,6 @@ def get_port_from_url(address): config = Script.get_config() tmp_dir = Script.get_tmp_dir() -stack_version = default("/commandParams/version", None) sudo = AMBARI_SUDO_BINARY security_enabled = status_params.security_enabled @@ -56,16 +55,10 @@ logfeeder_pid_dir = status_params.logfeeder_pid_dir logfeeder_pid_file = status_params.logfeeder_pid_file user_group = config['configurations']['cluster-env']['user_group'] -fetch_nonlocal_groups = config['configurations']['cluster-env']["fetch_nonlocal_groups"] # shared configs java64_home = config['hostLevelParams']['java_home'] -zookeeper_hosts_list = config['clusterHostInfo']['zookeeper_hosts'] -zookeeper_hosts_list.sort() -# get comma separated list of zookeeper hosts from clusterHostInfo -zookeeper_hosts = ",".join(zookeeper_hosts_list) cluster_name = str(config['clusterName']) -availableServices = config['availableServices'] configurations = config['configurations'] # need reference inside logfeeder jinja templates logserch_meta_configs = get_logsearch_meta_configs(configurations) @@ -84,30 +77,31 @@ else: ##################################### # Infra Solr configs ##################################### -infra_solr_znode = default('/configurations/infra-solr-env/infra_solr_znode', '/infra-solr') -infra_solr_instance_count = len(config['clusterHostInfo']['infra_solr_hosts']) -infra_solr_ssl_enabled = default('configurations/infra-solr-env/infra_solr_ssl_enabled', False) -infra_solr_jmx_port = config['configurations']['infra-solr-env']['infra_solr_jmx_port'] - -zookeeper_port = default('/configurations/zoo.cfg/clientPort', None) -index = 0 -zookeeper_quorum = "" -for host in config['clusterHostInfo']['zookeeper_hosts']: - zookeeper_quorum += host + ":" + str(zookeeper_port) - index += 1 - if index < len(config['clusterHostInfo']['zookeeper_hosts']): - zookeeper_quorum += "," +infra_solr_znode = '/infra-solr' +infra_solr_ssl_enabled = False +infra_solr_jmx_port = '' +if 'infra-solr-env' in config['configurations']: + infra_solr_znode = default('/configurations/infra-solr-env/infra_solr_znode', '/infra-solr') + infra_solr_ssl_enabled = default('configurations/infra-solr-env/infra_solr_ssl_enabled', False) + infra_solr_jmx_port = config['configurations']['infra-solr-env']['infra_solr_jmx_port'] if security_enabled: kinit_path_local = status_params.kinit_path_local _hostname_lowercase = config['hostname'].lower() logsearch_jaas_file = logsearch_server_conf + '/logsearch_jaas.conf' logfeeder_jaas_file = logsearch_logfeeder_conf + '/logfeeder_jaas.conf' - logsearch_kerberos_keytab = config['configurations']['logsearch-env']['logsearch_kerberos_keytab'] - logsearch_kerberos_principal = config['configurations']['logsearch-env']['logsearch_kerberos_principal'].replace('_HOST',_hostname_lowercase) - logfeeder_kerberos_keytab = 
config['configurations']['logfeeder-env']['logfeeder_kerberos_keytab'] - logfeeder_kerberos_principal = config['configurations']['logfeeder-env']['logfeeder_kerberos_principal'].replace('_HOST',_hostname_lowercase) + use_external_solr_with_kerberos = default('configurations/logsearch-env/logsearch_external_solr_kerberos_enabled', False) + if use_external_solr_with_kerberos: + logsearch_kerberos_keytab = config['configurations']['logsearch-env']['logsearch_external_solr_kerberos_keytab'] + logsearch_kerberos_principal = config['configurations']['logsearch-env']['logsearch_external_solr_kerberos_principal'].replace('_HOST',_hostname_lowercase) + logfeeder_kerberos_keytab = config['configurations']['logfeeder-env']['logfeeder_external_solr_kerberos_keytab'] + logfeeder_kerberos_principal = config['configurations']['logfeeder-env']['logfeeder_external_solr_kerberos_principal'].replace('_HOST',_hostname_lowercase) + else: + logsearch_kerberos_keytab = config['configurations']['logsearch-env']['logsearch_kerberos_keytab'] + logsearch_kerberos_principal = config['configurations']['logsearch-env']['logsearch_kerberos_principal'].replace('_HOST',_hostname_lowercase) + logfeeder_kerberos_keytab = config['configurations']['logfeeder-env']['logfeeder_kerberos_keytab'] + logfeeder_kerberos_principal = config['configurations']['logfeeder-env']['logfeeder_kerberos_principal'].replace('_HOST',_hostname_lowercase) ##################################### # Logsearch configs @@ -120,10 +114,29 @@ logsearch_service_logs_merge_factor = config['configurations']['logsearch-servic logsearch_audit_logs_max_retention = config['configurations']['logsearch-audit_logs-solrconfig']['logsearch_audit_logs_max_retention'] logsearch_audit_logs_merge_factor = config['configurations']['logsearch-audit_logs-solrconfig']['logsearch_audit_logs_merge_factor'] -logsearch_solr_audit_logs_zk_node = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_node', infra_solr_znode) -logsearch_solr_audit_logs_zk_quorum = default('/configurations/logsearch-env/logsearch_solr_audit_logs_zk_quorum', zookeeper_quorum) -logsearch_solr_audit_logs_zk_node = format(logsearch_solr_audit_logs_zk_node) -logsearch_solr_audit_logs_zk_quorum = format(logsearch_solr_audit_logs_zk_quorum) +logsearch_use_external_solr = default('/configurations/logsearch-env/logsearch_use_external_solr', False) + +if logsearch_use_external_solr: + logsearch_solr_zk_znode = config['configurations']['logsearch-env']['logsearch_external_solr_zk_znode'] + logsearch_solr_zk_quorum = config['configurations']['logsearch-env']['logsearch_external_solr_zk_quorum'] + logsearch_solr_ssl_enabled = default('configurations/logsearch-env/logsearch_external_solr_ssl_enabled', False) + logsearch_solr_kerberos_enabled = security_enabled and default('configurations/logsearch-env/logsearch_external_solr_kerberos_enabled', False) +else: + logsearch_solr_zk_znode = infra_solr_znode + + logsearch_solr_zk_quorum = "" + zookeeper_port = default('/configurations/zoo.cfg/clientPort', None) + if 'zookeeper_hosts' in config['clusterHostInfo']: + for host in config['clusterHostInfo']['zookeeper_hosts']: + if logsearch_solr_zk_quorum: + logsearch_solr_zk_quorum += ',' + logsearch_solr_zk_quorum += host + ":" + str(zookeeper_port) + + logsearch_solr_ssl_enabled = infra_solr_ssl_enabled + logsearch_solr_kerberos_enabled = security_enabled + +zookeeper_quorum = logsearch_solr_zk_quorum + # logsearch-env configs @@ -179,8 +192,8 @@ logsearch_properties = {} # default values 
-logsearch_properties['logsearch.solr.zk_connect_string'] = zookeeper_quorum + infra_solr_znode -logsearch_properties['logsearch.solr.audit.logs.zk_connect_string'] = logsearch_solr_audit_logs_zk_quorum + logsearch_solr_audit_logs_zk_node +logsearch_properties['logsearch.solr.zk_connect_string'] = logsearch_solr_zk_quorum + logsearch_solr_zk_znode +logsearch_properties['logsearch.solr.audit.logs.zk_connect_string'] = logsearch_solr_zk_quorum + logsearch_solr_zk_znode logsearch_properties['logsearch.solr.collection.history'] = 'history' logsearch_properties['logsearch.solr.history.config.name'] = 'history' @@ -210,7 +223,7 @@ del logsearch_properties['logsearch.solr.audit.logs.use.ranger'] logsearch_properties['logsearch.solr.metrics.collector.hosts'] = format(logsearch_properties['logsearch.solr.metrics.collector.hosts']) logsearch_properties['logsearch.auth.external_auth.host_url'] = format(logsearch_properties['logsearch.auth.external_auth.host_url']) -if security_enabled: +if logsearch_solr_kerberos_enabled: logsearch_properties['logsearch.solr.kerberos.enable'] = 'true' logsearch_properties['logsearch.solr.jaas.file'] = logsearch_jaas_file @@ -293,9 +306,9 @@ logfeeder_properties = dict(logfeeder_properties.items() + dict(config['configur logfeeder_properties['logfeeder.metrics.collector.hosts'] = format(logfeeder_properties['logfeeder.metrics.collector.hosts']) logfeeder_properties['logfeeder.config.files'] = format(logfeeder_properties['logfeeder.config.files']) -logfeeder_properties['logfeeder.solr.zk_connect_string'] = zookeeper_quorum + infra_solr_znode +logfeeder_properties['logfeeder.solr.zk_connect_string'] = logsearch_solr_zk_quorum + logsearch_solr_zk_znode -if security_enabled: +if logsearch_solr_kerberos_enabled: if 'logfeeder.solr.kerberos.enable' not in logfeeder_properties: logfeeder_properties['logfeeder.solr.kerberos.enable'] = 'true' if 'logfeeder.solr.jaas.file' not in logfeeder_properties: http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py index 9582334..14f8d20 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logfeeder.py @@ -92,7 +92,7 @@ def setup_logfeeder(): ) - if params.security_enabled: + if params.logsearch_solr_kerberos_enabled: File(format("{logfeeder_jaas_file}"), content=Template("logfeeder_jaas.conf.j2") ) http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py index 6c1a936..874b90b 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/setup_logsearch.py @@ -118,10 +118,10 @@ def 
setup_logsearch(): def upload_conf_set(config_set, solrconfig_content = None): import params - jaas_file = params.logsearch_jaas_file if params.security_enabled else None + jaas_file = params.logsearch_jaas_file if params.logsearch_solr_kerberos_enabled else None solr_cloud_util.upload_configuration_to_zk( - zookeeper_quorum=params.zookeeper_quorum, - solr_znode=params.infra_solr_znode, + zookeeper_quorum=params.logsearch_solr_zk_quorum, + solr_znode=params.logsearch_solr_zk_znode, config_set_dir=format("{logsearch_server_conf}/solr_configsets/{config_set}/conf"), config_set=config_set, tmp_dir=params.tmp_dir, @@ -134,6 +134,7 @@ def upload_conf_set(config_set, solrconfig_content = None): def check_znode(): import params solr_cloud_util.check_znode( - zookeeper_quorum=params.zookeeper_quorum, - solr_znode=params.infra_solr_znode, - java64_home=params.java64_home) + zookeeper_quorum=params.logsearch_solr_zk_quorum, + solr_znode=params.logsearch_solr_zk_znode, + java64_home=params.java64_home, + retry=30, interval=5) http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-env.sh.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-env.sh.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-env.sh.j2 index 2818708..6795dab 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-env.sh.j2 +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logfeeder-env.sh.j2 @@ -32,7 +32,7 @@ if [ "$LOGFEEDER_JAVA_MEM" = "" ]; then export LOGFEEDER_JAVA_MEM=-Xmx{{logfeeder_max_mem}} fi -{% if infra_solr_ssl_enabled %} +{% if logsearch_solr_ssl_enabled %} export LOGFEEDER_SSL="true" export LOGFEEDER_KEYSTORE_LOCATION={{logfeeder_keystore_location}} export LOGFEEDER_KEYSTORE_PASSWORD={{logfeeder_keystore_password}} http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-env.sh.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-env.sh.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-env.sh.j2 index eb7306c..a179983 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-env.sh.j2 +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-env.sh.j2 @@ -38,7 +38,7 @@ export LOGSEARCH_DEBUG={{logsearch_debug_enabled}} export LOGSEARCH_DEBUG_PORT={{logsearch_debug_port}} -{% if infra_solr_ssl_enabled or logsearch_ui_protocol == 'https' or ambari_server_use_ssl %} +{% if logsearch_solr_ssl_enabled or logsearch_ui_protocol == 'https' or ambari_server_use_ssl %} export LOGSEARCH_SSL="true" export LOGSEARCH_KEYSTORE_LOCATION={{logsearch_keystore_location}} export LOGSEARCH_KEYSTORE_PASSWORD={{logsearch_keystore_password}} http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-log4j.xml.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-log4j.xml.j2 
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-log4j.xml.j2 index ce39030..06fdad2 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-log4j.xml.j2 +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/logsearch-log4j.xml.j2 @@ -25,7 +25,7 @@ limitations under the License. </appender> <appender name="rolling_file" class="org.apache.log4j.RollingFileAppender"> - <param name="file" value="{{logsearch_log_dir}}/logsearch.err" /> + <param name="file" value="{{logsearch_log_dir}}/logsearch.log" /> <param name="Threshold" value="info" /> <param name="append" value="true" /> <param name="maxFileSize" value="10MB" /> @@ -74,7 +74,7 @@ limitations under the License. </category> <root> - <priority value="warn"/> + <priority value="info"/> <!-- <appender-ref ref="console" /> --> <appender-ref ref="rolling_file" /> <appender-ref ref="rolling_file_json"/> http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/output.config.json.j2 ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/output.config.json.j2 b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/output.config.json.j2 index 062d636..214e5ba 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/output.config.json.j2 +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/properties/output.config.json.j2 @@ -21,7 +21,7 @@ "comment":"Output to solr for service logs", "is_enabled":"{{solr_service_logs_enable}}", "destination":"solr", - "zk_connect_string":"{{zookeeper_quorum}}{{infra_solr_znode}}", + "zk_connect_string":"{{logsearch_solr_zk_quorum}}{{logsearch_solr_zk_znode}}", "collection":"{{logsearch_solr_collection_service_logs}}", "number_of_shards": "{{logsearch_collection_service_logs_numshards}}", "splits_interval_mins": "{{logsearch_service_logs_split_interval_mins}}", @@ -40,7 +40,7 @@ "comment":"Output to solr for audit records", "is_enabled":"{{solr_audit_logs_enable}}", "destination":"solr", - "zk_connect_string":"{{zookeeper_quorum}}{{infra_solr_znode}}", + "zk_connect_string":"{{logsearch_solr_zk_quorum}}{{logsearch_solr_zk_znode}}", "collection":"{{logsearch_solr_collection_audit_logs}}", "number_of_shards": "{{logsearch_collection_audit_logs_numshards}}", "splits_interval_mins": "{{logsearch_audit_logs_split_interval_mins}}", @@ -57,5 +57,5 @@ } ] - + } \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/themes/theme.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/themes/theme.json b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/themes/theme.json index 0193689..2858825 100644 --- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/themes/theme.json +++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/themes/theme.json @@ -57,6 +57,25 @@ "column-span": "1" } ] + }, + { + "name": "section-logsearch-solr-connection", + "display-name": "Solr Connection", + "row-index": "4", + "column-index": "1", + "row-span": "3", + "column-span": "1", + "section-columns": "1", + "section-rows": "4", + "subsections": [ + { 
+ "name": "subsection-logsearch-solr-connection-col1", + "row-index": "4", + "column-index": "0", + "row-span": "3", + "column-span": "1" + } + ] } ] } @@ -138,6 +157,186 @@ } } ] + }, + { + "config": "logsearch-env/logsearch_use_external_solr", + "subsection-name": "subsection-logsearch-solr-connection-col1" + }, + { + "config": "logsearch-env/logsearch_external_solr_zk_znode", + "subsection-name": "subsection-logsearch-solr-connection-col1", + "depends-on": [ + { + "configs":[ + "logsearch-env/logsearch_use_external_solr" + ], + "if": "${logsearch-env/logsearch_use_external_solr}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "logsearch-env/logsearch_external_solr_zk_quorum", + "subsection-name": "subsection-logsearch-solr-connection-col1", + "depends-on": [ + { + "configs":[ + "logsearch-env/logsearch_use_external_solr" + ], + "if": "${logsearch-env/logsearch_use_external_solr}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "logsearch-env/logsearch_external_solr_ssl_enabled", + "subsection-name": "subsection-logsearch-solr-connection-col1", + "depends-on": [ + { + "configs":[ + "logsearch-env/logsearch_use_external_solr" + ], + "if": "${logsearch-env/logsearch_use_external_solr}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "logsearch-env/logsearch_external_solr_kerberos_enabled", + "subsection-name": "subsection-logsearch-solr-connection-col1", + "depends-on": [ + { + "configs":[ + "logsearch-env/logsearch_use_external_solr" + ], + "if": "${logsearch-env/logsearch_use_external_solr}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "logsearch-env/logsearch_external_solr_kerberos_keytab", + "subsection-name": "subsection-logsearch-solr-connection-col1", + "depends-on": [ + { + "configs":[ + "logsearch-env/logsearch_external_solr_kerberos_enabled" + ], + "if": "${logsearch-env/logsearch_external_solr_kerberos_enabled}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "logsearch-env/logsearch_external_solr_kerberos_principal", + "subsection-name": "subsection-logsearch-solr-connection-col1", + "depends-on": [ + { + "configs":[ + "logsearch-env/logsearch_external_solr_kerberos_enabled" + ], + "if": "${logsearch-env/logsearch_external_solr_kerberos_enabled}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "logfeeder-env/logfeeder_external_solr_kerberos_keytab", + "subsection-name": "subsection-logsearch-solr-connection-col1", + "depends-on": [ + { + "configs":[ + "logsearch-env/logsearch_external_solr_kerberos_enabled" + ], + "if": "${logsearch-env/logsearch_external_solr_kerberos_enabled}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] + }, + { + "config": "logfeeder-env/logfeeder_external_solr_kerberos_principal", + "subsection-name": 
"subsection-logsearch-solr-connection-col1", + "depends-on": [ + { + "configs":[ + "logsearch-env/logsearch_external_solr_kerberos_enabled" + ], + "if": "${logsearch-env/logsearch_external_solr_kerberos_enabled}", + "then": { + "property_value_attributes": { + "visible": true + } + }, + "else": { + "property_value_attributes": { + "visible": false + } + } + } + ] } ] }, @@ -220,6 +419,60 @@ "widget": { "type": "directories" } + }, + { + "config": "logsearch-env/logsearch_use_external_solr", + "widget": { + "type": "toggle" + } + }, + { + "config": "logsearch-env/logsearch_external_solr_zk_znode", + "widget": { + "type": "text-field" + } + }, + { + "config": "logsearch-env/logsearch_external_solr_zk_quorum", + "widget": { + "type": "text-field" + } + }, + { + "config": "logsearch-env/logsearch_external_solr_ssl_enabled", + "widget": { + "type": "toggle" + } + }, + { + "config": "logsearch-env/logsearch_external_solr_kerberos_enabled", + "widget": { + "type": "toggle" + } + }, + { + "config": "logsearch-env/logsearch_external_solr_kerberos_keytab", + "widget": { + "type": "text-field" + } + }, + { + "config": "logsearch-env/logsearch_external_solr_kerberos_principal", + "widget": { + "type": "text-field" + } + }, + { + "config": "logfeeder-env/logfeeder_external_solr_kerberos_keytab", + "widget": { + "type": "text-field" + } + }, + { + "config": "logfeeder-env/logfeeder_external_solr_kerberos_principal", + "widget": { + "type": "text-field" + } } ] } http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py index e27b363..f2cc940 100644 --- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/params.py @@ -70,6 +70,7 @@ stack_supports_usersync_passwd = check_stack_feature(StackFeature.RANGER_USERSYN stack_supports_infra_client = check_stack_feature(StackFeature.RANGER_INSTALL_INFRA_CLIENT, version_for_stack_feature_checks) stack_supports_pid = check_stack_feature(StackFeature.RANGER_PID_SUPPORT, version_for_stack_feature_checks) stack_supports_ranger_admin_password_change = check_stack_feature(StackFeature.RANGER_ADMIN_PASSWD_CHANGE, version_for_stack_feature_checks) +stack_supports_ranger_setup_db_on_start = check_stack_feature(StackFeature.RANGER_SETUP_DB_ON_START, version_for_stack_feature_checks) downgrade_from_version = default("/commandParams/downgrade_from_version", None) upgrade_direction = default("/commandParams/upgrade_direction", None) http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py index a9656c7..b849d58 100644 --- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py +++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py @@ -45,18 +45,9 @@ class 
RangerAdmin(Script):
     self.install_packages(env)
     import params
     env.set_params(params)
-    if params.xml_configurations_supported:
-      from setup_ranger_xml import setup_ranger_db
-      setup_ranger_db()
-
-    self.configure(env)
-
-    if params.xml_configurations_supported:
-      from setup_ranger_xml import setup_java_patch
-      setup_java_patch()
-
-    if params.stack_supports_ranger_admin_password_change:
-      setup_ranger_admin_passwd_change()
+    # call configure and set up the db only in case of HDP version < 2.6
+    if not params.stack_supports_ranger_setup_db_on_start:
+      self.configure(env, setup_db=True)
 
   def stop(self, env, upgrade_type=None):
     import params
@@ -93,7 +84,9 @@ class RangerAdmin(Script):
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    self.configure(env, upgrade_type=upgrade_type)
+
+    # set up the db only in case HDP version is > 2.6
+    self.configure(env, upgrade_type=upgrade_type, setup_db=params.stack_supports_ranger_setup_db_on_start)
 
     if params.stack_supports_infra_client and params.audit_solr_enabled and params.is_solrCloud_enabled:
       solr_cloud_util.setup_solr_client(params.config, custom_log4j = params.custom_log4j)
@@ -122,7 +115,7 @@ class RangerAdmin(Script):
       raise ComponentIsNotRunning()
     pass
 
-  def configure(self, env, upgrade_type=None):
+  def configure(self, env, upgrade_type=None, setup_db=False):
     import params
     env.set_params(params)
     if params.xml_configurations_supported:
@@ -130,8 +123,23 @@ class RangerAdmin(Script):
     else:
       from setup_ranger import ranger
 
+    # set up db if we are not upgrading and setup_db is true
+    if setup_db and upgrade_type is None:
+      if params.xml_configurations_supported:
+        from setup_ranger_xml import setup_ranger_db
+        setup_ranger_db()
+
     ranger('ranger_admin', upgrade_type=upgrade_type)
 
+    # set up java patches if we are not upgrading and setup_db is true
+    if setup_db and upgrade_type is None:
+      if params.xml_configurations_supported:
+        from setup_ranger_xml import setup_java_patch
+        setup_java_patch()
+
+    if params.stack_supports_ranger_admin_password_change:
+      setup_ranger_admin_passwd_change()
+
   def set_ru_rangeradmin_in_progress(self, upgrade_marker_file):
     config_dir = os.path.dirname(upgrade_marker_file)
     try:

http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/configuration/sqoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/configuration/sqoop-env.xml b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/configuration/sqoop-env.xml
index f682e97..508cfab 100644
--- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/configuration/sqoop-env.xml
+++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/configuration/sqoop-env.xml
@@ -22,6 +22,23 @@
 <configuration supports_adding_forbidden="true">
   <!-- sqoop-env.sh -->
   <property>
+    <name>sqoop.atlas.hook</name>
+    <value>false</value>
+    <display-name>Enable Atlas Hook</display-name>
+    <description>Enable Atlas Hook</description>
+    <value-attributes>
+      <type>boolean</type>
+      <overridable>false</overridable>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+    <depends-on>
+      <property>
+        <type>application-properties</type>
+        <name>atlas.rest.address</name>
+      </property>
+    </depends-on>
+  </property>
+  <property>
     <name>content</name>
     <display-name>sqoop-env template</display-name>
     <description>This is the jinja template for sqoop-env.sh file</description>
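For readers skimming the RangerAdmin change above: with the new RANGER_SETUP_DB_ON_START stack feature, Ranger DB setup and the Java patch step move out of install() and are driven instead by a setup_db flag on configure(), which start() derives from the stack-feature check. The following is only a minimal sketch of that call flow; the class below and its method bodies are simplified stand-ins, not the real Ambari Script subclass or the setup_ranger_xml helpers.

# Sketch only: stand-in class, not resource_management's Script or the real ranger_admin.py.
class RangerAdminFlowSketch(object):
  def __init__(self, supports_setup_db_on_start):
    # Stand-in for params.stack_supports_ranger_setup_db_on_start.
    self.supports_setup_db_on_start = supports_setup_db_on_start
    self.log = []

  def install(self):
    # Older stacks: configure and set up the DB at install time.
    if not self.supports_setup_db_on_start:
      self.configure(setup_db=True)

  def start(self, upgrade_type=None):
    # Newer stacks: DB setup happens on every non-upgrade start instead.
    self.configure(upgrade_type=upgrade_type,
                   setup_db=self.supports_setup_db_on_start)

  def configure(self, upgrade_type=None, setup_db=False):
    # DB setup and the java patch bracket the normal config application,
    # and both are skipped during upgrades.
    if setup_db and upgrade_type is None:
      self.log.append("setup_ranger_db")
    self.log.append("apply ranger_admin configs")
    if setup_db and upgrade_type is None:
      self.log.append("setup_java_patch")

# On a stack that supports setup-db-on-start, install() skips the DB work
# and start() performs it.
flow = RangerAdminFlowSketch(supports_setup_db_on_start=True)
flow.install()
flow.start()
print(flow.log)  # ['setup_ranger_db', 'apply ranger_admin configs', 'setup_java_patch']
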
http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/configuration/sqoop-site.xml ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/configuration/sqoop-site.xml b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/configuration/sqoop-site.xml index f7823d2..5d87c4d 100644 --- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/configuration/sqoop-site.xml +++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/configuration/sqoop-site.xml @@ -35,5 +35,11 @@ <property> <name>sqoop.job.data.publish.class</name> <on-ambari-upgrade add="true"/> + <depends-on> + <property> + <type>sqoop-env</type> + <name>sqoop.atlas.hook</name> + </property> + </depends-on> </property> </configuration> http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py index 283f54d..c1138b3 100644 --- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py +++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/params_linux.py @@ -130,7 +130,6 @@ jdk_location = config['hostLevelParams']['jdk_location'] ######################################################## #region Atlas Hooks sqoop_atlas_application_properties = default('/configurations/sqoop-atlas-application.properties', {}) - -if has_atlas_in_cluster(): - atlas_hook_filename = default('/configurations/atlas-env/metadata_conf_file', 'atlas-application.properties') +enable_atlas_hook = default('/configurations/sqoop-env/sqoop.atlas.hook', False) +atlas_hook_filename = default('/configurations/atlas-env/metadata_conf_file', 'atlas-application.properties') #endregion http://git-wip-us.apache.org/repos/asf/ambari/blob/4278c4a4/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop.py b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop.py index 68f06db..d005cbe 100644 --- a/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop.py +++ b/ambari-server/src/main/resources/common-services/SQOOP/1.4.4.2.0/package/scripts/sqoop.py @@ -64,7 +64,7 @@ def sqoop(type=None): ) # Generate atlas-application.properties.xml file and symlink the hook jars - if has_atlas_in_cluster(): + if params.enable_atlas_hook: atlas_hook_filepath = os.path.join(params.sqoop_conf_dir, params.atlas_hook_filename) setup_atlas_hook(SERVICE.SQOOP, params.sqoop_atlas_application_properties, atlas_hook_filepath, params.sqoop_user, params.user_group) setup_atlas_jar_symlinks("sqoop", params.sqoop_lib)
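
To round out the Sqoop change above: the Atlas hook is now gated on the new sqoop-env property sqoop.atlas.hook instead of on has_atlas_in_cluster(). Below is a rough, self-contained sketch of how such a flag can be read and acted on; the config dict and the simplified default() helper are toy stand-ins for Ambari's command JSON and the real resource_management function, not the actual implementation.

# Sketch only: toy command structure standing in for the real Ambari command JSON.
config = {
  'configurations': {
    'sqoop-env': {'sqoop.atlas.hook': 'true'},
    'atlas-env': {'metadata_conf_file': 'atlas-application.properties'},
  }
}

def default(path, fallback):
  # Simplified imitation of a '/'-separated config lookup with a fallback value.
  node = config
  for part in path.strip('/').split('/'):
    if not isinstance(node, dict) or part not in node:
      return fallback
    node = node[part]
  return node

# Mirrors the spirit of params_linux.py above; note that XML-backed values arrive
# as strings, so a string comparison (or boolean cast) is needed before branching.
enable_atlas_hook = str(default('/configurations/sqoop-env/sqoop.atlas.hook', False)).lower() == 'true'
atlas_hook_filename = default('/configurations/atlas-env/metadata_conf_file', 'atlas-application.properties')

if enable_atlas_hook:
  # This is the point where sqoop.py would call setup_atlas_hook(...) and symlink the hook jars.
  print("would generate %s and symlink the Atlas hook jars" % atlas_hook_filename)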