[7/8] ambari git commit: AMBARI-21882. Throw an error if unsupported database JDBC driver is configured for HDP services. (stoader)

2017-09-06 Thread stoader
http://git-wip-us.apache.org/repos/asf/ambari/blob/a86e1618/ambari-server/src/test/python/common-services/configs/hive_default.json
--
diff --git 
a/ambari-server/src/test/python/common-services/configs/hive_default.json 
b/ambari-server/src/test/python/common-services/configs/hive_default.json
new file mode 100644
index 000..2cd0d11
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/configs/hive_default.json
@@ -0,0 +1,650 @@
+{
+  "roleCommand": "SERVICE_CHECK",
+  "clusterName": "c1",
+  "hostname": "c6401.ambari.apache.org",
+  "hostLevelParams": {
+"not_managed_hdfs_path_list": 
"[\"/apps/hive/warehouse\",\"/apps/falcon\",\"/mr-history/done\",\"/app-logs\",\"/tmp\"]",
+"agent_stack_retry_count": "5",
+"agent_stack_retry_on_unavailability": "false",
+"jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
+"ambari_db_rca_password": "mapred",
+"ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
+"jce_name": "UnlimitedJCEPolicyJDK7.zip",
+"stack_version": "2.6",
+"stack_name": "HDP",
+"ambari_db_rca_driver": "org.postgresql.Driver",
+"jdk_name": "jdk-7u67-linux-x64.tar.gz",
+"ambari_db_rca_username": "mapred",
+"java_home": "/usr/jdk64/jdk1.7.0_45",
+"db_name": "ambari",
+"custom_mysql_jdbc_name": "mysql-connector-java.jar"
+  },
+  "commandType": "EXECUTION_COMMAND",
+  "roleParams": {},
+  "serviceName": "SLIDER",
+  "role": "SLIDER",
+  "commandParams": {
+"version": "2.5.0.0-1235",
+"command_timeout": "300",
+"service_package_folder": "OOZIE",
+"script_type": "PYTHON",
+"script": "scripts/service_check.py",
+"excluded_hosts": "host1,host2"
+  },
+  "taskId": 152,
+  "public_hostname": "c6401.ambari.apache.org",
+  "configurations": {
+"hive-env" : {
+  "hcat_pid_dir": "/var/run/webhcat",
+  "hcat_user": "hcat",
+  "hive_ambari_database": "MySQL",
+  "hive_hostname": "abtest-3.c.pramod-thangali.internal",
+  "hive_metastore_port": "9083",
+  "webhcat_user": "hcat",
+  "content": "\n if [ \"$SERVICE\" = \"cli\" ]; then\n   if [ -z 
\"$DEBUG\" ]; then\n export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 
-Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC 
-XX:-UseGCOverheadLimit\"\n   else\n export HADOOP_OPTS=\"$HADOOP_OPTS 
-XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 
-XX:-UseGCOverheadLimit\"\n   fi\n fi\n\n# The heap size of the jvm stared by 
hive shell script can be controlled via:\n\nexport 
HADOOP_HEAPSIZE=\"{{hive_heapsize}}\"\nexport 
HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n\n# Larger 
heap size may be required when running queries over large number of files or 
partitions.\n# By default hive shell scripts use a heap size of 256 (MB).  
Larger heap size would also be\n# appropriate for hive server (hwi etc).\n\n\n# 
Set HADOOP_HOME to point to a specific hadoop install 
directory\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# Hive Configuration 
Directory can 
 be controlled by:\nexport HIVE_CONF_DIR={{conf_dir}}\n\n# Folder containing 
extra ibraries required for hive compilation/execution can be controlled 
by:\nif [ \"${HIVE_AUX_JARS_PATH}\" != \"\" ]; then\n  export 
HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}\nelif [ -d \"/usr/lib/hive-hcatalog/\" 
]; then\n  export 
HIVE_AUX_JARS_PATH=/usr/lib/hive-hcatalog/share/hcatalog/hive-hcatalog-core-*.jar\nelse\n
  export 
HIVE_AUX_JARS_PATH=/usr/lib/hcatalog/share/hcatalog/hcatalog-core.jar\nfi\nexport
 METASTORE_PORT={{hive_metastore_port}}",
+  "hive_database_name": "hive",
+  "hive_database_type": "mysql",
+  "hive_pid_dir": "/var/run/hive",
+  "hive_log_dir": "/var/log/hive",
+  "hive_user": "hive",
+  "hcat_log_dir": "/var/log/webhcat",
+  "hive_database": "New MySQL Database",
+  "hive_security_authorization": "None"
+},
+"hive-site": {
+  "hive.enforce.sorting": "true",
+  "javax.jdo.option.ConnectionPassword": "!`\"' 1",
+  "javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.Driver",
+  "hive.optimize.bucketmapjoin.sortedmerge": "true",
+  "hive.security.metastore.authorization.manager": 
"org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+  "fs.file.impl.disable.cache": "true",
+  "hive.auto.convert.join.noconditionaltask": "true",
+  "hive.map.aggr": "true",
+  "hive.optimize.index.filter": "true",
+  "hive.security.authorization.enabled": "false",
+  "hive.optimize.reducededuplication.min.reducer": "1",
+  "hive.optimize.bucketmapjoin": "true",
+  "hive.metastore.uris": "thrift://c6402.ambari.apache.org:9083",
+  "hive.mapjoin.bucket.cache.size": "1",
+  "hive.auto.convert.join.noconditionaltask.size": "10",
+  "hive.vectorized.execution.enabled": "false",
+  

[7/8] ambari git commit: AMBARI-21882. Throw an error if unsupported database JDBC driver is configured for HDP services. (stoader)

2017-09-06 Thread stoader
http://git-wip-us.apache.org/repos/asf/ambari/blob/680f1148/ambari-server/src/test/python/common-services/configs/hive_default.json
--
diff --git 
a/ambari-server/src/test/python/common-services/configs/hive_default.json 
b/ambari-server/src/test/python/common-services/configs/hive_default.json
new file mode 100644
index 000..2cd0d11
--- /dev/null
+++ b/ambari-server/src/test/python/common-services/configs/hive_default.json
@@ -0,0 +1,650 @@
+{
+  "roleCommand": "SERVICE_CHECK",
+  "clusterName": "c1",
+  "hostname": "c6401.ambari.apache.org",
+  "hostLevelParams": {
+"not_managed_hdfs_path_list": 
"[\"/apps/hive/warehouse\",\"/apps/falcon\",\"/mr-history/done\",\"/app-logs\",\"/tmp\"]",
+"agent_stack_retry_count": "5",
+"agent_stack_retry_on_unavailability": "false",
+"jdk_location": "http://c6401.ambari.apache.org:8080/resources/",
+"ambari_db_rca_password": "mapred",
+"ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
+"jce_name": "UnlimitedJCEPolicyJDK7.zip",
+"stack_version": "2.6",
+"stack_name": "HDP",
+"ambari_db_rca_driver": "org.postgresql.Driver",
+"jdk_name": "jdk-7u67-linux-x64.tar.gz",
+"ambari_db_rca_username": "mapred",
+"java_home": "/usr/jdk64/jdk1.7.0_45",
+"db_name": "ambari",
+"custom_mysql_jdbc_name": "mysql-connector-java.jar"
+  },
+  "commandType": "EXECUTION_COMMAND",
+  "roleParams": {},
+  "serviceName": "SLIDER",
+  "role": "SLIDER",
+  "commandParams": {
+"version": "2.5.0.0-1235",
+"command_timeout": "300",
+"service_package_folder": "OOZIE",
+"script_type": "PYTHON",
+"script": "scripts/service_check.py",
+"excluded_hosts": "host1,host2"
+  },
+  "taskId": 152,
+  "public_hostname": "c6401.ambari.apache.org",
+  "configurations": {
+"hive-env" : {
+  "hcat_pid_dir": "/var/run/webhcat",
+  "hcat_user": "hcat",
+  "hive_ambari_database": "MySQL",
+  "hive_hostname": "abtest-3.c.pramod-thangali.internal",
+  "hive_metastore_port": "9083",
+  "webhcat_user": "hcat",
+  "content": "\n if [ \"$SERVICE\" = \"cli\" ]; then\n   if [ -z 
\"$DEBUG\" ]; then\n export HADOOP_OPTS=\"$HADOOP_OPTS -XX:NewRatio=12 
-Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC 
-XX:-UseGCOverheadLimit\"\n   else\n export HADOOP_OPTS=\"$HADOOP_OPTS 
-XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 
-XX:-UseGCOverheadLimit\"\n   fi\n fi\n\n# The heap size of the jvm stared by 
hive shell script can be controlled via:\n\nexport 
HADOOP_HEAPSIZE=\"{{hive_heapsize}}\"\nexport 
HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n\n# Larger 
heap size may be required when running queries over large number of files or 
partitions.\n# By default hive shell scripts use a heap size of 256 (MB).  
Larger heap size would also be\n# appropriate for hive server (hwi etc).\n\n\n# 
Set HADOOP_HOME to point to a specific hadoop install 
directory\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\n# Hive Configuration 
Directory can 
 be controlled by:\nexport HIVE_CONF_DIR={{conf_dir}}\n\n# Folder containing 
extra ibraries required for hive compilation/execution can be controlled 
by:\nif [ \"${HIVE_AUX_JARS_PATH}\" != \"\" ]; then\n  export 
HIVE_AUX_JARS_PATH=${HIVE_AUX_JARS_PATH}\nelif [ -d \"/usr/lib/hive-hcatalog/\" 
]; then\n  export 
HIVE_AUX_JARS_PATH=/usr/lib/hive-hcatalog/share/hcatalog/hive-hcatalog-core-*.jar\nelse\n
  export 
HIVE_AUX_JARS_PATH=/usr/lib/hcatalog/share/hcatalog/hcatalog-core.jar\nfi\nexport
 METASTORE_PORT={{hive_metastore_port}}",
+  "hive_database_name": "hive",
+  "hive_database_type": "mysql",
+  "hive_pid_dir": "/var/run/hive",
+  "hive_log_dir": "/var/log/hive",
+  "hive_user": "hive",
+  "hcat_log_dir": "/var/log/webhcat",
+  "hive_database": "New MySQL Database",
+  "hive_security_authorization": "None"
+},
+"hive-site": {
+  "hive.enforce.sorting": "true",
+  "javax.jdo.option.ConnectionPassword": "!`\"' 1",
+  "javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.Driver",
+  "hive.optimize.bucketmapjoin.sortedmerge": "true",
+  "hive.security.metastore.authorization.manager": 
"org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider",
+  "fs.file.impl.disable.cache": "true",
+  "hive.auto.convert.join.noconditionaltask": "true",
+  "hive.map.aggr": "true",
+  "hive.optimize.index.filter": "true",
+  "hive.security.authorization.enabled": "false",
+  "hive.optimize.reducededuplication.min.reducer": "1",
+  "hive.optimize.bucketmapjoin": "true",
+  "hive.metastore.uris": "thrift://c6402.ambari.apache.org:9083",
+  "hive.mapjoin.bucket.cache.size": "1",
+  "hive.auto.convert.join.noconditionaltask.size": "10",
+  "hive.vectorized.execution.enabled": "false",
+