http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/draining_servers.rb
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/draining_servers.rb b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/draining_servers.rb
deleted file mode 100644
index 5bcb5b6..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/draining_servers.rb
+++ /dev/null
@@ -1,164 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Add or remove servers from draining mode via zookeeper 
-
-require 'optparse'
-include Java
-
-import org.apache.hadoop.hbase.HBaseConfiguration
-import org.apache.hadoop.hbase.client.HBaseAdmin
-import org.apache.hadoop.hbase.zookeeper.ZKUtil
-import org.apache.commons.logging.Log
-import org.apache.commons.logging.LogFactory
-
-# Name of this script
-NAME = "draining_servers"
-
-# Do command-line parsing
-options = {}
-optparse = OptionParser.new do |opts|
-  opts.banner = "Usage: ./hbase org.jruby.Main #{NAME}.rb [options] add|remove|list <hostname>|<host:port>|<servername> ..."
-  opts.separator 'Add, remove, or list servers in draining mode. Accepts either a hostname (to drain all region servers ' +
-                 'on that host), a host:port pair, or a host,port,startCode triplet. More than one server can be given, separated by spaces.'
-  opts.on('-h', '--help', 'Display usage information') do
-    puts opts
-    exit
-  end
-  options[:debug] = false
-  opts.on('-d', '--debug', 'Display extra debug logging') do
-    options[:debug] = true
-  end
-end
-optparse.parse!
-
-# Return array of servernames where servername is hostname+port+startcode
-# comma-delimited
-def getServers(admin)
-  serverInfos = admin.getClusterStatus().getServerInfo()
-  servers = []
-  for server in serverInfos
-    servers << server.getServerName()
-  end
-  return servers
-end
-
-def getServerNames(hostOrServers, config)
-  ret = []
-  
-  for hostOrServer in hostOrServers
-    # check whether it is already serverName. No need to connect to cluster
-    parts = hostOrServer.split(',')
-    if parts.size() == 3
-      ret << hostOrServer
-    else 
-      admin = HBaseAdmin.new(config) if not admin
-      servers = getServers(admin)
-
-      hostOrServer = hostOrServer.gsub(/:/, ",")
-      for server in servers 
-        ret << server if server.start_with?(hostOrServer)
-      end
-    end
-  end
-  
-  admin.close() if admin
-  return ret
-end
-
-def addServers(options, hostOrServers)
-  config = HBaseConfiguration.create()
-  servers = getServerNames(hostOrServers, config)
-  
-  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, "draining_servers", nil)
-  parentZnode = zkw.drainingZNode
-  
-  begin
-    for server in servers
-      node = ZKUtil.joinZNode(parentZnode, server)
-      ZKUtil.createAndFailSilent(zkw, node)
-    end
-  ensure
-    zkw.close()
-  end
-end
-
-def removeServers(options, hostOrServers)
-  config = HBaseConfiguration.create()
-  servers = getServerNames(hostOrServers, config)
-  
-  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, "draining_servers", nil)
-  parentZnode = zkw.drainingZNode
-  
-  begin
-    for server in servers
-      node = ZKUtil.joinZNode(parentZnode, server)
-      ZKUtil.deleteNodeFailSilent(zkw, node)
-    end
-  ensure
-    zkw.close()
-  end
-end
-
-# list servers in draining mode
-def listServers(options)
-  config = HBaseConfiguration.create()
-  
-  zkw = org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.new(config, "draining_servers", nil)
-  parentZnode = zkw.drainingZNode
-
-  servers = ZKUtil.listChildrenNoWatch(zkw, parentZnode)
-  servers.each {|server| puts server}
-end
-
-hostOrServers = ARGV[1..ARGV.size()]
-
-# Create a logger and disable the DEBUG-level annoying client logging
-def configureLogging(options)
-  apacheLogger = LogFactory.getLog(NAME)
-  # Configure log4j to not spew so much
-  unless (options[:debug]) 
-    logger = org.apache.log4j.Logger.getLogger("org.apache.hadoop.hbase")
-    logger.setLevel(org.apache.log4j.Level::WARN)
-    logger = org.apache.log4j.Logger.getLogger("org.apache.zookeeper")
-    logger.setLevel(org.apache.log4j.Level::WARN)
-  end
-  return apacheLogger
-end
-
-# Create a logger and save it to ruby global
-$LOG = configureLogging(options)
-case ARGV[0]
-  when 'add'
-    if ARGV.length < 2
-      puts optparse
-      exit 1
-    end
-    addServers(options, hostOrServers)
-  when 'remove'
-    if ARGV.length < 2
-      puts optparse
-      exit 1
-    end
-    removeServers(options, hostOrServers)
-  when 'list'
-    listServers(options)
-  else
-    puts optparse
-    exit 3
-end
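
The script above resolves whatever the caller passes (a hostname, a host:port pair, or a full host,port,startCode triplet) against the server names the master reports. A minimal Python sketch of that matching rule, with hypothetical server names, to make the normalization concrete:

    # getServerNames logic in sketch form: a full triplet passes through
    # unchanged; anything else is normalized (':' -> ',') and prefix-matched
    # against the live server list.
    def resolve_server_names(host_or_servers, live_servers):
        resolved = []
        for entry in host_or_servers:
            if len(entry.split(',')) == 3:  # already host,port,startCode
                resolved.append(entry)
            else:
                prefix = entry.replace(':', ',')
                resolved.extend(s for s in live_servers if s.startswith(prefix))
        return resolved

    live = ['rs1.example.com,16020,1428459644000',
            'rs2.example.com,16020,1428459650123']
    print(resolve_server_names(['rs1.example.com:16020'], live))
    # -> ['rs1.example.com,16020,1428459644000']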

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/hbaseSmokeVerify.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/hbaseSmokeVerify.sh b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/hbaseSmokeVerify.sh
deleted file mode 100644
index 5c320c0..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/files/hbaseSmokeVerify.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env bash
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-conf_dir=$1
-data=$2
-hbase_cmd=$3
-echo "scan 'ambarismoketest'" | $hbase_cmd --config $conf_dir shell > 
/tmp/hbase_chk_verify
-cat /tmp/hbase_chk_verify
-echo "Looking for $data"
-grep -q $data /tmp/hbase_chk_verify
-if [ "$?" -ne 0 ]
-then
-  exit 1
-fi
-
-grep -q '1 row(s)' /tmp/hbase_chk_verify
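
The verify script's pass/fail rule, restated as a self-contained Python check (the file path and smoke id are illustrative): the smoke test passes only if the scan output contains the id written by the service check and reports exactly one row. Note the final grep supplies the script's exit status.

    def verify_smoke(scan_output, expected_id):
        # mirrors the two greps above: the written id must be present
        # and the scan must report exactly one row
        return expected_id in scan_output and '1 row(s)' in scan_output

    with open('/tmp/hbase_chk_verify') as f:
        print('PASSED' if verify_smoke(f.read(), 'id_1428459644') else 'FAILED')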

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/__init__.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/__init__.py
deleted file mode 100644
index 5561e10..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/functions.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/functions.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/functions.py
deleted file mode 100644
index e6e7fb9..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/functions.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import os
-import re
-import math
-import datetime
-
-from resource_management.core.shell import checked_call
-
-def calc_xmn_from_xms(heapsize_str, xmn_percent, xmn_max):
-  """
-  @param heapsize_str: str (e.g. '1000m')
-  @param xmn_percent: float (e.g. 0.2)
-  @param xmn_max: integer (e.g. 512)
-  """
-  heapsize = int(re.search('\d+',heapsize_str).group(0))
-  heapsize_unit = re.search('\D+',heapsize_str).group(0)
-  xmn_val = int(math.floor(heapsize*xmn_percent))
-  xmn_val -= xmn_val % 8
-  
-  result_xmn_val = xmn_max if xmn_val > xmn_max else xmn_val
-  return str(result_xmn_val) + heapsize_unit
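
A worked example of the helper above: with heapsize_str '1024m', xmn_percent 0.2, and xmn_max 512, floor(1024 * 0.2) = 204, rounding down to a multiple of 8 gives 200, which is below the cap, so the result is '200m'. Assuming the deleted module is importable as functions:

    from functions import calc_xmn_from_xms

    print(calc_xmn_from_xms('1024m', 0.2, 512))  # '200m' (204 -> 200, under the cap)
    print(calc_xmn_from_xms('4096m', 0.2, 512))  # '512m' (819 -> 816, capped at xmn_max)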

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase.py
deleted file mode 100644
index eb62f92..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase.py
+++ /dev/null
@@ -1,176 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import os
-from resource_management import *
-import sys
-from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
-from ambari_commons import OSConst
-
-
-@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
-def hbase(name=None):
-  import params
-  XmlConfig("hbase-site.xml",
-            conf_dir = params.hbase_conf_dir,
-            configurations = params.config['configurations']['hbase-site'],
-            configuration_attributes=params.config['configuration_attributes']['hbase-site']
-  )
-
-# name is 'master' or 'regionserver' or 'client'
-@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
-def hbase(name=None):
-  import params
-
-  Directory( params.hbase_conf_dir_prefix,
-      mode=0755
-  )
-
-  Directory( params.hbase_conf_dir,
-      owner = params.hbase_user,
-      group = params.user_group,
-      recursive = True
-  )
-
-  Directory (params.tmp_dir,
-             owner = params.hbase_user,
-             mode=0775,
-             recursive = True,
-             cd_access="a",
-  )
-
-  Directory (params.local_dir,
-             owner = params.hbase_user,
-             group = params.user_group,
-             mode=0775,
-             recursive = True
-  )
-
-  Directory (os.path.join(params.local_dir, "jars"),
-             owner = params.hbase_user,
-             group = params.user_group,
-             mode=0775,
-             recursive = True
-  )
-
-  XmlConfig( "hbase-site.xml",
-            conf_dir = params.hbase_conf_dir,
-            configurations = params.config['configurations']['hbase-site'],
-            configuration_attributes=params.config['configuration_attributes']['hbase-site'],
-            owner = params.hbase_user,
-            group = params.user_group
-  )
-
-  XmlConfig( "core-site.xml",
-             conf_dir = params.hbase_conf_dir,
-             configurations = params.config['configurations']['core-site'],
-             configuration_attributes=params.config['configuration_attributes']['core-site'],
-             owner = params.hbase_user,
-             group = params.user_group
-  )
-
-  if 'hdfs-site' in params.config['configurations']:
-    XmlConfig( "hdfs-site.xml",
-            conf_dir = params.hbase_conf_dir,
-            configurations = params.config['configurations']['hdfs-site'],
-            configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
-            owner = params.hbase_user,
-            group = params.user_group
-    )
-
-    XmlConfig("hdfs-site.xml",
-            conf_dir=params.hadoop_conf_dir,
-            configurations=params.config['configurations']['hdfs-site'],
-            configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
-            owner=params.hdfs_user,
-            group=params.user_group
-    )
-
-  if 'hbase-policy' in params.config['configurations']:
-    XmlConfig( "hbase-policy.xml",
-            conf_dir = params.hbase_conf_dir,
-            configurations = params.config['configurations']['hbase-policy'],
-            configuration_attributes=params.config['configuration_attributes']['hbase-policy'],
-            owner = params.hbase_user,
-            group = params.user_group
-    )
-  # Manually overriding ownership of file installed by hadoop package
-  else: 
-    File( format("{params.hbase_conf_dir}/hbase-policy.xml"),
-      owner = params.hbase_user,
-      group = params.user_group
-    )
-
-  File(format("{hbase_conf_dir}/hbase-env.sh"),
-       owner = params.hbase_user,
-       content=InlineTemplate(params.hbase_env_sh_template)
-  )     
-       
-  hbase_TemplateConfig( params.metric_prop_file_name,
-    tag = 'GANGLIA-MASTER' if name == 'master' else 'GANGLIA-RS'
-  )
-
-  hbase_TemplateConfig( 'regionservers')
-
-  if params.security_enabled:
-    hbase_TemplateConfig( format("hbase_{name}_jaas.conf"))
-  
-  if name != "client":
-    Directory( params.pid_dir,
-      owner = params.hbase_user,
-      recursive = True
-    )
-  
-    Directory (params.log_dir,
-      owner = params.hbase_user,
-      recursive = True
-    )
-
-  if (params.log4j_props != None):
-    File(format("{params.hbase_conf_dir}/log4j.properties"),
-         mode=0644,
-         group=params.user_group,
-         owner=params.hbase_user,
-         content=params.log4j_props
-    )
-  elif (os.path.exists(format("{params.hbase_conf_dir}/log4j.properties"))):
-    File(format("{params.hbase_conf_dir}/log4j.properties"),
-      mode=0644,
-      group=params.user_group,
-      owner=params.hbase_user
-    )
-  if name in ["master","regionserver"]:
-    params.HdfsDirectory(params.hbase_hdfs_root_dir,
-                         action="create_delayed",
-                         owner=params.hbase_user
-    )
-    params.HdfsDirectory(params.hbase_staging_dir,
-                         action="create_delayed",
-                         owner=params.hbase_user,
-                         mode=0711
-    )
-    params.HdfsDirectory(None, action="create")
-
-def hbase_TemplateConfig(name, tag=None):
-  import params
-
-  TemplateConfig( format("{hbase_conf_dir}/{name}"),
-      owner = params.hbase_user,
-      template_tag = tag
-  )
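
The two hbase() definitions above are routed per OS family by the OsFamilyFuncImpl decorator from ambari_commons.os_family_impl. A simplified, self-contained stand-in for that dispatch (the OS probe here is a crude assumption, not Ambari's OSCheck):

    import platform

    def _current_family():
        return 'winsrv' if platform.system() == 'Windows' else 'default'

    _registry = {}

    def os_family_func_impl(os_family):
        def register(func):
            # remember this implementation for its OS family, then return a
            # dispatcher that picks the right one at call time
            _registry.setdefault(func.__name__, {})[os_family] = func
            def dispatch(*args, **kwargs):
                table = _registry[func.__name__]
                impl = table.get(_current_family()) or table['default']
                return impl(*args, **kwargs)
            return dispatch
        return register

    @os_family_func_impl(os_family='winsrv')
    def hbase(name=None):
        return 'windows: write hbase-site.xml only'

    @os_family_func_impl(os_family='default')
    def hbase(name=None):
        return 'default: dirs, configs and jaas for %s' % name

    print(hbase(name='master'))  # default variant on anything but Windows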

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_client.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_client.py
deleted file mode 100644
index 3955b66..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_client.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from hbase import hbase
-from ambari_commons import OSCheck, OSConst
-from ambari_commons.os_family_impl import OsFamilyImpl
-
-
-class HbaseClient(Script):
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hbase(name='client')
-
-  def status(self, env):
-    raise ClientComponentHasNoStatus()
-
-
-@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
-class HbaseClientWindows(HbaseClient):
-  pass
-
-
-@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
-class HbaseClientDefault(HbaseClient):
-  def get_stack_to_component(self):
-    return {"HDP": "hbase-client"}
-
-  def pre_rolling_restart(self, env):
-    import params
-    env.set_params(params)
-
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-      Execute(format("hdp-select set hbase-client {version}"))
-
-      # set all of the hadoop clients since hbase client is upgraded as part
-      # of the final "CLIENTS" group and we need to ensure that hadoop-client
-      # is also set
-      Execute(format("hdp-select set hadoop-client {version}"))
-
-
-if __name__ == "__main__":
-  HbaseClient().execute()
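
The pre_rolling_restart gate above only runs hdp-select once the target stack is 2.2.0.0 or later. A hypothetical comparison standing in for format_hdp_stack_version/compare_versions (the real helpers live in resource_management):

    def at_least(version, floor='2.2.0.0'):
        # dotted version strings compared component-wise as integers
        return [int(x) for x in version.split('.')] >= [int(x) for x in floor.split('.')]

    print(at_least('2.3.0.0'))  # True  -> hdp-select set hbase-client 2.3.0.0
    print(at_least('2.1.0.0'))  # False -> skip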

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_decommission.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_decommission.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_decommission.py
deleted file mode 100644
index 54d8c0e..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_decommission.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
-from ambari_commons import OSConst
-
-@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
-def hbase_decommission(env):
-  import params
-
-  env.set_params(params)
-  File(params.region_drainer, content=StaticFile("draining_servers.rb"), owner=params.hbase_user, mode="f")
-
-  hosts = params.hbase_excluded_hosts.split(",")
-  for host in hosts:
-    if host:
-      if params.hbase_drain_only == True:
-        regiondrainer_cmd = format("cmd /c {hbase_executable} org.jruby.Main {region_drainer} remove {host}")
-        Execute(regiondrainer_cmd, user=params.hbase_user, logoutput=True)
-      else:
-        regiondrainer_cmd = format("cmd /c {hbase_executable} org.jruby.Main {region_drainer} add {host}")
-        regionmover_cmd = format("cmd /c {hbase_executable} org.jruby.Main {region_mover} unload {host}")
-        Execute(regiondrainer_cmd, user=params.hbase_user, logoutput=True)
-        Execute(regionmover_cmd, user=params.hbase_user, logoutput=True)
-
-
-@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
-def hbase_decommission(env):
-  import params
-
-  env.set_params(params)
-  kinit_cmd = params.kinit_cmd
-
-  File(params.region_drainer,
-       content=StaticFile("draining_servers.rb"),
-       mode=0755
-  )
-  
-  if params.hbase_excluded_hosts and params.hbase_excluded_hosts.split(","):
-    hosts = params.hbase_excluded_hosts.split(",")
-  elif params.hbase_included_hosts and params.hbase_included_hosts.split(","):
-    hosts = params.hbase_included_hosts.split(",")
-
-  if params.hbase_drain_only:
-    for host in hosts:
-      if host:
-        regiondrainer_cmd = format(
-          "{kinit_cmd} {hbase_cmd} --config {hbase_conf_dir} org.jruby.Main 
{region_drainer} remove {host}")
-        Execute(regiondrainer_cmd,
-                user=params.hbase_user,
-                logoutput=True
-        )
-        pass
-    pass
-
-  else:
-    for host in hosts:
-      if host:
-        regiondrainer_cmd = format(
-          "{kinit_cmd} {hbase_cmd} --config {hbase_conf_dir} org.jruby.Main 
{region_drainer} add {host}")
-        regionmover_cmd = format(
-          "{kinit_cmd} {hbase_cmd} --config {hbase_conf_dir} org.jruby.Main 
{region_mover} unload {host}")
-
-        Execute(regiondrainer_cmd,
-                user=params.hbase_user,
-                logoutput=True
-        )
-
-        Execute(regionmover_cmd,
-                user=params.hbase_user,
-                logoutput=True
-        )
-      pass
-    pass
-  pass
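
For a concrete sense of the default (non-Windows) path: with the HDP 2.2+ paths defined in params_linux.py further down, security off (empty kinit_cmd), and a hypothetical host, the drain command expands roughly to:

    cmd = "{kinit_cmd} {hbase_cmd} --config {hbase_conf_dir} org.jruby.Main {region_drainer} add {host}".format(
        kinit_cmd='',
        hbase_cmd='/usr/hdp/current/hbase-client/bin/hbase',
        hbase_conf_dir='/etc/hbase/conf',
        region_drainer='/usr/hdp/current/hbase-client/bin/draining_servers.rb',
        host='rs1.example.com')
    print(cmd.strip())
    # /usr/hdp/current/hbase-client/bin/hbase --config /etc/hbase/conf org.jruby.Main /usr/hdp/current/hbase-client/bin/draining_servers.rb add rs1.example.com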

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_master.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_master.py
deleted file mode 100644
index 30198c9..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_master.py
+++ /dev/null
@@ -1,148 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from resource_management.libraries.functions.security_commons import build_expectations, \
-  cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
-  FILE_TYPE_XML
-from hbase import hbase
-from hbase_service import hbase_service
-from hbase_decommission import hbase_decommission
-import upgrade
-from setup_ranger_hbase import setup_ranger_hbase
-from ambari_commons import OSCheck, OSConst
-from ambari_commons.os_family_impl import OsFamilyImpl
-
-
-class HbaseMaster(Script):
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hbase(name='master')
-
-  def install(self, env):
-    self.install_packages(env)
-
-  def decommission(self, env):
-    import params
-    env.set_params(params)
-    hbase_decommission(env)
-
-
-@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
-class HbaseMasterWindows(HbaseMaster):
-  def start(self, env):
-    import status_params
-    self.configure(env)
-    Service(status_params.hbase_master_win_service_name, action="start")
-
-  def stop(self, env):
-    import status_params
-    env.set_params(status_params)
-    Service(status_params.hbase_master_win_service_name, action="stop")
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    check_windows_service_status(status_params.hbase_master_win_service_name)
-
-
-
-@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
-class HbaseMasterDefault(HbaseMaster):
-  def get_stack_to_component(self):
-    return {"HDP": "hbase-master"}
-
-  def pre_rolling_restart(self, env):
-    import params
-    env.set_params(params)
-    upgrade.prestart(env, "hbase-master")
-
-  def start(self, env, rolling_restart=False):
-    import params
-    env.set_params(params)
-    self.configure(env) # for security
-    setup_ranger_hbase()  
-    hbase_service('master', action = 'start')
-    
-  def stop(self, env, rolling_restart=False):
-    import params
-    env.set_params(params)
-    hbase_service('master', action = 'stop')
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    pid_file = format("{pid_dir}/hbase-{hbase_user}-master.pid")
-    check_process_status(pid_file)
-
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    if status_params.security_enabled:
-      props_value_check = {"hbase.security.authentication" : "kerberos",
-                           "hbase.security.authorization": "true"}
-      props_empty_check = ['hbase.master.keytab.file',
-                           'hbase.master.kerberos.principal']
-      props_read_check = ['hbase.master.keytab.file']
-      hbase_site_expectations = build_expectations('hbase-site', props_value_check, props_empty_check,
-                                                   props_read_check)
-
-      hbase_expectations = {}
-      hbase_expectations.update(hbase_site_expectations)
-
-      security_params = get_params_from_filesystem(status_params.hbase_conf_dir,
-                                                   {'hbase-site.xml': FILE_TYPE_XML})
-      result_issues = validate_security_config_properties(security_params, hbase_expectations)
-      if not result_issues:  # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ( 'hbase-site' not in security_params
-               or 'hbase.master.keytab.file' not in security_params['hbase-site']
-               or 'hbase.master.kerberos.principal' not in security_params['hbase-site']):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set properly."})
-            return
-
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.hbase_user,
-                                security_params['hbase-site']['hbase.master.keytab.file'],
-                                security_params['hbase-site']['hbase.master.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-
-
-if __name__ == "__main__":
-  HbaseMaster().execute()
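
What security_status asserts about hbase-site, restated as plain dict checks (a simplified sketch; build_expectations and validate_security_config_properties belong to resource_management and are not reproduced here):

    def check_hbase_site(hbase_site):
        issues = []
        if hbase_site.get('hbase.security.authentication') != 'kerberos':
            issues.append('hbase.security.authentication must be kerberos')
        if hbase_site.get('hbase.security.authorization') != 'true':
            issues.append('hbase.security.authorization must be true')
        for prop in ('hbase.master.keytab.file', 'hbase.master.kerberos.principal'):
            if not hbase_site.get(prop):  # empty-value check
                issues.append('%s must be set' % prop)
        return issues

    print(check_hbase_site({'hbase.security.authentication': 'kerberos'}))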

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_regionserver.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_regionserver.py
deleted file mode 100644
index 882b982..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_regionserver.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management import *
-from resource_management.libraries.functions.security_commons import build_expectations, \
-  cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
-  FILE_TYPE_XML
-from hbase import hbase
-from hbase_service import hbase_service
-import upgrade
-from setup_ranger_hbase import setup_ranger_hbase
-from ambari_commons import OSCheck, OSConst
-from ambari_commons.os_family_impl import OsFamilyImpl
-
-
-class HbaseRegionServer(Script):
-  def install(self, env):
-    self.install_packages(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    hbase(name='regionserver')
-
-  def decommission(self, env):
-    print "Decommission not yet implemented!"
-
-
-
-@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
-class HbaseRegionServerWindows(HbaseRegionServer):
-  def start(self, env):
-    import status_params
-    self.configure(env)
-    Service(status_params.hbase_regionserver_win_service_name, action="start")
-
-  def stop(self, env):
-    import status_params
-    env.set_params(status_params)
-    Service(status_params.hbase_regionserver_win_service_name, action="stop")
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    check_windows_service_status(status_params.hbase_regionserver_win_service_name)
-
-
-
-@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
-class HbaseRegionServerDefault(HbaseRegionServer):
-  def get_stack_to_component(self):
-    return {"HDP": "hbase-regionserver"}
-
-  def pre_rolling_restart(self, env):
-    import params
-    env.set_params(params)
-    upgrade.prestart(env, "hbase-regionserver")
-
-  def post_rolling_restart(self, env):
-    import params
-    env.set_params(params)
-    upgrade.post_regionserver(env)
-
-  def start(self, env, rolling_restart=False):
-    import params
-    env.set_params(params)
-    self.configure(env) # for security
-    setup_ranger_hbase()  
-    hbase_service( 'regionserver',
-      action = 'start'
-    )
-
-  def stop(self, env, rolling_restart=False):
-    import params
-    env.set_params(params)
-
-    hbase_service( 'regionserver',
-      action = 'stop'
-    )
-
-  def status(self, env):
-    import status_params
-    env.set_params(status_params)
-    pid_file = format("{pid_dir}/hbase-{hbase_user}-regionserver.pid")
-    check_process_status(pid_file)
-
-  def security_status(self, env):
-    import status_params
-
-    env.set_params(status_params)
-    if status_params.security_enabled:
-      props_value_check = {"hbase.security.authentication" : "kerberos",
-                           "hbase.security.authorization": "true"}
-      props_empty_check = ['hbase.regionserver.keytab.file',
-                           'hbase.regionserver.kerberos.principal']
-      props_read_check = ['hbase.regionserver.keytab.file']
-      hbase_site_expectations = build_expectations('hbase-site', props_value_check, props_empty_check,
-                                                   props_read_check)
-
-      hbase_expectations = {}
-      hbase_expectations.update(hbase_site_expectations)
-
-      security_params = get_params_from_filesystem(status_params.hbase_conf_dir,
-                                                   {'hbase-site.xml': FILE_TYPE_XML})
-      result_issues = validate_security_config_properties(security_params, hbase_expectations)
-      if not result_issues:  # If all validations passed successfully
-        try:
-          # Double check the dict before calling execute
-          if ( 'hbase-site' not in security_params
-               or 'hbase.regionserver.keytab.file' not in security_params['hbase-site']
-               or 'hbase.regionserver.kerberos.principal' not in security_params['hbase-site']):
-            self.put_structured_out({"securityState": "UNSECURED"})
-            self.put_structured_out(
-              {"securityIssuesFound": "Keytab file or principal are not set properly."})
-            return
-
-          cached_kinit_executor(status_params.kinit_path_local,
-                                status_params.hbase_user,
-                                security_params['hbase-site']['hbase.regionserver.keytab.file'],
-                                security_params['hbase-site']['hbase.regionserver.kerberos.principal'],
-                                status_params.hostname,
-                                status_params.tmp_dir)
-          self.put_structured_out({"securityState": "SECURED_KERBEROS"})
-        except Exception as e:
-          self.put_structured_out({"securityState": "ERROR"})
-          self.put_structured_out({"securityStateErrorInfo": str(e)})
-      else:
-        issues = []
-        for cf in result_issues:
-          issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
-        self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
-        self.put_structured_out({"securityState": "UNSECURED"})
-    else:
-      self.put_structured_out({"securityState": "UNSECURED"})
-
-
-if __name__ == "__main__":
-  HbaseRegionServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_service.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_service.py
deleted file mode 100644
index fb565a7..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_service.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-
-def hbase_service(
-  name,
-  action = 'start'): # 'start' or 'stop' or 'status'
-    
-    import params
-  
-    role = name
-    cmd = format("{daemon_script} --config {hbase_conf_dir}")
-    pid_file = format("{pid_dir}/hbase-{hbase_user}-{role}.pid")
-    no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
-    
-    if action == 'start':
-      daemon_cmd = format("{cmd} start {role}")
-      
-      Execute ( daemon_cmd,
-        not_if = no_op_test,
-        user = params.hbase_user
-      )
-    elif action == 'stop':
-      daemon_cmd = format("{cmd} stop {role}")
-
-      Execute ( daemon_cmd,
-        user = params.hbase_user,
-        # BUGFIX: hbase regionserver sometimes hangs when nn is in safemode
-        timeout = 30,
-        on_timeout = format("! ( {no_op_test} ) || {sudo} -H -E kill -9 `cat {pid_file}`"),
-      )
-      
-      Execute (format("rm -f {pid_file}"))
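
The no_op_test guard above ("pid file exists and the process is alive") expressed as a self-contained Python check; the path is illustrative:

    import os

    def is_running(pid_file):
        try:
            with open(pid_file) as f:
                pid = int(f.read().strip())
            os.kill(pid, 0)  # signal 0 probes existence without killing
            return True
        except (IOError, ValueError, OSError):
            return False

    print(is_running('/var/run/hbase/hbase-hbase-master.pid'))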

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_upgrade.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_upgrade.py
deleted file mode 100644
index 610f527..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/hbase_upgrade.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management.libraries.script import Script
-from resource_management.core.resources.system import Execute
-
-class HbaseMasterUpgrade(Script):
-
-  def snapshot(self, env):
-    import params
-
-    snap_cmd = "echo 'snapshot_all' | {0} shell".format(params.hbase_cmd)
-
-    exec_cmd = "{0} {1}".format(params.kinit_cmd, snap_cmd)
-
-    Execute(exec_cmd, user=params.hbase_user)
-
-if __name__ == "__main__":
-  HbaseMasterUpgrade().execute()
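
Once formatted, the snapshot step above pipes a command into the HBase shell. Assuming the HDP 2.2+ hbase_cmd path from params_linux.py and an empty kinit_cmd (security off), the executed string works out to:

    snap_cmd = "echo 'snapshot_all' | {0} shell".format('/usr/hdp/current/hbase-client/bin/hbase')
    exec_cmd = "{0} {1}".format('', snap_cmd)
    print(exec_cmd.strip())
    # echo 'snapshot_all' | /usr/hdp/current/hbase-client/bin/hbase shell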

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params.py
deleted file mode 100644
index a10c1d4..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-from ambari_commons import OSCheck
-
-if OSCheck.is_windows_family():
-  from params_windows import *
-else:
-  from params_linux import *

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params_linux.py
deleted file mode 100644
index efca51a..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params_linux.py
+++ /dev/null
@@ -1,255 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from ambari_commons.constants import AMBARI_SUDO_BINARY
-from functions import calc_xmn_from_xms
-from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
-from resource_management.libraries.functions.default import default
-from resource_management import *
-import status_params
-
-# server configurations
-config = Script.get_config()
-exec_tmp_dir = Script.get_tmp_dir()
-sudo = AMBARI_SUDO_BINARY
-
-stack_name = default("/hostLevelParams/stack_name", None)
-
-version = default("/commandParams/version", None)
-
-stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
-
-#hadoop params
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  hadoop_bin_dir = format("/usr/hdp/current/hadoop-client/bin")
-  daemon_script = format('/usr/hdp/current/hbase-client/bin/hbase-daemon.sh')
-  region_mover = format('/usr/hdp/current/hbase-client/bin/region_mover.rb')
-  region_drainer = format('/usr/hdp/current/hbase-client/bin/draining_servers.rb')
-  hbase_cmd = format('/usr/hdp/current/hbase-client/bin/hbase')
-else:
-  hadoop_bin_dir = "/usr/bin"
-  daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
-  region_mover = "/usr/lib/hbase/bin/region_mover.rb"
-  region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
-  hbase_cmd = "/usr/lib/hbase/bin/hbase"
-
-phx_daemon_script = '/usr/hdp/current/phoenix-server/bin/queryserver.py'
-
-hadoop_conf_dir = "/etc/hadoop/conf"
-hbase_conf_dir_prefix = "/etc/hbase"
-hbase_conf_dir = format("{hbase_conf_dir_prefix}/conf")
-hbase_excluded_hosts = config['commandParams']['excluded_hosts']
-hbase_drain_only = default("/commandParams/mark_draining_only",False)
-hbase_included_hosts = config['commandParams']['included_hosts']
-
-hbase_user = status_params.hbase_user
-hbase_principal_name = config['configurations']['hbase-env']['hbase_principal_name']
-smokeuser = config['configurations']['cluster-env']['smokeuser']
-_authentication = config['configurations']['core-site']['hadoop.security.authentication']
-security_enabled = config['configurations']['cluster-env']['security_enabled']
-
-# this is "hadoop-metrics.properties" for 1.x stacks
-metric_prop_file_name = "hadoop-metrics2-hbase.properties"
-
-# not supporting 32 bit jdk.
-java64_home = config['hostLevelParams']['java_home']
-
-log_dir = config['configurations']['hbase-env']['hbase_log_dir']
-master_heapsize = config['configurations']['hbase-env']['hbase_master_heapsize']
-
-regionserver_heapsize = config['configurations']['hbase-env']['hbase_regionserver_heapsize']
-regionserver_xmn_max = config['configurations']['hbase-env']['hbase_regionserver_xmn_max']
-regionserver_xmn_percent = config['configurations']['hbase-env']['hbase_regionserver_xmn_ratio']
-regionserver_xmn_size = calc_xmn_from_xms(regionserver_heapsize, regionserver_xmn_percent, regionserver_xmn_max)
-
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  hbase_max_direct_memory_size = config['configurations']['hbase-env']['hbase_max_direct_memory_size']
-
-pid_dir = status_params.pid_dir
-tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
-# TODO UPGRADE default, update site during upgrade
-_local_dir_conf = default('/configurations/hbase-site/hbase.local.dir', "${hbase.tmp.dir}/local")
-local_dir = substitute_vars(_local_dir_conf, config['configurations']['hbase-site'])
-
-client_jaas_config_file = format("{hbase_conf_dir}/hbase_client_jaas.conf")
-master_jaas_config_file = format("{hbase_conf_dir}/hbase_master_jaas.conf")
-regionserver_jaas_config_file = format("{hbase_conf_dir}/hbase_regionserver_jaas.conf")
-
-ganglia_server_hosts = default('/clusterHostInfo/ganglia_server_host', []) # is not passed when ganglia is not present
-ganglia_server_host = '' if len(ganglia_server_hosts) == 0 else ganglia_server_hosts[0]
-
-ams_collector_hosts = default("/clusterHostInfo/metrics_collector_hosts", [])
-has_metric_collector = not len(ams_collector_hosts) == 0
-if has_metric_collector:
-  metric_collector_host = ams_collector_hosts[0]
-  metric_collector_port = default("/configurations/ams-site/timeline.metrics.service.webapp.address", "0.0.0.0:6188")
-  if metric_collector_port and metric_collector_port.find(':') != -1:
-    metric_collector_port = metric_collector_port.split(':')[1]
-  pass
-
-# if hbase is selected, hbase_rs_hosts should not be empty, but still default just in case
-if 'slave_hosts' in config['clusterHostInfo']:
-  rs_hosts = default('/clusterHostInfo/hbase_rs_hosts', '/clusterHostInfo/slave_hosts') # if hbase_rs_hosts is not given, region servers are assumed to run on the same nodes as slaves
-else:
-  rs_hosts = default('/clusterHostInfo/hbase_rs_hosts', '/clusterHostInfo/all_hosts')
-
-smoke_test_user = config['configurations']['cluster-env']['smokeuser']
-smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
-smokeuser_permissions = "RWXCA"
-service_check_data = functions.get_unique_id_and_date()
-user_group = config['configurations']['cluster-env']["user_group"]
-
-if security_enabled:
-  _hostname_lowercase = config['hostname'].lower()
-  master_jaas_princ = config['configurations']['hbase-site']['hbase.master.kerberos.principal'].replace('_HOST',_hostname_lowercase)
-  regionserver_jaas_princ = config['configurations']['hbase-site']['hbase.regionserver.kerberos.principal'].replace('_HOST',_hostname_lowercase)
-
-master_keytab_path = config['configurations']['hbase-site']['hbase.master.keytab.file']
-regionserver_keytab_path = config['configurations']['hbase-site']['hbase.regionserver.keytab.file']
-smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
-hbase_user_keytab = config['configurations']['hbase-env']['hbase_user_keytab']
-kinit_path_local = functions.get_kinit_path()
-if security_enabled:
-  kinit_cmd = format("{kinit_path_local} -kt {hbase_user_keytab} {hbase_principal_name};")
-else:
-  kinit_cmd = ""
-
-#log4j.properties
-if (('hbase-log4j' in config['configurations']) and ('content' in config['configurations']['hbase-log4j'])):
-  log4j_props = config['configurations']['hbase-log4j']['content']
-else:
-  log4j_props = None
-  
-hbase_env_sh_template = config['configurations']['hbase-env']['content']
-
-hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']
-hbase_staging_dir = "/apps/hbase/staging"
-#for create_hdfs_directory
-hostname = config["hostname"]
-hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
-hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
-hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-kinit_path_local = functions.get_kinit_path()
-import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
-
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  command_role = default("/role", "")
-  if command_role == "HBASE_MASTER" or command_role == "HBASE_REGIONSERVER":
-    role_root = "master" if command_role == "HBASE_MASTER" else "regionserver"
-
-    daemon_script = format("/usr/hdp/current/hbase-{role_root}/bin/hbase-daemon.sh")
-    region_mover = format("/usr/hdp/current/hbase-{role_root}/bin/region_mover.rb")
-    region_drainer = format("/usr/hdp/current/hbase-{role_root}/bin/draining_servers.rb")
-    hbase_cmd = format("/usr/hdp/current/hbase-{role_root}/bin/hbase")
-
-if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
-  # Setting Flag value for ranger hbase plugin
-  enable_ranger_hbase = False
-  ranger_plugin_enable = default("/configurations/ranger-hbase-plugin-properties/ranger-hbase-plugin-enabled","no")
-  if ranger_plugin_enable.lower() == 'yes':
-    enable_ranger_hbase = True
-  elif ranger_plugin_enable.lower() == 'no':
-    enable_ranger_hbase = False
-
-# ranger host
-ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
-has_ranger_admin = not len(ranger_admin_hosts) == 0    
-
-ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
-
-
-# ranger hbase properties
-policymgr_mgr_url = default("/configurations/admin-properties/policymgr_external_url", "http://localhost:6080")
-sql_connector_jar = default("/configurations/admin-properties/SQL_CONNECTOR_JAR", "/usr/share/java/mysql-connector-java.jar")
-xa_audit_db_flavor = default("/configurations/admin-properties/DB_FLAVOR", "MYSQL")
-xa_audit_db_name = default("/configurations/admin-properties/audit_db_name", "ranger_audit")
-xa_audit_db_user = default("/configurations/admin-properties/audit_db_user", "rangerlogger")
-xa_audit_db_password = default("/configurations/admin-properties/audit_db_password", "rangerlogger")
-xa_db_host = default("/configurations/admin-properties/db_host", "localhost")
-repo_name = str(config['clusterName']) + '_hbase'
-db_enabled = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.DB.IS_ENABLED", "false")
-hdfs_enabled = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.IS_ENABLED", "false")
-hdfs_dest_dir = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.DESTINATION_DIRECTORY", "hdfs://__REPLACE__NAME_NODE_HOST:8020/ranger/audit/app-type/time:yyyyMMdd")
-hdfs_buffer_dir = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit")
-hdfs_archive_dir = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY", "__REPLACE__LOG_DIR/hadoop/app-type/audit/archive")
-hdfs_dest_file = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FILE", "hostname-audit.log")
-hdfs_dest_flush_int_sec = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS", "900")
-hdfs_dest_rollover_int_sec = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS", "86400")
-hdfs_dest_open_retry_int_sec = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS", "60")
-hdfs_buffer_file = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FILE", "time:yyyyMMdd-HHmm.ss.log")
-hdfs_buffer_flush_int_sec = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS", "60")
-hdfs_buffer_rollover_int_sec = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS", "600")
-hdfs_archive_max_file_count = default("/configurations/ranger-hbase-plugin-properties/XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT", "10")
-ssl_keystore_file = default("/configurations/ranger-hbase-plugin-properties/SSL_KEYSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-keystore.jks")
-ssl_keystore_password = default("/configurations/ranger-hbase-plugin-properties/SSL_KEYSTORE_PASSWORD", "myKeyFilePassword")
-ssl_truststore_file = default("/configurations/ranger-hbase-plugin-properties/SSL_TRUSTSTORE_FILE_PATH", "/etc/hadoop/conf/ranger-plugin-truststore.jks")
-ssl_truststore_password = default("/configurations/ranger-hbase-plugin-properties/SSL_TRUSTSTORE_PASSWORD", "changeit")
-grant_revoke = default("/configurations/ranger-hbase-plugin-properties/UPDATE_XAPOLICIES_ON_GRANT_REVOKE","true")
-common_name_for_certificate = default("/configurations/ranger-hbase-plugin-properties/common.name.for.certificate", "-")
-
-zookeeper_znode_parent = config['configurations']['hbase-site']['zookeeper.znode.parent']
-hbase_zookeeoer_quorum = config['configurations']['hbase-site']['hbase.zookeeper.quorum']
-hbase_zookeeper_property_clientPort = config['configurations']['hbase-site']['hbase.zookeeper.property.clientPort']
-hbase_security_authentication = config['configurations']['hbase-site']['hbase.security.authentication']
-hadoop_security_authentication = config['configurations']['core-site']['hadoop.security.authentication']
-
-repo_config_username = 
default("/configurations/ranger-hbase-plugin-properties/REPOSITORY_CONFIG_USERNAME",
 "hbase")
-repo_config_password = 
default("/configurations/ranger-hbase-plugin-properties/REPOSITORY_CONFIG_PASSWORD",
 "hbase")
-
-admin_uname = default("/configurations/ranger-env/admin_username", "admin")
-admin_password = default("/configurations/ranger-env/admin_password", "admin")
-admin_uname_password = format("{admin_uname}:{admin_password}")
-
-ambari_ranger_admin = 
default("/configurations/ranger-env/ranger_admin_username", "amb_ranger_admin")
-ambari_ranger_password = 
default("/configurations/ranger-env/ranger_admin_password", "ambari123")
-policy_user = 
default("/configurations/ranger-hbase-plugin-properties/policy_user", 
"ambari-qa")
-
-# For the curl command in the Ranger plugin to download the DB connector
-jdk_location = config['hostLevelParams']['jdk_location']
-java_share_dir = '/usr/share/java'
-if xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'mysql':
-  jdbc_symlink_name = "mysql-jdbc-driver.jar"
-  jdbc_jar_name = "mysql-connector-java.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'oracle':
-  jdbc_jar_name = "ojdbc6.jar"
-  jdbc_symlink_name = "oracle-jdbc-driver.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'postgres':
-  jdbc_jar_name = "postgresql.jar"
-  jdbc_symlink_name = "postgres-jdbc-driver.jar"
-elif xa_audit_db_flavor and xa_audit_db_flavor.lower() == 'sqlserver':
-  jdbc_jar_name = "sqljdbc4.jar"
-  jdbc_symlink_name = "mssql-jdbc-driver.jar"
-
-downloaded_custom_connector = format("{exec_tmp_dir}/{jdbc_jar_name}")
-
-driver_curl_source = format("{jdk_location}/{jdbc_symlink_name}")
-driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
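
The params block above resolves nearly every Ranger setting through the
resource_management default() helper: it walks a /-separated path into the
command's configuration dict and falls back to the supplied literal when the
path is absent. A minimal sketch of that lookup, assuming a plain nested dict
in place of the real Script configuration (the extra config argument and the
function body are an approximation, not the library's implementation):

def default(path, fallback, config):
  # Walk e.g. "/configurations/admin-properties/DB_FLAVOR" one segment at a time.
  node = config
  for part in path.strip('/').split('/'):
    if not isinstance(node, dict) or part not in node:
      return fallback  # missing key: use the hard-coded default
    node = node[part]
  return node

# With no admin-properties present in the config, the fallback literal wins:
print(default("/configurations/admin-properties/DB_FLAVOR", "MYSQL", {"configurations": {}}))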

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params_windows.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params_windows.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params_windows.py
deleted file mode 100644
index 571b93c..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/params_windows.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import os
-import status_params
-
-# server configurations
-config = Script.get_config()
-hbase_conf_dir = os.environ["HBASE_CONF_DIR"]
-hbase_bin_dir = os.path.join(os.environ["HBASE_HOME"],'bin')
-hbase_executable = os.path.join(hbase_bin_dir,"hbase.cmd")
-hdp_root = os.path.abspath(os.path.join(os.environ["HADOOP_HOME"],".."))
-hbase_user = "hadoop"
-
-#decomm params
-region_drainer = os.path.join(hbase_bin_dir,"draining_servers.rb")
-region_mover = os.path.join(hbase_bin_dir,"region_mover.rb")
-hbase_excluded_hosts = config['commandParams']['excluded_hosts']
-hbase_drain_only = config['commandParams']['mark_draining_only']
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/phoenix_queryserver.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/phoenix_queryserver.py
deleted file mode 100644
index 3ba5f7f..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/phoenix_queryserver.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-import sys
-from resource_management.libraries.script import Script
-from phoenix_service import phoenix_service
-
-class PhoenixQueryServer(Script):
-
-  def install(self, env):
-    self.install_packages(env)
-
-  def get_stack_to_component(self):
-    return {"HDP": "phoenix-server"}
-
-  def configure(self, env):
-    pass
-
-  def start(self, env, rolling_restart=False):
-    import params
-    env.set_params(params)
-    self.configure(env)
-    phoenix_service('start')
-
-  def stop(self, env, rolling_restart=False):
-    import params
-    env.set_params(params)
-    phoenix_service('stop')
-
-  def status(self, env):
-    import params
-    env.set_params(params)
-    phoenix_service('status')
-
-  def security_status(self, env):
-    self.put_structured_out({"securityState": "UNSECURED"})
-
-if __name__ == "__main__":
-  PhoenixQueryServer().execute()
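
PhoenixQueryServer follows the usual Ambari Script contract: execute() reads
the requested command and dispatches to the method of the same name (install,
start, stop, status). A stripped-down sketch of that dispatch, assuming the
command arrives as the first CLI argument (the real Script reads it from a
command JSON, so this is illustrative only):

import sys

class ScriptSketch(object):
  # Hypothetical stand-in for resource_management's Script.
  def execute(self):
    command = sys.argv[1] if len(sys.argv) > 1 else 'status'
    getattr(self, command)(env=None)  # e.g. 'start' -> self.start(env)

class PhoenixQueryServerSketch(ScriptSketch):
  def start(self, env):
    print("would run phoenix_service('start')")
  def stop(self, env):
    print("would run phoenix_service('stop')")
  def status(self, env):
    print("would run phoenix_service('status')")

if __name__ == "__main__":
  PhoenixQueryServerSketch().execute()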

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/phoenix_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/phoenix_service.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/phoenix_service.py
deleted file mode 100644
index f35b9a0..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/phoenix_service.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions import check_process_status, format
-
-def phoenix_service(action = 'start'): # 'start', 'stop', 'status'
-
-  import params
-
-  cmd = format("{phx_daemon_script}")
-  pid_file = format("{pid_dir}/phoenix-{hbase_user}-server.pid")
-  no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")
-
-  if action == 'start':
-    Execute(format("{cmd} start"))
-
-  elif action == 'stop':
-    daemon_cmd = format("{cmd} stop")
-    Execute(daemon_cmd,
-      timeout = 30,
-      on_timeout = format("! ( {no_op_test} ) || {sudo} -H -E kill -9 `cat {pid_file}`"),
-    )
-    Execute(format("rm -f {pid_file}"))
-
-  elif action == 'status':
-    check_process_status(pid_file)
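
The stop branch is the classic pid-file pattern: attempt a graceful stop and,
if it times out, kill -9 the process recorded in the pid file, then remove the
file. A rough standalone equivalent (Python 3 standard library only; the
command and path arguments are placeholders, not the real daemon script):

import os
import signal
import subprocess

def stop_daemon(stop_cmd, pid_file, timeout=30):
  try:
    # Graceful stop first, bounded by the same 30-second budget as above.
    subprocess.check_call(stop_cmd, shell=True, timeout=timeout)
  except subprocess.TimeoutExpired:
    # Mirror the on_timeout handler: SIGKILL the recorded pid.
    with open(pid_file) as f:
      os.kill(int(f.read().strip()), signal.SIGKILL)
  if os.path.exists(pid_file):
    os.remove(pid_file)  # the equivalent of the trailing rm -f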

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/service_check.py
deleted file mode 100644
index a60ebad..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/service_check.py
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from resource_management.libraries.functions.format import format
-import os
-import functions
-from ambari_commons import OSCheck, OSConst
-from ambari_commons.os_family_impl import OsFamilyImpl
-
-
-class HbaseServiceCheck(Script):
-  pass
-
-
-@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
-class HbaseServiceCheckWindows(HbaseServiceCheck):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-    smoke_cmd = os.path.join(params.hdp_root, "Run-SmokeTests.cmd")
-    service = "HBASE"
-    Execute(format("cmd /C {smoke_cmd} {service}"), logoutput=True)
-
-
-@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
-class HbaseServiceCheckDefault(HbaseServiceCheck):
-  def service_check(self, env):
-    import params
-    env.set_params(params)
-    
-    output_file = "/apps/hbase/data/ambarismoketest"
-    test_cmd = format("fs -test -e {output_file}")
-    smokeuser_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};") if params.security_enabled else ""
-    hbase_servicecheck_file = format("{exec_tmp_dir}/hbase-smoke.sh")
-  
-    File( format("{exec_tmp_dir}/hbaseSmokeVerify.sh"),
-      content = StaticFile("hbaseSmokeVerify.sh"),
-      mode = 0755
-    )
-  
-    File( hbase_servicecheck_file,
-      mode = 0755,
-      content = Template('hbase-smoke.sh.j2')
-    )
-    
-    if params.security_enabled:    
-      hbase_grant_premissions_file = format("{exec_tmp_dir}/hbase_grant_permissions.sh")
-      grantprivelegecmd = format("{kinit_cmd} {hbase_cmd} shell {hbase_grant_premissions_file}")
-  
-      File( hbase_grant_premissions_file,
-        owner   = params.hbase_user,
-        group   = params.user_group,
-        mode    = 0644,
-        content = Template('hbase_grant_permissions.j2')
-      )
-      
-      Execute( grantprivelegecmd,
-        user = params.hbase_user,
-      )
-
-    servicecheckcmd = format("{smokeuser_kinit_cmd} {hbase_cmd} --config {hbase_conf_dir} shell {hbase_servicecheck_file}")
-    smokeverifycmd = format("{smokeuser_kinit_cmd} {exec_tmp_dir}/hbaseSmokeVerify.sh {hbase_conf_dir} {service_check_data} {hbase_cmd}")
-  
-    Execute( servicecheckcmd,
-      tries     = 3,
-      try_sleep = 5,
-      user = params.smoke_test_user,
-      logoutput = True
-    )
-  
-    Execute ( smokeverifycmd,
-      tries     = 3,
-      try_sleep = 5,
-      user = params.smoke_test_user,
-      logoutput = True
-    )
-    
-if __name__ == "__main__":
-  HbaseServiceCheck().execute()
-  
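
Both smoke commands run through Execute with tries=3 and try_sleep=5, i.e. up
to three attempts with a five-second pause after each failure. A rough
stand-in for that retry behaviour using only the standard library (not the
resource_management implementation):

import subprocess
import time

def execute_with_retries(cmd, tries=3, try_sleep=5):
  # Re-run the shell command until it exits 0, pausing between attempts.
  for attempt in range(1, tries + 1):
    try:
      return subprocess.check_output(cmd, shell=True)
    except subprocess.CalledProcessError:
      if attempt == tries:
        raise  # out of attempts: surface the failure
      time.sleep(try_sleep)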

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/setup_ranger_hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/setup_ranger_hbase.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/setup_ranger_hbase.py
deleted file mode 100644
index 7626de8..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/setup_ranger_hbase.py
+++ /dev/null
@@ -1,202 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-import sys
-import fileinput
-import subprocess
-import json
-import re
-import os
-from resource_management import *
-from resource_management.libraries.functions.ranger_functions import Rangeradmin
-from resource_management.core.logger import Logger
-
-def setup_ranger_hbase():
-  import params
-  
-  if params.has_ranger_admin:
-    File(params.downloaded_custom_connector,
-         content = DownloadSource(params.driver_curl_source)
-    )
-
-    if not os.path.isfile(params.driver_curl_target):
-      Execute(('cp', '--remove-destination', params.downloaded_custom_connector, params.driver_curl_target),
-              path=["/bin", "/usr/bin/"],
-              sudo=True)
-
-    try:
-      command = 'hdp-select status hbase-client'
-      return_code, hdp_output = shell.call(command, timeout=20)
-    except Exception, e:
-      Logger.error(str(e))
-      raise Fail('Unable to execute hdp-select command to retrieve the version.')
-
-    if return_code != 0:
-      raise Fail('Unable to determine the current version because of a non-zero return code of {0}'.format(str(return_code)))
-
-    hdp_version = re.sub('hbase-client - ', '', hdp_output).strip()
-    match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', hdp_version)
-
-    if match is None:
-      raise Fail('Failed to get extracted version')
-
-    file_path = '/usr/hdp/' + hdp_version + '/ranger-hbase-plugin/install.properties'
-    if not os.path.isfile(file_path):
-      raise Fail('Ranger HBase plugin install.properties file does not exist at {0}'.format(file_path))
-    
-    ranger_hbase_dict = ranger_hbase_properties()
-    hbase_repo_data = hbase_repo_properties()
-
-    write_properties_to_file(file_path, ranger_hbase_dict)
-
-    if params.enable_ranger_hbase:
-      cmd = format('cd /usr/hdp/{hdp_version}/ranger-hbase-plugin/ && sh enable-hbase-plugin.sh')
-      ranger_adm_obj = Rangeradmin(url=ranger_hbase_dict['POLICY_MGR_URL'])
-      response_code, response_recieved = ranger_adm_obj.check_ranger_login_urllib2(ranger_hbase_dict['POLICY_MGR_URL'] + '/login.jsp', 'test:test')
-
-      if response_code is not None and response_code == 200:
-        ambari_ranger_admin, ambari_ranger_password = ranger_adm_obj.create_ambari_admin_user(params.ambari_ranger_admin, params.ambari_ranger_password, params.admin_uname_password)
-        ambari_username_password_for_ranger = ambari_ranger_admin + ':' + ambari_ranger_password
-        if ambari_ranger_admin != '' and ambari_ranger_password != '':
-          repo = ranger_adm_obj.get_repository_by_name_urllib2(ranger_hbase_dict['REPOSITORY_NAME'], 'hbase', 'true', ambari_username_password_for_ranger)
-          if repo and repo['name'] == ranger_hbase_dict['REPOSITORY_NAME']:
-            Logger.info('Hbase Repository exists')
-          else:
-            response = ranger_adm_obj.create_repository_urllib2(hbase_repo_data, ambari_username_password_for_ranger, params.policy_user)
-            if response is not None:
-              Logger.info('Hbase Repository created in Ranger admin')
-            else:
-              Logger.info('Hbase Repository creation failed in Ranger admin')
-        else:
-          Logger.info('Ambari admin username and password are blank')
-      else:
-        Logger.info('Ranger service is not started on the given host')
-    else:
-      cmd = format('cd /usr/hdp/{hdp_version}/ranger-hbase-plugin/ && sh disable-hbase-plugin.sh')
-
-    Execute(cmd, environment={'JAVA_HOME': params.java64_home}, logoutput=True)
-  else:
-    Logger.info('Ranger admin not installed')
-
-
-def write_properties_to_file(file_path, value):
-  for key in value:
-    modify_config(file_path, key, value[key])
-
-
-def modify_config(filepath, variable, setting):
-  var_found = False
-  already_set = False
-  V = str(variable)
-  S = str(setting)
-  # quote the setting if it contains spaces #
-  if ' ' in S:
-    S = '"%s"' % S
-  for line in fileinput.input(filepath, inplace = 1):
-    # process lines that look like config settings #
-    if not line.lstrip(' ').startswith('#') and '=' in line:
-      _infile_var = str(line.split('=')[0].rstrip(' '))
-      _infile_set = str(line.split('=')[1].lstrip(' ').rstrip())
-      # only change the first matching occurrence #
-      if var_found == False and _infile_var.rstrip(' ') == V:
-        var_found = True
-        # don't change it if it is already set #
-        if _infile_set.lstrip(' ') == S:
-          already_set = True
-        else:
-          line = "%s=%s\n" % (V, S)
-    sys.stdout.write(line)
-
-  # Append the variable if it wasn't found #
-  if not var_found:
-    with open(filepath, "a") as f:
-        f.write("%s=%s\n" % (V, S))
-
-  return
-
-def ranger_hbase_properties():
-  import params
-
-  ranger_hbase_properties = dict()
-
-  ranger_hbase_properties['POLICY_MGR_URL'] = params.policymgr_mgr_url
-  ranger_hbase_properties['SQL_CONNECTOR_JAR'] = params.sql_connector_jar
-  ranger_hbase_properties['XAAUDIT.DB.FLAVOUR'] = params.xa_audit_db_flavor
-  ranger_hbase_properties['XAAUDIT.DB.DATABASE_NAME'] = params.xa_audit_db_name
-  ranger_hbase_properties['XAAUDIT.DB.USER_NAME'] = params.xa_audit_db_user
-  ranger_hbase_properties['XAAUDIT.DB.PASSWORD'] = params.xa_audit_db_password
-  ranger_hbase_properties['XAAUDIT.DB.HOSTNAME'] = params.xa_db_host
-  ranger_hbase_properties['REPOSITORY_NAME'] = params.repo_name
-  ranger_hbase_properties['XAAUDIT.DB.IS_ENABLED'] = params.db_enabled
-
-  ranger_hbase_properties['XAAUDIT.HDFS.IS_ENABLED'] = params.hdfs_enabled
-  ranger_hbase_properties['XAAUDIT.HDFS.DESTINATION_DIRECTORY'] = params.hdfs_dest_dir
-  ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_BUFFER_DIRECTORY'] = params.hdfs_buffer_dir
-  ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_ARCHIVE_DIRECTORY'] = params.hdfs_archive_dir
-  ranger_hbase_properties['XAAUDIT.HDFS.DESTINTATION_FILE'] = params.hdfs_dest_file
-  ranger_hbase_properties['XAAUDIT.HDFS.DESTINTATION_FLUSH_INTERVAL_SECONDS'] = params.hdfs_dest_flush_int_sec
-  ranger_hbase_properties['XAAUDIT.HDFS.DESTINTATION_ROLLOVER_INTERVAL_SECONDS'] = params.hdfs_dest_rollover_int_sec
-  ranger_hbase_properties['XAAUDIT.HDFS.DESTINTATION_OPEN_RETRY_INTERVAL_SECONDS'] = params.hdfs_dest_open_retry_int_sec
-  ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_BUFFER_FILE'] = params.hdfs_buffer_file
-  ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_BUFFER_FLUSH_INTERVAL_SECONDS'] = params.hdfs_buffer_flush_int_sec
-  ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_BUFFER_ROLLOVER_INTERVAL_SECONDS'] = params.hdfs_buffer_rollover_int_sec
-  ranger_hbase_properties['XAAUDIT.HDFS.LOCAL_ARCHIVE_MAX_FILE_COUNT'] = params.hdfs_archive_max_file_count
-
-  ranger_hbase_properties['SSL_KEYSTORE_FILE_PATH'] = params.ssl_keystore_file
-  ranger_hbase_properties['SSL_KEYSTORE_PASSWORD'] = params.ssl_keystore_password
-  ranger_hbase_properties['SSL_TRUSTSTORE_FILE_PATH'] = params.ssl_truststore_file
-  ranger_hbase_properties['SSL_TRUSTSTORE_PASSWORD'] = params.ssl_truststore_password
-
-  ranger_hbase_properties['UPDATE_XAPOLICIES_ON_GRANT_REVOKE'] = params.grant_revoke
-
-  return ranger_hbase_properties
-
-def hbase_repo_properties():
-  import params
-
-  config_dict = dict()
-  config_dict['username'] = params.repo_config_username
-  config_dict['password'] = params.repo_config_password
-  config_dict['hadoop.security.authentication'] = params.hadoop_security_authentication
-  config_dict['hbase.security.authentication'] = params.hbase_security_authentication
-  config_dict['hbase.zookeeper.property.clientPort'] = params.hbase_zookeeper_property_clientPort
-  config_dict['hbase.zookeeper.quorum'] = params.hbase_zookeeoer_quorum
-  config_dict['zookeeper.znode.parent'] = params.zookeeper_znode_parent
-  config_dict['commonNameForCertificate'] = params.common_name_for_certificate
-
-  if params.security_enabled:
-    config_dict['hbase.master.kerberos.principal'] = params.master_jaas_princ
-  else:
-    config_dict['hbase.master.kerberos.principal'] = ''
-
-  repo = dict()
-  repo['isActive'] = "true"
-  repo['config'] = json.dumps(config_dict)
-  repo['description'] = "hbase repo"
-  repo['name'] = params.repo_name
-  repo['repositoryType'] = "hbase"
-  repo['assetType'] = '2'
-
-  data = json.dumps(repo)
-
-  return data
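
write_properties_to_file() and modify_config() above implement a line-oriented
upsert over install.properties: the first line whose key matches is rewritten
in place, a value that is already correct is left alone, and a missing key is
appended. A quick usage sketch against a throwaway file (the path and values
here are hypothetical):

props = '/tmp/install.properties'
with open(props, 'w') as f:
  f.write('POLICY_MGR_URL=http://old-host:6080\n')

# Rewrites the existing key in place...
modify_config(props, 'POLICY_MGR_URL', 'http://ranger-host:6080')
# ...and appends a key that was not present.
modify_config(props, 'REPOSITORY_NAME', 'cl1_hbase')

print(open(props).read())
# POLICY_MGR_URL=http://ranger-host:6080
# REPOSITORY_NAME=cl1_hbase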

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/status_params.py
deleted file mode 100644
index 14a06d3..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/status_params.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-from ambari_commons import OSCheck
-
-config = Script.get_config()
-
-if OSCheck.is_windows_family():
-  hbase_master_win_service_name = "master"
-  hbase_regionserver_win_service_name = "regionserver"
-else:
-  pid_dir = config['configurations']['hbase-env']['hbase_pid_dir']
-  hbase_user = config['configurations']['hbase-env']['hbase_user']
-
-  # Security related/required params
-  hostname = config['hostname']
-  security_enabled = config['configurations']['cluster-env']['security_enabled']
-  kinit_path_local = functions.get_kinit_path()
-  tmp_dir = Script.get_tmp_dir()
-
-
-  hbase_conf_dir_prefix = "/etc/hbase"
-  hbase_conf_dir = format("{hbase_conf_dir_prefix}/conf")

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/upgrade.py
deleted file mode 100644
index 6f2e258..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/scripts/upgrade.py
+++ /dev/null
@@ -1,49 +0,0 @@
-
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-from resource_management import *
-from resource_management.core.resources.system import Execute
-from resource_management.core.shell import call
-from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
-from resource_management.libraries.functions.decorator import retry
-import re
-
-def prestart(env, hdp_component):
-  import params
-
-  if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-    Execute("hdp-select set {0} {1}".format(hdp_component, params.version))
-
-def post_regionserver(env):
-  import params
-  env.set_params(params)
-
-  check_cmd = "echo 'status \"simple\"' | {0} shell".format(params.hbase_cmd)
-
-  exec_cmd = "{0} {1}".format(params.kinit_cmd, check_cmd)
-  call_and_match(exec_cmd, params.hbase_user, params.hostname.lower() + ":")
-
-
-@retry(times=15, sleep_time=2, err_class=Fail)
-def call_and_match(cmd, user, regex):
-
-  code, out = call(cmd, user=user)
-
-  if not (out and re.search(regex, out)):
-    raise Fail("Could not verify RS available")
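
call_and_match() leans on the resource_management @retry decorator: re-invoke
the wrapped function until it stops raising err_class, up to times attempts
spaced sleep_time seconds apart. A rough equivalent built on the standard
library alone (the real decorator's behaviour may differ in detail):

import time

def retry(times=15, sleep_time=2, err_class=Exception):
  def decorator(fn):
    def wrapper(*args, **kwargs):
      for attempt in range(times):
        try:
          return fn(*args, **kwargs)
        except err_class:
          if attempt == times - 1:
            raise  # final attempt: propagate the failure
          time.sleep(sleep_time)
    return wrapper
  return decorator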

http://git-wip-us.apache.org/repos/asf/ambari/blob/0136d893/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2 b/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
deleted file mode 100644
index 50234f9..0000000
--- a/ambari-server/src/main/resources/common-services/HBASE/1.1.0.2.3/package/templates/hadoop-metrics2-hbase.properties-GANGLIA-MASTER.j2
+++ /dev/null
@@ -1,105 +0,0 @@
-{#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#}
-
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# See http://wiki.apache.org/hadoop/GangliaMetrics
-#
-# Make sure you know whether you are using ganglia 3.0 or 3.1.
-# If 3.1, you will have to patch your hadoop instance with HADOOP-4675
-# And, yes, this file is named hadoop-metrics.properties rather than
-# hbase-metrics.properties because we're leveraging the hadoop metrics
-# package and hadoop-metrics.properties is a hardcoded name, at least
-# for the moment.
-#
-# See also http://hadoop.apache.org/hbase/docs/current/metrics.html
-
-# HBase-specific configuration to reset long-running stats (e.g. compactions)
-# If this variable is left out, then the default is no expiration.
-hbase.extendedperiod = 3600
-
-{% if has_metric_collector %}
-
-*.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
-*.sink.timeline.slave.host.name={{hostname}}
-hbase.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-hbase.period=10
-hbase.collector={{metric_collector_host}}:{{metric_collector_port}}
-
-jvm.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-jvm.period=10
-jvm.collector={{metric_collector_host}}:{{metric_collector_port}}
-
-rpc.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-rpc.period=10
-rpc.collector={{metric_collector_host}}:{{metric_collector_port}}
-
-hbase.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
-hbase.sink.timeline.period=10
-hbase.sink.timeline.collector={{metric_collector_host}}:{{metric_collector_port}}
-
-{% else %}
-
-# Configuration of the "hbase" context for ganglia
-# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
-# hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-hbase.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
-hbase.period=10
-hbase.servers={{ganglia_server_host}}:8663
-
-# Configuration of the "jvm" context for ganglia
-# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
-# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
-jvm.period=10
-jvm.servers={{ganglia_server_host}}:8663
-
-# Configuration of the "rpc" context for ganglia
-# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
-# rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext
-rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
-rpc.period=10
-rpc.servers={{ganglia_server_host}}:8663
-
-# Ganglia sink, following the hadoop example
-hbase.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31
-hbase.sink.ganglia.period=10
-
-# default for supportsparse is false
-*.sink.ganglia.supportsparse=true
-
-.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
-.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
-
-hbase.sink.ganglia.servers={{ganglia_server_host}}:8663
-
-{% endif %}
