http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/hbase/replication_admin.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/replication_admin.rb b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
index b9df821..049f0c6 100644
--- a/hbase-shell/src/main/ruby/hbase/replication_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/replication_admin.rb
@@ -43,7 +43,7 @@ module Hbase
     def add_peer(id, args = {}, peer_tableCFs = nil)
       if args.is_a?(Hash)
         unless peer_tableCFs.nil?
-          raise(ArgumentError, "peer_tableCFs should be specified as TABLE_CFS in args")
+          raise(ArgumentError, 'peer_tableCFs should be specified as TABLE_CFS in args')
         end
 
         endpoint_classname = args.fetch(ENDPOINT_CLASSNAME, nil)
@@ -51,12 +51,12 @@ module Hbase
 
         # Handle cases where custom replication endpoint and cluster key are either both provided
         # or neither are provided
-        if endpoint_classname.nil? and cluster_key.nil?
-          raise(ArgumentError, "Either ENDPOINT_CLASSNAME or CLUSTER_KEY must be specified.")
+        if endpoint_classname.nil? && cluster_key.nil?
+          raise(ArgumentError, 'Either ENDPOINT_CLASSNAME or CLUSTER_KEY must be specified.')
         end
 
         # Cluster Key is required for ReplicationPeerConfig for a custom replication endpoint
-        if !endpoint_classname.nil? and cluster_key.nil?
+        if !endpoint_classname.nil? && cluster_key.nil?
           cluster_key = ZKConfig.getZooKeeperClusterKey(@configuration)
         end
 
@@ -81,9 +81,9 @@ module Hbase
         unless data.nil?
           # Convert Strings to Bytes for peer_data
           peer_data = replication_peer_config.get_peer_data
-          data.each{|key, val|
+          data.each do |key, val|
             peer_data.put(Bytes.to_bytes(key), Bytes.to_bytes(val))
-          }
+          end
         end
 
         unless namespaces.nil?
@@ -97,14 +97,14 @@ module Hbase
         unless table_cfs.nil?
           # convert table_cfs to TableName
           map = java.util.HashMap.new
-          table_cfs.each{|key, val|
+          table_cfs.each do |key, val|
             map.put(org.apache.hadoop.hbase.TableName.valueOf(key), val)
-          }
+          end
           replication_peer_config.set_table_cfs_map(map)
         end
         @admin.addReplicationPeer(id, replication_peer_config)
       else
-        raise(ArgumentError, "args must be a Hash")
+        raise(ArgumentError, 'args must be a Hash')
       end
     end
 
@@ -116,10 +116,10 @@ module Hbase
 
     #---------------------------------------------------------------------------------------------
     # Show replcated tables/column families, and their ReplicationType
-    def list_replicated_tables(regex = ".*")
+    def list_replicated_tables(regex = '.*')
       pattern = java.util.regex.Pattern.compile(regex)
-      list = @admin.listReplicatedTableCFs()
-      list.select {|t| pattern.match(t.getTable().getNameAsString())}
+      list = @admin.listReplicatedTableCFs
+      list.select { |t| pattern.match(t.getTable.getNameAsString) }
     end
 
     #----------------------------------------------------------------------------------------------
@@ -144,7 +144,7 @@ module Hbase
     # Show the current tableCFs config for the specified peer
     def show_peer_tableCFs(id)
       rpc = @admin.getReplicationPeerConfig(id)
-      ReplicationSerDeHelper.convertToString(rpc.getTableCFsMap())
+      ReplicationSerDeHelper.convertToString(rpc.getTableCFsMap)
     end
 
    #----------------------------------------------------------------------------------------------
@@ -153,9 +153,9 @@ module Hbase
       unless tableCFs.nil?
         # convert tableCFs to TableName
         map = java.util.HashMap.new
-        tableCFs.each{|key, val|
+        tableCFs.each do |key, val|
           map.put(org.apache.hadoop.hbase.TableName.valueOf(key), val)
-        }
+        end
         rpc = get_peer_config(id)
         unless rpc.nil?
           rpc.setTableCFsMap(map)
@@ -170,9 +170,9 @@ module Hbase
       unless tableCFs.nil?
         # convert tableCFs to TableName
         map = java.util.HashMap.new
-        tableCFs.each{|key, val|
+        tableCFs.each do |key, val|
           map.put(org.apache.hadoop.hbase.TableName.valueOf(key), val)
-        }
+        end
       end
       @admin.appendReplicationPeerTableCFs(id, map)
     end
@@ -183,9 +183,9 @@ module Hbase
       unless tableCFs.nil?
         # convert tableCFs to TableName
         map = java.util.HashMap.new
-        tableCFs.each{|key, val|
+        tableCFs.each do |key, val|
           map.put(org.apache.hadoop.hbase.TableName.valueOf(key), val)
-        }
+        end
       end
       @admin.removeReplicationPeerTableCFs(id, map)
     end
@@ -210,10 +210,8 @@ module Hbase
       unless namespaces.nil?
         rpc = get_peer_config(id)
         unless rpc.nil?
-          ns_set = rpc.getNamespaces()
-          if ns_set.nil?
-            ns_set = java.util.HashSet.new
-          end
+          ns_set = rpc.getNamespaces
+          ns_set = java.util.HashSet.new if ns_set.nil?
           namespaces.each do |n|
             ns_set.add(n)
           end
@@ -228,7 +226,7 @@ module Hbase
       unless namespaces.nil?
         rpc = get_peer_config(id)
         unless rpc.nil?
-          ns_set = rpc.getNamespaces()
+          ns_set = rpc.getNamespaces
           unless ns_set.nil?
             namespaces.each do |n|
               ns_set.remove(n)
@@ -281,14 +279,14 @@ module Hbase
       peers.each do |peer|
         map.put(peer.getPeerId, peer.getPeerConfig)
       end
-      return map
+      map
     end
 
     def get_peer_config(id)
       @admin.getReplicationPeerConfig(id)
     end
 
-    def update_peer_config(id, args={})
+    def update_peer_config(id, args = {})
       # Optional parameters
       config = args.fetch(CONFIG, nil)
       data = args.fetch(DATA, nil)
@@ -302,9 +300,9 @@ module Hbase
       unless data.nil?
         # Convert Strings to Bytes for peer_data
         peer_data = replication_peer_config.get_peer_data
-        data.each{|key, val|
+        data.each do |key, val|
           peer_data.put(Bytes.to_bytes(key), Bytes.to_bytes(val))
-        }
+        end
       end
 
       @admin.updateReplicationPeerConfig(id, replication_peer_config)
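A note on the `and` to `&&` swaps in the hunks above: they are not purely cosmetic. In Ruby, `and` binds more loosely than `=`, so the two operators can parse differently when a condition feeds an assignment. A minimal standalone sketch of the pitfall (illustrative only, not part of the patch; variable names are made up):

    # `and` binds looser than `=`: parsed as (flag = true) and false
    flag = true and false
    flag  # => true
    # `&&` binds tighter than `=`: parsed as flag = (true && false)
    flag = true && false
    flag  # => false

In the conditions touched here both operands are simple predicates and no assignment is involved, so behavior is unchanged; the rewrite just follows the usual Ruby style guidance (RuboCop's Style/AndOr cop) of reserving `and`/`or` for control flow.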
http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/hbase/rsgroup_admin.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/rsgroup_admin.rb b/hbase-shell/src/main/ruby/hbase/rsgroup_admin.rb
index 3b71062..c8ef3e9 100644
--- a/hbase-shell/src/main/ruby/hbase/rsgroup_admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/rsgroup_admin.rb
@@ -36,21 +36,17 @@ module Hbase
     #--------------------------------------------------------------------------
     # Returns a list of groups in hbase
     def list_rs_groups
-      @admin.listRSGroups.map { |g| g.getName }
+      @admin.listRSGroups.map(&:getName)
     end
 
     #--------------------------------------------------------------------------
     # get a group's information
     def get_rsgroup(group_name)
       group = @admin.getRSGroupInfo(group_name)
-      if group.nil?
-        raise(ArgumentError, 'Group does not exist: ' + group_name)
-      end
+      raise(ArgumentError, 'Group does not exist: ' + group_name) if group.nil?
 
       res = {}
-      if block_given?
-        yield('Servers:')
-      end
+      yield('Servers:') if block_given?
 
       servers = []
       group.getServers.each do |v|
@@ -63,9 +59,7 @@ module Hbase
       res[:servers] = servers
 
       tables = []
-      if block_given?
-        yield('Tables:')
-      end
+      yield('Tables:') if block_given?
       group.getTables.each do |v|
         if block_given?
           yield(v.toString)
@@ -75,11 +69,7 @@ module Hbase
       end
       res[:tables] = tables
 
-      if !block_given?
-        res
-      else
-        nil
-      end
+      res unless block_given?
     end
 
     #--------------------------------------------------------------------------
@@ -113,7 +103,7 @@ module Hbase
     #--------------------------------------------------------------------------
     # move server to a group
     def move_tables(dest, *args)
-      tables = java.util.HashSet.new;
+      tables = java.util.HashSet.new
       args[0].each do |s|
         tables.add(org.apache.hadoop.hbase.TableName.valueOf(s))
       end
@@ -124,10 +114,9 @@ module Hbase
     # get group of server
     def get_rsgroup_of_server(server)
       res = @admin.getRSGroupOfServer(
-        org.apache.hadoop.hbase.net.Address.fromString(server))
-      if res.nil?
-        raise(ArgumentError,'Server has no group: ' + server)
-      end
+        org.apache.hadoop.hbase.net.Address.fromString(server)
+      )
+      raise(ArgumentError, 'Server has no group: ' + server) if res.nil?
       res
     end
 
@@ -135,10 +124,9 @@ module Hbase
     # get group of table
     def get_rsgroup_of_table(table)
       res = @admin.getRSGroupInfoOfTable(
-        org.apache.hadoop.hbase.TableName.valueOf(table))
-      if res.nil?
-        raise(ArgumentError,'Table has no group: ' + table)
-      end
+        org.apache.hadoop.hbase.TableName.valueOf(table)
+      )
+      raise(ArgumentError, 'Table has no group: ' + table) if res.nil?
       res
     end
 
@@ -146,7 +134,7 @@ module Hbase
     # move server and table to a group
     def move_servers_tables(dest, *args)
       servers = java.util.HashSet.new
-      tables = java.util.HashSet.new;
+      tables = java.util.HashSet.new
       args[0].each do |s|
         servers.add(org.apache.hadoop.hbase.net.Address.fromString(s))
       end
@@ -155,6 +143,5 @@ module Hbase
       end
       @admin.moveServersAndTables(servers, tables, dest)
     end
-
   end
 end

http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/hbase/security.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/security.rb b/hbase-shell/src/main/ruby/hbase/security.rb
index 6dd1fe0..63243fa 100644
--- a/hbase-shell/src/main/ruby/hbase/security.rb
+++ b/hbase-shell/src/main/ruby/hbase/security.rb
@@ -26,7 +26,7 @@ module Hbase
 
     def initialize(admin)
       @admin = admin
-      @connection = @admin.getConnection()
+      @connection = @admin.getConnection
     end
 
     def close
@@ -34,31 +34,33 @@ module Hbase
     end
 
     #----------------------------------------------------------------------------------------------
-    def grant(user, permissions, table_name=nil, family=nil, qualifier=nil)
+    def grant(user, permissions, table_name = nil, family = nil, qualifier = nil)
       security_available?
 
       # TODO: need to validate user name
 
       begin
         # Verify that the specified permission is valid
-        if (permissions == nil || permissions.length == 0)
-          raise(ArgumentError, "Invalid permission: no actions associated with user")
+        if permissions.nil? || permissions.empty?
+          raise(ArgumentError, 'Invalid permission: no actions associated with user')
         end
 
         perm = org.apache.hadoop.hbase.security.access.Permission.new(
-          permissions.to_java_bytes)
+          permissions.to_java_bytes
+        )
 
-        if (table_name != nil)
-          tablebytes=table_name.to_java_bytes
-          #check if the tablename passed is actually a namespace
-          if (isNamespace?(table_name))
+        if !table_name.nil?
+          tablebytes = table_name.to_java_bytes
+          # check if the tablename passed is actually a namespace
+          if isNamespace?(table_name)
             # Namespace should exist first.
             namespace_name = table_name[1...table_name.length]
             raise(ArgumentError, "Can't find a namespace: #{namespace_name}") unless namespace_exists?(namespace_name)
 
             org.apache.hadoop.hbase.security.access.AccessControlClient.grant(
-              @connection, namespace_name, user, perm.getActions())
+              @connection, namespace_name, user, perm.getActions
+            )
           else
             # Table should exist
             raise(ArgumentError, "Can't find a table: #{table_name}") unless exists?(table_name)
@@ -66,100 +68,106 @@ module Hbase
 
             tableName = org.apache.hadoop.hbase.TableName.valueOf(table_name)
             htd = @admin.getTableDescriptor(tableName)
 
-            if (family != nil)
-              raise(ArgumentError, "Can't find a family: #{family}") unless htd.hasFamily(family.to_java_bytes)
+            unless family.nil?
+              raise(ArgumentError, "Can't find a family: #{family}") unless htd.hasFamily(family.to_java_bytes)
             end
 
-            fambytes = family.to_java_bytes if (family != nil)
-            qualbytes = qualifier.to_java_bytes if (qualifier != nil)
+            fambytes = family.to_java_bytes unless family.nil?
+            qualbytes = qualifier.to_java_bytes unless qualifier.nil?
 
             org.apache.hadoop.hbase.security.access.AccessControlClient.grant(
-              @connection, tableName, user, fambytes, qualbytes, perm.getActions())
+              @connection, tableName, user, fambytes, qualbytes, perm.getActions
+            )
           end
         else
          # invoke cp endpoint to perform access controls
          org.apache.hadoop.hbase.security.access.AccessControlClient.grant(
-            @connection, user, perm.getActions())
+            @connection, user, perm.getActions
+          )
        end
      end
    end
 
     #----------------------------------------------------------------------------------------------
-    def revoke(user, table_name=nil, family=nil, qualifier=nil)
+    def revoke(user, table_name = nil, family = nil, qualifier = nil)
       security_available?
 
       # TODO: need to validate user name
 
       begin
-        if (table_name != nil)
-          #check if the tablename passed is actually a namespace
-          if (isNamespace?(table_name))
+        if !table_name.nil?
+          # check if the tablename passed is actually a namespace
+          if isNamespace?(table_name)
            # Namespace should exist first.
            namespace_name = table_name[1...table_name.length]
            raise(ArgumentError, "Can't find a namespace: #{namespace_name}") unless namespace_exists?(namespace_name)
 
-            tablebytes=table_name.to_java_bytes
            org.apache.hadoop.hbase.security.access.AccessControlClient.revoke(
-              @connection, namespace_name, user)
+              @connection, namespace_name, user
+            )
          else
-           # Table should exist
-           raise(ArgumentError, "Can't find a table: #{table_name}") unless exists?(table_name)
+            # Table should exist
+            raise(ArgumentError, "Can't find a table: #{table_name}") unless exists?(table_name)
 
-           tableName = org.apache.hadoop.hbase.TableName.valueOf(table_name)
-           htd = @admin.getTableDescriptor(tableName)
+            tableName = org.apache.hadoop.hbase.TableName.valueOf(table_name)
+            htd = @admin.getTableDescriptor(tableName)
 
-           if (family != nil)
-             raise(ArgumentError, "Can't find a family: #{family}") unless htd.hasFamily(family.to_java_bytes)
-           end
+            unless family.nil?
+              raise(ArgumentError, "Can't find a family: #{family}") unless htd.hasFamily(family.to_java_bytes)
+            end
 
-           fambytes = family.to_java_bytes if (family != nil)
-           qualbytes = qualifier.to_java_bytes if (qualifier != nil)
+            fambytes = family.to_java_bytes unless family.nil?
+            qualbytes = qualifier.to_java_bytes unless qualifier.nil?
 
            org.apache.hadoop.hbase.security.access.AccessControlClient.revoke(
-              @connection, tableName, user, fambytes, qualbytes)
+              @connection, tableName, user, fambytes, qualbytes
+            )
          end
        else
          perm = org.apache.hadoop.hbase.security.access.Permission.new(''.to_java_bytes)
          org.apache.hadoop.hbase.security.access.AccessControlClient.revoke(
-            @connection, user, perm.getActions())
+            @connection, user, perm.getActions
+          )
        end
      end
    end
 
     #----------------------------------------------------------------------------------------------
-    def user_permission(table_regex=nil)
+    def user_permission(table_regex = nil)
       security_available?
       all_perms = org.apache.hadoop.hbase.security.access.AccessControlClient.getUserPermissions(
-        @connection,table_regex)
+        @connection, table_regex
+      )
       res = {}
-      count = 0
+      count = 0
       all_perms.each do |value|
-        user_name = String.from_java_bytes(value.getUser)
-        if (table_regex != nil && isNamespace?(table_regex))
-          namespace = value.getNamespace()
-        else
-          namespace = (value.getTableName != nil) ? value.getTableName.getNamespaceAsString() : value.getNamespace()
-        end
-        table = (value.getTableName != nil) ? value.getTableName.getNameAsString() : ''
-        family = (value.getFamily != nil) ?
-          org.apache.hadoop.hbase.util.Bytes::toStringBinary(value.getFamily) :
-          ''
-        qualifier = (value.getQualifier != nil) ?
-          org.apache.hadoop.hbase.util.Bytes::toStringBinary(value.getQualifier) :
-          ''
-
-        action = org.apache.hadoop.hbase.security.access.Permission.new value.getActions
-
-        if block_given?
-          yield(user_name, "#{namespace},#{table},#{family},#{qualifier}: #{action.to_s}")
-        else
-          res[user_name] ||= {}
-          res[user_name]["#{family}:#{qualifier}"] = action
-        end
-        count += 1
+        user_name = String.from_java_bytes(value.getUser)
+        if !table_regex.nil? && isNamespace?(table_regex)
+          namespace = value.getNamespace
+        else
+          namespace = !value.getTableName.nil? ? value.getTableName.getNamespaceAsString : value.getNamespace
+        end
+        table = !value.getTableName.nil? ? value.getTableName.getNameAsString : ''
+        family = !value.getFamily.nil? ?
+          org.apache.hadoop.hbase.util.Bytes.toStringBinary(value.getFamily) :
+          ''
+        qualifier = !value.getQualifier.nil? ?
+          org.apache.hadoop.hbase.util.Bytes.toStringBinary(value.getQualifier) :
+          ''
+
+        action = org.apache.hadoop.hbase.security.access.Permission.new value.getActions
+
+        if block_given?
+          yield(user_name, "#{namespace},#{table},#{family},#{qualifier}: #{action}")
+        else
+          res[user_name] ||= {}
+          res[user_name]["#{family}:#{qualifier}"] = action
+        end
+        count += 1
       end
-      return ((block_given?) ? count : res)
+      (block_given? ? count : res)
     end
 
     # Does table exist?
@@ -171,15 +179,15 @@ module Hbase
       table_name.start_with?('@')
     end
 
-    # Does Namespace exist
+    # Does Namespace exist
     def namespace_exists?(namespace_name)
-      return @admin.getNamespaceDescriptor(namespace_name) != nil
+      return !@admin.getNamespaceDescriptor(namespace_name).nil?
     rescue org.apache.hadoop.hbase.NamespaceNotFoundException => e
       return false
     end
 
     # Make sure that security features are available
-    def security_available?()
+    def security_available?
       caps = []
       begin
         # Try the getSecurityCapabilities API where supported.
@@ -189,11 +197,11 @@ module Hbase
       rescue
         # If we are unable to use getSecurityCapabilities, fall back with a check for
         # deployment of the ACL table
-        raise(ArgumentError, "DISABLED: Security features are not available") unless \
+        raise(ArgumentError, 'DISABLED: Security features are not available') unless \
           exists?(org.apache.hadoop.hbase.security.access.AccessControlLists::ACL_TABLE_NAME)
         return
       end
-      raise(ArgumentError, "DISABLED: Security features are not available") unless \
+      raise(ArgumentError, 'DISABLED: Security features are not available') unless \
         caps.include? org.apache.hadoop.hbase.client.security.SecurityCapability::AUTHORIZATION
     end
   end

http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/hbase/table.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb b/hbase-shell/src/main/ruby/hbase/table.rb
index 0dda27a..b297f58 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -32,7 +32,7 @@ module Hbase
     #
     # e.g. name = scan, adds table.scan which calls Scan.scan
     def self.add_shell_command(name)
-      self.add_command(name, name, name)
+      add_command(name, name, name)
     end
 
     # add a named command to the table instance
@@ -42,18 +42,18 @@ module Hbase
     # shell_command - name of the command in the shell
     # internal_method_name - name of the method in the shell command to forward the call
     def self.add_command(name, shell_command, internal_method_name)
-      method  = name.to_sym
-      self.class_eval do
+      method = name.to_sym
+      class_eval do
         define_method method do |*args|
-          @shell.internal_command(shell_command, internal_method_name, self, *args)
-        end
+          @shell.internal_command(shell_command, internal_method_name, self, *args)
+        end
       end
     end
 
     # General help for the table
     # class level so we can call it from anywhere
     def self.help
-      return <<-EOF
+      <<-EOF
 Help for table-reference commands.
 
 You can either create a table via 'create' and then manipulate the table via commands like 'put', 'get', etc.
@@ -113,13 +113,13 @@ EOF
 
     def initialize(table, shell)
       @table = table
-      @name = @table.getName().getNameAsString()
+      @name = @table.getName.getNameAsString
       @shell = shell
-      @converters = Hash.new()
+      @converters = {}
     end
 
-    def close()
-      @table.close()
+    def close
+      @table.close
     end
 
     # Note the below methods are prefixed with '_' to hide them from the average user, as
@@ -131,21 +131,21 @@ EOF
       p = org.apache.hadoop.hbase.client.Put.new(row.to_s.to_java_bytes)
       family, qualifier = parse_column_name(column)
       if args.any?
-        attributes = args[ATTRIBUTES]
-        set_attributes(p, attributes) if attributes
-        visibility = args[VISIBILITY]
-        set_cell_visibility(p, visibility) if visibility
-        ttl = args[TTL]
-        set_op_ttl(p, ttl) if ttl
-      end
-      #Case where attributes are specified without timestamp
-      if timestamp.kind_of?(Hash)
-        timestamp.each do |k, v|
+        attributes = args[ATTRIBUTES]
+        set_attributes(p, attributes) if attributes
+        visibility = args[VISIBILITY]
+        set_cell_visibility(p, visibility) if visibility
+        ttl = args[TTL]
+        set_op_ttl(p, ttl) if ttl
+      end
+      # Case where attributes are specified without timestamp
+      if timestamp.is_a?(Hash)
+        timestamp.each do |k, v|
           if k == 'ATTRIBUTES'
             set_attributes(p, v)
           elsif k == 'VISIBILITY'
             set_cell_visibility(p, v)
-          elsif k == "TTL"
+          elsif k == 'TTL'
             set_op_ttl(p, v)
           end
         end
@@ -162,36 +162,36 @@ EOF
     #----------------------------------------------------------------------------------------------
     # Create a Delete mutation
     def _createdelete_internal(row, column = nil,
-        timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP, args = {})
+                               timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP, args = {})
       temptimestamp = timestamp
-      if temptimestamp.kind_of?(Hash)
+      if temptimestamp.is_a?(Hash)
         timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP
       end
       d = org.apache.hadoop.hbase.client.Delete.new(row.to_s.to_java_bytes, timestamp)
-      if temptimestamp.kind_of?(Hash)
-        temptimestamp.each do |k, v|
-          if v.kind_of?(String)
+      if temptimestamp.is_a?(Hash)
+        temptimestamp.each do |_k, v|
+          if v.is_a?(String)
             set_cell_visibility(d, v) if v
           end
         end
       end
       if args.any?
-        visibility = args[VISIBILITY]
-        set_cell_visibility(d, visibility) if visibility
+        visibility = args[VISIBILITY]
+        set_cell_visibility(d, visibility) if visibility
       end
       if column
         family, qualifier = parse_column_name(column)
         d.addColumns(family, qualifier, timestamp)
       end
-      return d
+      d
     end
 
     #----------------------------------------------------------------------------------------------
     # Delete rows using prefix
     def _deleterows_internal(row, column = nil,
-        timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP, args={})
-      cache = row["CACHE"] ? row["CACHE"] : 100
-      prefix = row["ROWPREFIXFILTER"]
+                             timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP, args = {})
+      cache = row['CACHE'] ? row['CACHE'] : 100
+      prefix = row['ROWPREFIXFILTER']
 
       # create scan to get table names using prefix
       scan = org.apache.hadoop.hbase.client.Scan.new
@@ -204,7 +204,7 @@ EOF
       iter = scanner.iterator
       while iter.hasNext
         row = iter.next
-        key = org.apache.hadoop.hbase.util.Bytes::toStringBinary(row.getRow)
+        key = org.apache.hadoop.hbase.util.Bytes.toStringBinary(row.getRow)
         d = _createdelete_internal(key, column, timestamp, args)
         list.add(d)
         if list.size >= cache
@@ -218,20 +218,20 @@ EOF
     #----------------------------------------------------------------------------------------------
     # Delete a cell
     def _delete_internal(row, column,
-        timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP, args = {})
+                         timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP, args = {})
       _deleteall_internal(row, column, timestamp, args)
     end
 
     #----------------------------------------------------------------------------------------------
     # Delete a row
     def _deleteall_internal(row, column = nil,
-        timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP, args = {})
+                            timestamp = org.apache.hadoop.hbase.HConstants::LATEST_TIMESTAMP, args = {})
       # delete operation doesn't need read permission. Retaining the read check for
       # meta table as a part of HBASE-5837.
       if is_meta_table?
-        raise ArgumentError, "Row Not Found" if _get_internal(row).nil?
+        raise ArgumentError, 'Row Not Found' if _get_internal(row).nil?
       end
-      if row.kind_of?(Hash)
+      if row.is_a?(Hash)
         _deleterows_internal(row, column, timestamp, args)
       else
         d = _createdelete_internal(row, column, timestamp, args)
@@ -241,19 +241,17 @@ EOF
 
     #----------------------------------------------------------------------------------------------
     # Increment a counter atomically
-    def _incr_internal(row, column, value = nil, args={})
-      if value.kind_of?(Hash)
-        value = 1
-      end
+    def _incr_internal(row, column, value = nil, args = {})
+      value = 1 if value.is_a?(Hash)
       value ||= 1
       incr = org.apache.hadoop.hbase.client.Increment.new(row.to_s.to_java_bytes)
       family, qualifier = parse_column_name(column)
       if qualifier.nil?
-        raise ArgumentError, "Failed to provide both column family and column qualifier for incr"
+        raise ArgumentError, 'Failed to provide both column family and column qualifier for incr'
       end
       if args.any?
-        attributes = args[ATTRIBUTES]
-        visibility = args[VISIBILITY]
+        attributes = args[ATTRIBUTES]
+        visibility = args[VISIBILITY]
         set_attributes(incr, attributes) if attributes
         set_cell_visibility(incr, visibility) if visibility
         ttl = args[TTL]
@@ -265,21 +263,21 @@ EOF
 
       # Fetch cell value
       cell = result.listCells[0]
-      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValueArray,
-        cell.getValueOffset, cell.getValueLength)
+      org.apache.hadoop.hbase.util.Bytes.toLong(cell.getValueArray,
+                                                cell.getValueOffset, cell.getValueLength)
     end
 
     #----------------------------------------------------------------------------------------------
     # appends the value atomically
-    def _append_internal(row, column, value, args={})
+    def _append_internal(row, column, value, args = {})
       append = org.apache.hadoop.hbase.client.Append.new(row.to_s.to_java_bytes)
       family, qualifier = parse_column_name(column)
       if qualifier.nil?
-        raise ArgumentError, "Failed to provide both column family and column qualifier for append"
+        raise ArgumentError, 'Failed to provide both column family and column qualifier for append'
       end
       if args.any?
-        attributes = args[ATTRIBUTES]
-        visibility = args[VISIBILITY]
+        attributes = args[ATTRIBUTES]
+        visibility = args[VISIBILITY]
         set_attributes(append, attributes) if attributes
         set_cell_visibility(append, visibility) if visibility
         ttl = args[TTL]
@@ -291,31 +289,30 @@ EOF
 
       # Fetch cell value
       cell = result.listCells[0]
-      org.apache.hadoop.hbase.util.Bytes::toStringBinary(cell.getValueArray,
-        cell.getValueOffset, cell.getValueLength)
+      org.apache.hadoop.hbase.util.Bytes.toStringBinary(cell.getValueArray,
+                                                        cell.getValueOffset, cell.getValueLength)
     end
 
     #----------------------------------------------------------------------------------------------
     # Count rows in a table
     def _count_internal(interval = 1000, scan = nil)
-
-      raise(ArgumentError, "Scan argument should be org.apache.hadoop.hbase.client.Scan") \
-        unless scan == nil || scan.kind_of?(org.apache.hadoop.hbase.client.Scan)
+      raise(ArgumentError, 'Scan argument should be org.apache.hadoop.hbase.client.Scan') \
+        unless scan.nil? || scan.is_a?(org.apache.hadoop.hbase.client.Scan)
       # We can safely set scanner caching with the first key only filter
-      if scan == nil
+      if scan.nil?
         scan = org.apache.hadoop.hbase.client.Scan.new
         scan.setCacheBlocks(false)
         scan.setCaching(10)
         scan.setFilter(org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter.new)
       else
         scan.setCacheBlocks(false)
-        filter = scan.getFilter()
+        filter = scan.getFilter
         firstKeyOnlyFilter = org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter.new
-        if filter == nil
+        if filter.nil?
           scan.setFilter(firstKeyOnlyFilter)
         else
-          firstKeyOnlyFilter.setReversed(filter.isReversed())
+          firstKeyOnlyFilter.setReversed(filter.isReversed)
           scan.setFilter(org.apache.hadoop.hbase.filter.FilterList.new(filter, firstKeyOnlyFilter))
         end
       end
@@ -329,15 +326,15 @@ EOF
       while iter.hasNext
         row = iter.next
         count += 1
-        next unless (block_given? && count % interval == 0)
+        next unless block_given? && count % interval == 0
         # Allow command modules to visualize counting process
         yield(count,
-          org.apache.hadoop.hbase.util.Bytes::toStringBinary(row.getRow))
+              org.apache.hadoop.hbase.util.Bytes.toStringBinary(row.getRow))
       end
 
-      scanner.close()
+      scanner.close
       # Return the counter
-      return count
+      count
     end
 
     #----------------------------------------------------------------------------------------------
@@ -346,19 +343,19 @@ EOF
       get = org.apache.hadoop.hbase.client.Get.new(row.to_s.to_java_bytes)
       maxlength = -1
       count = 0
-      @converters.clear()
+      @converters.clear
 
       # Normalize args
-      args = args.first if args.first.kind_of?(Hash)
-      if args.kind_of?(String) || args.kind_of?(Array)
-        columns = [ args ].flatten.compact
+      args = args.first if args.first.is_a?(Hash)
+      if args.is_a?(String) || args.is_a?(Array)
+        columns = [args].flatten.compact
         args = { COLUMNS => columns }
       end
 
       #
       # Parse arguments
       #
-      unless args.kind_of?(Hash)
+      unless args.is_a?(Hash)
         raise ArgumentError, "Failed parse of #{args.inspect}, #{args.class}"
       end
 
@@ -373,17 +370,17 @@ EOF
       converter_class = args.delete(FORMATTER_CLASS) || 'org.apache.hadoop.hbase.util.Bytes'
       unless args.empty?
         columns = args[COLUMN] || args[COLUMNS]
-        if args[VERSIONS]
-          vers = args[VERSIONS]
-        else
-          vers = 1
-        end
+        vers = if args[VERSIONS]
+                 args[VERSIONS]
+               else
+                 1
+               end
         if columns
           # Normalize types, convert string to an array of strings
-          columns = [ columns ] if columns.is_a?(String)
+          columns = [columns] if columns.is_a?(String)
           # At this point it is either an array or some unsupported stuff
-          unless columns.kind_of?(Array)
+          unless columns.is_a?(Array)
             raise ArgumentError, "Failed parse column argument type #{args.inspect}, #{args.class}"
           end
 
@@ -403,14 +400,14 @@ EOF
           get.setTimeRange(args[TIMERANGE][0], args[TIMERANGE][1]) if args[TIMERANGE]
         else
           if attributes
-            set_attributes(get, attributes)
+            set_attributes(get, attributes)
           elsif authorizations
-            set_authorizations(get, authorizations)
+            set_authorizations(get, authorizations)
           else
-            # May have passed TIMESTAMP and row only; wants all columns from ts.
-            unless ts = args[TIMESTAMP] || tr = args[TIMERANGE]
-              raise ArgumentError, "Failed parse of #{args.inspect}, #{args.class}"
-            end
+            # May have passed TIMESTAMP and row only; wants all columns from ts.
+            unless ts = args[TIMESTAMP] || tr = args[TIMERANGE]
+              raise ArgumentError, "Failed parse of #{args.inspect}, #{args.class}"
+            end
           end
 
           get.setMaxVersions(vers)
@@ -422,11 +419,12 @@ EOF
         set_authorizations(get, authorizations) if authorizations
       end
 
-      unless filter.class == String
-        get.setFilter(filter)
-      else
+      if filter.class == String
         get.setFilter(
-          org.apache.hadoop.hbase.filter.ParseFilter.new.parseFilterString(filter.to_java_bytes))
+          org.apache.hadoop.hbase.filter.ParseFilter.new.parseFilterString(filter.to_java_bytes)
+        )
+      else
+        get.setFilter(filter)
       end
 
       get.setConsistency(org.apache.hadoop.hbase.client.Consistency.valueOf(consistency)) if consistency
@@ -444,9 +442,9 @@ EOF
       res = {}
       result.listCells.each do |c|
         family = convert_bytes_with_position(c.getFamilyArray,
-          c.getFamilyOffset, c.getFamilyLength, converter_class, converter)
+                                             c.getFamilyOffset, c.getFamilyLength, converter_class, converter)
         qualifier = convert_bytes_with_position(c.getQualifierArray,
-          c.getQualifierOffset, c.getQualifierLength, converter_class, converter)
+                                                c.getQualifierOffset, c.getQualifierLength, converter_class, converter)
 
         column = "#{family}:#{qualifier}"
         value = to_string(column, c, maxlength, converter_class, converter)
@@ -459,7 +457,7 @@ EOF
       end
 
       # If block given, we've yielded all the results, otherwise just return them
-      return ((block_given?) ? [count, is_stale]: res)
+      (block_given? ? [count, is_stale] : res)
     end
 
     #----------------------------------------------------------------------------------------------
@@ -477,48 +475,48 @@ EOF
 
       # Fetch cell value
       cell = result.listCells[0]
-      org.apache.hadoop.hbase.util.Bytes::toLong(cell.getValueArray,
-        cell.getValueOffset, cell.getValueLength)
+      org.apache.hadoop.hbase.util.Bytes.toLong(cell.getValueArray,
+                                                cell.getValueOffset, cell.getValueLength)
     end
 
     def _hash_to_scan(args)
       if args.any?
-        enablemetrics = args["ALL_METRICS"].nil? ? false : args["ALL_METRICS"]
-        enablemetrics = enablemetrics || !args["METRICS"].nil?
-        filter = args["FILTER"]
-        startrow = args["STARTROW"] || ''
-        stoprow = args["STOPROW"]
-        rowprefixfilter = args["ROWPREFIXFILTER"]
-        timestamp = args["TIMESTAMP"]
-        columns = args["COLUMNS"] || args["COLUMN"] || []
+        enablemetrics = args['ALL_METRICS'].nil? ? false : args['ALL_METRICS']
+        enablemetrics ||= !args['METRICS'].nil?
+        filter = args['FILTER']
+        startrow = args['STARTROW'] || ''
+        stoprow = args['STOPROW']
+        rowprefixfilter = args['ROWPREFIXFILTER']
+        timestamp = args['TIMESTAMP']
+        columns = args['COLUMNS'] || args['COLUMN'] || []
         # If CACHE_BLOCKS not set, then default 'true'.
-        cache_blocks = args["CACHE_BLOCKS"].nil? ? true: args["CACHE_BLOCKS"]
-        cache = args["CACHE"] || 0
-        reversed = args["REVERSED"] || false
-        versions = args["VERSIONS"] || 1
+        cache_blocks = args['CACHE_BLOCKS'].nil? ? true : args['CACHE_BLOCKS']
+        cache = args['CACHE'] || 0
+        reversed = args['REVERSED'] || false
+        versions = args['VERSIONS'] || 1
         timerange = args[TIMERANGE]
-        raw = args["RAW"] || false
+        raw = args['RAW'] || false
         attributes = args[ATTRIBUTES]
         authorizations = args[AUTHORIZATIONS]
         consistency = args[CONSISTENCY]
         # Normalize column names
         columns = [columns] if columns.class == String
-        limit = args["LIMIT"] || -1
-        unless columns.kind_of?(Array)
-          raise ArgumentError.new("COLUMNS must be specified as a String or an Array")
+        limit = args['LIMIT'] || -1
+        unless columns.is_a?(Array)
+          raise ArgumentError, 'COLUMNS must be specified as a String or an Array'
         end
         scan = if stoprow
-          org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes, stoprow.to_java_bytes)
-        else
-          org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes)
+                 org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes, stoprow.to_java_bytes)
+               else
+                 org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes)
         end
 
         # This will overwrite any startrow/stoprow settings
         scan.setRowPrefixFilter(rowprefixfilter.to_java_bytes) if rowprefixfilter
 
         # Clear converters from last scan.
-        @converters.clear()
+        @converters.clear
 
         columns.each do |c|
           family, qualifier = parse_column_name(c.to_s)
@@ -529,11 +527,12 @@ EOF
           end
         end
 
-        unless filter.class == String
-          scan.setFilter(filter)
-        else
+        if filter.class == String
           scan.setFilter(
-            org.apache.hadoop.hbase.filter.ParseFilter.new.parseFilterString(filter.to_java_bytes))
+            org.apache.hadoop.hbase.filter.ParseFilter.new.parseFilterString(filter.to_java_bytes)
+          )
+        else
+          scan.setFilter(filter)
         end
 
         scan.setScanMetricsEnabled(enablemetrics) if enablemetrics
@@ -562,19 +561,19 @@ EOF
     #----------------------------------------------------------------------------------------------
     # Scans whole table or a range of keys and returns rows matching specific criteria
     def _scan_internal(args = {}, scan = nil)
-      raise(ArgumentError, "Args should be a Hash") unless args.kind_of?(Hash)
-      raise(ArgumentError, "Scan argument should be org.apache.hadoop.hbase.client.Scan") \
-        unless scan == nil || scan.kind_of?(org.apache.hadoop.hbase.client.Scan)
+      raise(ArgumentError, 'Args should be a Hash') unless args.is_a?(Hash)
+      raise(ArgumentError, 'Scan argument should be org.apache.hadoop.hbase.client.Scan') \
+        unless scan.nil? || scan.is_a?(org.apache.hadoop.hbase.client.Scan)
 
-      limit = args["LIMIT"] || -1
-      maxlength = args.delete("MAXLENGTH") || -1
+      limit = args['LIMIT'] || -1
+      maxlength = args.delete('MAXLENGTH') || -1
       converter = args.delete(FORMATTER) || nil
       converter_class = args.delete(FORMATTER_CLASS) || 'org.apache.hadoop.hbase.util.Bytes'
       count = 0
       res = {}
 
       # Start the scanner
-      scan = scan == nil ? _hash_to_scan(args) : scan
+      scan = scan.nil? ? _hash_to_scan(args) : scan
       scanner = @table.getScanner(scan)
       iter = scanner.iterator
 
@@ -586,9 +585,9 @@ EOF
         row.listCells.each do |c|
           family = convert_bytes_with_position(c.getFamilyArray,
-            c.getFamilyOffset, c.getFamilyLength, converter_class, converter)
+                                               c.getFamilyOffset, c.getFamilyLength, converter_class, converter)
           qualifier = convert_bytes_with_position(c.getQualifierArray,
-            c.getQualifierOffset, c.getQualifierLength, converter_class, converter)
+                                                  c.getQualifierOffset, c.getQualifierLength, converter_class, converter)
 
           column = "#{family}:#{qualifier}"
           cell = to_string(column, c, maxlength, converter_class, converter)
@@ -609,25 +608,26 @@ EOF
         end
       end
 
-      scanner.close()
-      return ((block_given?) ? [count, is_stale] : res)
+      scanner.close
+      (block_given? ? [count, is_stale] : res)
     end
 
-    # Apply OperationAttributes to puts/scans/gets
+    # Apply OperationAttributes to puts/scans/gets
     def set_attributes(oprattr, attributes)
-      raise(ArgumentError, "Attributes must be a Hash type") unless attributes.kind_of?(Hash)
-      for k,v in attributes
+      raise(ArgumentError, 'Attributes must be a Hash type') unless attributes.is_a?(Hash)
+      for k, v in attributes
         v = v.to_s unless v.nil?
         oprattr.setAttribute(k.to_s, v.to_java_bytes)
       end
     end
 
     def set_cell_permissions(op, permissions)
-      raise(ArgumentError, "Permissions must be a Hash type") unless permissions.kind_of?(Hash)
+      raise(ArgumentError, 'Permissions must be a Hash type') unless permissions.is_a?(Hash)
       map = java.util.HashMap.new
-      permissions.each do |user,perms|
+      permissions.each do |user, perms|
         map.put(user.to_s, org.apache.hadoop.hbase.security.access.Permission.new(
-          perms.to_java_bytes))
+                             perms.to_java_bytes
+                           ))
       end
       op.setACL(map)
     end
@@ -635,15 +635,19 @@ EOF
     def set_cell_visibility(oprattr, visibility)
       oprattr.setCellVisibility(
         org.apache.hadoop.hbase.security.visibility.CellVisibility.new(
-          visibility.to_s))
+          visibility.to_s
+        )
+      )
     end
 
     def set_authorizations(oprattr, authorizations)
-      raise(ArgumentError, "Authorizations must be a Array type") unless authorizations.kind_of?(Array)
-      auths = [ authorizations ].flatten.compact
+      raise(ArgumentError, 'Authorizations must be a Array type') unless authorizations.is_a?(Array)
+      auths = [authorizations].flatten.compact
       oprattr.setAuthorizations(
         org.apache.hadoop.hbase.security.visibility.Authorizations.new(
-          auths.to_java(:string)))
+          auths.to_java(:string)
+        )
+      )
     end
 
     def set_op_ttl(op, ttl)
@@ -664,14 +668,14 @@ EOF
       end
     end
 
-    #Add the following admin utilities to the table
+    # Add the following admin utilities to the table
     add_admin_utils :enable, :disable, :flush, :drop, :describe, :snapshot
 
     #----------------------------
-    #give the general help for the table
+    # give the general help for the table
     # or the named command
-    def help (command = nil)
-      #if there is a command, get the per-command help from the shell
+    def help(command = nil)
+      # if there is a command, get the per-command help from the shell
       if command
         begin
           return @shell.help_command(command)
@@ -680,13 +684,13 @@ EOF
           return nil
         end
       end
-      return @shell.help('table_help')
+      @shell.help('table_help')
     end
 
     # Table to string
     def to_s
-      cl = self.class()
-      return "#{cl} - #{@name}"
+      cl = self.class
+      "#{cl} - #{@name}"
     end
 
     # Standard ruby call to get the return value for an object
@@ -707,51 +711,51 @@ EOF
 
     # Checks if current table is one of the 'meta' tables
     def is_meta_table?
-      org.apache.hadoop.hbase.TableName::META_TABLE_NAME.equals(@table.getName())
+      org.apache.hadoop.hbase.TableName::META_TABLE_NAME.equals(@table.getName)
     end
 
     # Returns family and (when has it) qualifier for a column name
     def parse_column_name(column)
       split = org.apache.hadoop.hbase.KeyValue.parseColumn(column.to_java_bytes)
       set_converter(split) if split.length > 1
-      return split[0], (split.length > 1) ? split[1] : nil
+      [split[0], split.length > 1 ? split[1] : nil]
     end
 
     # Make a String of the passed kv
     # Intercept cells whose format we know such as the info:regioninfo in hbase:meta
-    def to_string(column, kv, maxlength = -1, converter_class = nil, converter=nil)
+    def to_string(column, kv, maxlength = -1, converter_class = nil, converter = nil)
       if is_meta_table?
-        if column == 'info:regioninfo' or column == 'info:splitA' or column == 'info:splitB'
+        if column == 'info:regioninfo' || column == 'info:splitA' || column == 'info:splitB'
           hri = org.apache.hadoop.hbase.HRegionInfo.parseFromOrNull(kv.getValueArray,
-            kv.getValueOffset, kv.getValueLength)
-          return "timestamp=%d, value=%s" % [kv.getTimestamp, hri.toString]
+                                                                    kv.getValueOffset, kv.getValueLength)
+          return format('timestamp=%d, value=%s', kv.getTimestamp, hri.toString)
         end
         if column == 'info:serverstartcode'
           if kv.getValueLength > 0
-            str_val = org.apache.hadoop.hbase.util.Bytes.toLong(kv.getValueArray,
-              kv.getValueOffset, kv.getValueLength)
+            str_val = org.apache.hadoop.hbase.util.Bytes.toLong(kv.getValueArray,
+                                                                kv.getValueOffset, kv.getValueLength)
           else
             str_val = org.apache.hadoop.hbase.util.Bytes.toStringBinary(kv.getValueArray,
-              kv.getValueOffset, kv.getValueLength)
+                                                                        kv.getValueOffset, kv.getValueLength)
           end
-          return "timestamp=%d, value=%s" % [kv.getTimestamp, str_val]
+          return format('timestamp=%d, value=%s', kv.getTimestamp, str_val)
         end
       end
 
       if kv.isDelete
-        val = "timestamp=#{kv.getTimestamp}, type=#{org.apache.hadoop.hbase.KeyValue::Type::codeToType(kv.getType)}"
+        val = "timestamp=#{kv.getTimestamp}, type=#{org.apache.hadoop.hbase.KeyValue::Type.codeToType(kv.getType)}"
       else
         val = "timestamp=#{kv.getTimestamp}, value=#{convert(column, kv, converter_class, converter)}"
       end
-      (maxlength != -1) ? val[0, maxlength] : val
+      maxlength != -1 ? val[0, maxlength] : val
     end
 
-    def convert(column, kv, converter_class='org.apache.hadoop.hbase.util.Bytes', converter='toStringBinary')
-      #use org.apache.hadoop.hbase.util.Bytes as the default class
+    def convert(column, kv, converter_class = 'org.apache.hadoop.hbase.util.Bytes', converter = 'toStringBinary')
+      # use org.apache.hadoop.hbase.util.Bytes as the default class
       converter_class = 'org.apache.hadoop.hbase.util.Bytes' unless converter_class
-      #use org.apache.hadoop.hbase.util.Bytes::toStringBinary as the default convertor
+      # use org.apache.hadoop.hbase.util.Bytes::toStringBinary as the default convertor
       converter = 'toStringBinary' unless converter
-      if @converters.has_key?(column)
+      if @converters.key?(column)
         # lookup the CONVERTER for certain column - "cf:qualifier"
         matches = /c\((.+)\)\.(.+)/.match(@converters[column])
         if matches.nil?
@@ -767,7 +771,7 @@ EOF
       convert_bytes(org.apache.hadoop.hbase.CellUtil.cloneValue(kv), klazz_name, converter)
     end
 
-    def convert_bytes(bytes, converter_class=nil, converter_method=nil)
+    def convert_bytes(bytes, converter_class = nil, converter_method = nil)
       convert_bytes_with_position(bytes, 0, bytes.length, converter_class, converter_method)
     end
 
@@ -792,14 +796,14 @@ EOF
 
     #----------------------------------------------------------------------------------------------
     # Get the split points for the table
-    def _get_splits_internal()
-      locator = @table.getRegionLocator()
-      splits = locator.getAllRegionLocations().
-        map{|i| Bytes.toStringBinary(i.getRegionInfo().getStartKey)}.delete_if{|k| k == ""}
-      locator.close()
-      puts("Total number of splits = %s" % [splits.size + 1])
+    def _get_splits_internal
+      locator = @table.getRegionLocator
+      splits = locator.getAllRegionLocations
+                      .map { |i| Bytes.toStringBinary(i.getRegionInfo.getStartKey) }.delete_if { |k| k == '' }
+      locator.close
+      puts(format('Total number of splits = %s', splits.size + 1))
       puts splits
-      return splits
+      splits
     end
   end
 end

http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/hbase/taskmonitor.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/taskmonitor.rb b/hbase-shell/src/main/ruby/hbase/taskmonitor.rb
index d312558..849752b 100644
--- a/hbase-shell/src/main/ruby/hbase/taskmonitor.rb
+++ b/hbase-shell/src/main/ruby/hbase/taskmonitor.rb
@@ -21,8 +21,8 @@
 include Java
 
 # Add the $HBASE_HOME/lib directory to the ruby load_path to load jackson
-if File.exists?(File.join(File.dirname(__FILE__), "..", "lib"))
-  $LOAD_PATH.unshift File.join(File.dirname(__FILE__), "..", "lib")
+if File.exist?(File.join(File.dirname(__FILE__), '..', 'lib'))
+  $LOAD_PATH.unshift File.join(File.dirname(__FILE__), '..', 'lib')
 end
 
 module Hbase
@@ -32,34 +32,29 @@ module Hbase
     #---------------------------------------------------------------------------------------------
     # Represents information reported by a server on a single MonitoredTask
     class Task
-
-      def initialize(taskMap,host)
-
-        taskMap.each_pair do |k,v|
+      def initialize(taskMap, host)
+        taskMap.each_pair do |k, v|
           case k
-            when "statustimems"
-              @statustime = Time.at(v/1000)
-            when "status"
-              @status = v
-            when "starttimems"
-              @starttime = Time.at(v/1000)
-            when "description"
-              @description = v
-            when "state"
-              @state = v
+          when 'statustimems'
+            @statustime = Time.at(v / 1000)
+          when 'status'
+            @status = v
+          when 'starttimems'
+            @starttime = Time.at(v / 1000)
+          when 'description'
+            @description = v
+          when 'state'
+            @state = v
           end
         end
 
         @host = host
-
       end
 
      def statustime
        # waiting IPC handlers often have statustime = -1, in this case return starttime
-        if @statustime > Time.at(-1)
-          return @statustime
-        end
-        return @starttime
+        return @statustime if @statustime > Time.at(-1)
+        @starttime
      end
 
       attr_reader :host
@@ -67,120 +62,111 @@ module Hbase
       attr_reader :starttime
       attr_reader :description
       attr_reader :state
-
     end
 
-
     def initialize(configuration)
       @conf = configuration
       @conn = org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(@conf)
-      @admin = @conn.getAdmin()
+      @admin = @conn.getAdmin
     end
 
     #---------------------------------------------------------------------------------------------------
     # Returns a filtered list of tasks on the given host
-    def tasksOnHost(filter,host)
-
+    def tasksOnHost(filter, host)
       java_import 'java.net.URL'
       java_import 'org.codehaus.jackson.map.ObjectMapper'
 
-      infoport = @admin.getClusterStatus().getLoad(host).getInfoServerPort().to_s
+      infoport = @admin.getClusterStatus.getLoad(host).getInfoServerPort.to_s
 
       # Note: This condition use constants from hbase-server
-      #if (!@admin.getConfiguration().getBoolean(org.apache.hadoop.hbase.http.ServerConfigurationKeys::HBASE_SSL_ENABLED_KEY,
+      # if (!@admin.getConfiguration().getBoolean(org.apache.hadoop.hbase.http.ServerConfigurationKeys::HBASE_SSL_ENABLED_KEY,
       #  org.apache.hadoop.hbase.http.ServerConfigurationKeys::HBASE_SSL_ENABLED_DEFAULT))
       #  schema = "http://"
-      #else
+      # else
      #  schema = "https://"
-      #end
-      schema = "http://"
-      url = schema + host.hostname + ":" + infoport + "/rs-status?format=json&filter=" + filter
+      # end
+      schema = 'http://'
+      url = schema + host.hostname + ':' + infoport + '/rs-status?format=json&filter=' + filter
 
       json = URL.new(url)
       mapper = ObjectMapper.new
 
       # read and parse JSON
-      tasksArrayList = mapper.readValue(json,java.lang.Object.java_class)
+      tasksArrayList = mapper.readValue(json, java.lang.Object.java_class)
 
       # convert to an array of TaskMonitor::Task instances
-      tasks = Array.new
+      tasks = []
       tasksArrayList.each do |t|
-        tasks.unshift Task.new(t,host)
+        tasks.unshift Task.new(t, host)
      end
 
-      return tasks
-
+      tasks
    end
 
     #---------------------------------------------------------------------------------------------------
     # Prints a table of filtered tasks on requested hosts
-    def tasks(filter,hosts)
-
+    def tasks(filter, hosts)
      # put all tasks on all requested hosts in the same list
      tasks = []
      hosts.each do |host|
-        tasks.concat(tasksOnHost(filter,host))
+        tasks.concat(tasksOnHost(filter, host))
      end
 
-      puts("%d tasks as of: %s" % [tasks.size,Time.now.strftime("%Y-%m-%d %H:%M:%S")])
+      puts(format('%d tasks as of: %s', tasks.size, Time.now.strftime('%Y-%m-%d %H:%M:%S')))
 
-      if tasks.size() == 0
-        puts("No " + filter + " tasks currently running.")
+      if tasks.empty?
+        puts('No ' + filter + ' tasks currently running.')
      else
 
        # determine table width
        longestStatusWidth = 0
        longestDescriptionWidth = 0
        tasks.each do |t|
-          longestStatusWidth = [longestStatusWidth,t.status.length].max
-          longestDescriptionWidth = [longestDescriptionWidth,t.description.length].max
+          longestStatusWidth = [longestStatusWidth, t.status.length].max
+          longestDescriptionWidth = [longestDescriptionWidth, t.description.length].max
        end
 
        # set the maximum character width of each column, without padding
        hostWidth = 15
        startTimeWidth = 19
        stateWidth = 8
-        descriptionWidth = [32,longestDescriptionWidth].min
-        statusWidth = [36,longestStatusWidth + 27].min
+        descriptionWidth = [32, longestDescriptionWidth].min
+        statusWidth = [36, longestStatusWidth + 27].min
 
-        rowSeparator = "+" + "-" * (hostWidth + 2) +
-                       "+" + "-" * (startTimeWidth + 2) +
-                       "+" + "-" * (stateWidth + 2) +
-                       "+" + "-" * (descriptionWidth + 2) +
-                       "+" + "-" * (statusWidth + 2) + "+"
+        rowSeparator = '+' + '-' * (hostWidth + 2) +
+                       '+' + '-' * (startTimeWidth + 2) +
+                       '+' + '-' * (stateWidth + 2) +
+                       '+' + '-' * (descriptionWidth + 2) +
+                       '+' + '-' * (statusWidth + 2) + '+'
 
        # print table header
-        cells = [setCellWidth("Host",hostWidth),
-                 setCellWidth("Start Time",startTimeWidth),
-                 setCellWidth("State",stateWidth),
-                 setCellWidth("Description",descriptionWidth),
-                 setCellWidth("Status",statusWidth)]
+        cells = [setCellWidth('Host', hostWidth),
+                 setCellWidth('Start Time', startTimeWidth),
+                 setCellWidth('State', stateWidth),
+                 setCellWidth('Description', descriptionWidth),
+                 setCellWidth('Status', statusWidth)]
 
-        line = "| %s | %s | %s | %s | %s |" % cells
+        line = format('| %s | %s | %s | %s | %s |', cells)
 
        puts(rowSeparator)
        puts(line)
 
        # print table content
        tasks.each do |t|
+          cells = [setCellWidth(t.host.hostname, hostWidth),
+                   setCellWidth(t.starttime.strftime('%Y-%m-%d %H:%M:%S'), startTimeWidth),
+                   setCellWidth(t.state, stateWidth),
+                   setCellWidth(t.description, descriptionWidth),
+                   setCellWidth(format('%s (since %d seconds ago)', t.status, Time.now - t.statustime), statusWidth)]
 
-          cells = [setCellWidth(t.host.hostname,hostWidth),
-                   setCellWidth(t.starttime.strftime("%Y-%m-%d %H:%M:%S"),startTimeWidth),
-                   setCellWidth(t.state,stateWidth),
-                   setCellWidth(t.description,descriptionWidth),
-                   setCellWidth("%s (since %d seconds ago)" %
-                     [t.status,Time.now - t.statustime], statusWidth)]
-
-          line = "| %s | %s | %s | %s | %s |" % cells
+          line = format('| %s | %s | %s | %s | %s |', cells)
 
          puts(rowSeparator)
          puts(line)
-
        end
 
        puts(rowSeparator)
      end
-
    end
 
     #---------------------------------------------------------------------------------------------------
@@ -189,16 +175,15 @@ module Hbase
     #
 
     # right-pad with spaces or truncate with ellipses to match passed width
-    def setCellWidth(cellContent,width)
-      numCharsTooShort = width-cellContent.length
+    def setCellWidth(cellContent, width)
+      numCharsTooShort = width - cellContent.length
      if numCharsTooShort < 0
        # cellContent is too long, so truncate
-        return cellContent[0,[width-3,0].max] + "." * [3,width].min
+        return cellContent[0, [width - 3, 0].max] + '.' * [3, width].min
      else
        # cellContent is requested width or too short, so right-pad with zero or more spaces
-        return cellContent + " " * numCharsTooShort
+        return cellContent + ' ' * numCharsTooShort
      end
    end
-
  end
 end

http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/hbase/visibility_labels.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/visibility_labels.rb b/hbase-shell/src/main/ruby/hbase/visibility_labels.rb
index 8e6c93c..e2ae2ca 100644
--- a/hbase-shell/src/main/ruby/hbase/visibility_labels.rb
+++ b/hbase-shell/src/main/ruby/hbase/visibility_labels.rb
@@ -23,10 +23,9 @@ java_import org.apache.hadoop.hbase.util.Bytes
 
 module Hbase
   class VisibilityLabelsAdmin
-
     def initialize(admin)
       @admin = admin
-      @connection = @admin.getConnection()
+      @connection = @admin.getConnection
     end
 
     def close
@@ -36,53 +35,43 @@ module Hbase
     def add_labels(*args)
       visibility_feature_available?
       # Normalize args
-      if args.kind_of?(Array)
-        labels = [ args ].flatten.compact
-      end
-      if labels.size() == 0
-        raise(ArgumentError, "Arguments cannot be null")
-      end
+      labels = [args].flatten.compact if args.is_a?(Array)
+      raise(ArgumentError, 'Arguments cannot be null') if labels.empty?
 
       begin
         response = VisibilityClient.addLabels(@connection, labels.to_java(:string))
         if response.nil?
-          raise(ArgumentError, "DISABLED: Visibility labels feature is not available")
+          raise(ArgumentError, 'DISABLED: Visibility labels feature is not available')
         end
-        labelsWithException = ""
-        list = response.getResultList()
+        labelsWithException = ''
+        list = response.getResultList
         list.each do |result|
-          if result.hasException()
-            labelsWithException += Bytes.toString(result.getException().getValue().toByteArray())
-          end
-        end
-        if labelsWithException.length > 0
-          raise(ArgumentError, labelsWithException)
-        end
+          if result.hasException
+            labelsWithException += Bytes.toString(result.getException.getValue.toByteArray)
+          end
+        end
+        raise(ArgumentError, labelsWithException) unless labelsWithException.empty?
       end
     end
 
     def set_auths(user, *args)
       visibility_feature_available?
       # Normalize args
-      if args.kind_of?(Array)
-        auths = [ args ].flatten.compact
-      end
+      auths = [args].flatten.compact if args.is_a?(Array)
 
       begin
         response = VisibilityClient.setAuths(@connection, auths.to_java(:string), user)
         if response.nil?
- raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + raise(ArgumentError, 'DISABLED: Visibility labels feature is not available') end - labelsWithException = "" - list = response.getResultList() + labelsWithException = '' + list = response.getResultList list.each do |result| - if result.hasException() - labelsWithException += Bytes.toString(result.getException().getValue().toByteArray()) - end - end - if labelsWithException.length > 0 - raise(ArgumentError, labelsWithException) + if result.hasException + labelsWithException += Bytes.toString(result.getException.getValue.toByteArray) + end end + raise(ArgumentError, labelsWithException) unless labelsWithException.empty? end end @@ -91,18 +80,18 @@ module Hbase begin response = VisibilityClient.getAuths(@connection, user) if response.nil? - raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + raise(ArgumentError, 'DISABLED: Visibility labels feature is not available') end return response.getAuthList end end - def list_labels(regex = ".*") + def list_labels(regex = '.*') visibility_feature_available? begin response = VisibilityClient.listLabels(@connection, regex) if response.nil? - raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + raise(ArgumentError, 'DISABLED: Visibility labels feature is not available') end return response.getLabelList end @@ -111,30 +100,26 @@ module Hbase def clear_auths(user, *args) visibility_feature_available? # Normalize args - if args.kind_of?(Array) - auths = [ args ].flatten.compact - end + auths = [args].flatten.compact if args.is_a?(Array) begin response = VisibilityClient.clearAuths(@connection, auths.to_java(:string), user) if response.nil? - raise(ArgumentError, "DISABLED: Visibility labels feature is not available") + raise(ArgumentError, 'DISABLED: Visibility labels feature is not available') end - labelsWithException = "" - list = response.getResultList() + labelsWithException = '' + list = response.getResultList list.each do |result| - if result.hasException() - labelsWithException += Bytes.toString(result.getException().getValue().toByteArray()) - end - end - if labelsWithException.length > 0 - raise(ArgumentError, labelsWithException) + if result.hasException + labelsWithException += Bytes.toString(result.getException.getValue.toByteArray) + end end + raise(ArgumentError, labelsWithException) unless labelsWithException.empty? end end # Make sure that lables table is available - def visibility_feature_available?() + def visibility_feature_available? caps = [] begin # Try the getSecurityCapabilities API where supported. @@ -142,11 +127,11 @@ module Hbase rescue # If we are unable to use getSecurityCapabilities, fall back with a check for # deployment of the labels table - raise(ArgumentError, "DISABLED: Visibility labels feature is not available") unless \ + raise(ArgumentError, 'DISABLED: Visibility labels feature is not available') unless \ exists?(VisibilityConstants::LABELS_TABLE_NAME) return end - raise(ArgumentError, "DISABLED: Visibility labels feature is not available") unless \ + raise(ArgumentError, 'DISABLED: Visibility labels feature is not available') unless \ caps.include? 
org.apache.hadoop.hbase.client.security.SecurityCapability::CELL_VISIBILITY end http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/hbase_constants.rb ---------------------------------------------------------------------- diff --git a/hbase-shell/src/main/ruby/hbase_constants.rb b/hbase-shell/src/main/ruby/hbase_constants.rb index 55806cf..ebaae78 100644 --- a/hbase-shell/src/main/ruby/hbase_constants.rb +++ b/hbase-shell/src/main/ruby/hbase_constants.rb @@ -32,62 +32,62 @@ java_import('java.lang.Long') { |_package, name| "J#{name}" } java_import('java.lang.Boolean') { |_package, name| "J#{name}" } module HBaseConstants - COLUMN = "COLUMN" - COLUMNS = "COLUMNS" - TIMESTAMP = "TIMESTAMP" - TIMERANGE = "TIMERANGE" + COLUMN = 'COLUMN'.freeze + COLUMNS = 'COLUMNS'.freeze + TIMESTAMP = 'TIMESTAMP'.freeze + TIMERANGE = 'TIMERANGE'.freeze NAME = org.apache.hadoop.hbase.HConstants::NAME VERSIONS = org.apache.hadoop.hbase.HConstants::VERSIONS IN_MEMORY = org.apache.hadoop.hbase.HConstants::IN_MEMORY IN_MEMORY_COMPACTION = org.apache.hadoop.hbase.HColumnDescriptor::IN_MEMORY_COMPACTION METADATA = org.apache.hadoop.hbase.HConstants::METADATA - STOPROW = "STOPROW" - STARTROW = "STARTROW" - ROWPREFIXFILTER = "ROWPREFIXFILTER" + STOPROW = 'STOPROW'.freeze + STARTROW = 'STARTROW'.freeze + ROWPREFIXFILTER = 'ROWPREFIXFILTER'.freeze ENDROW = STOPROW - RAW = "RAW" - LIMIT = "LIMIT" - METHOD = "METHOD" - MAXLENGTH = "MAXLENGTH" - CACHE_BLOCKS = "CACHE_BLOCKS" - ALL_METRICS = "ALL_METRICS" - METRICS = "METRICS" - REVERSED = "REVERSED" - REPLICATION_SCOPE = "REPLICATION_SCOPE" - INTERVAL = 'INTERVAL' - CACHE = 'CACHE' - FILTER = 'FILTER' - SPLITS = 'SPLITS' - SPLITS_FILE = 'SPLITS_FILE' - SPLITALGO = 'SPLITALGO' - NUMREGIONS = 'NUMREGIONS' - REGION_REPLICATION = 'REGION_REPLICATION' - REGION_REPLICA_ID = 'REGION_REPLICA_ID' + RAW = 'RAW'.freeze + LIMIT = 'LIMIT'.freeze + METHOD = 'METHOD'.freeze + MAXLENGTH = 'MAXLENGTH'.freeze + CACHE_BLOCKS = 'CACHE_BLOCKS'.freeze + ALL_METRICS = 'ALL_METRICS'.freeze + METRICS = 'METRICS'.freeze + REVERSED = 'REVERSED'.freeze + REPLICATION_SCOPE = 'REPLICATION_SCOPE'.freeze + INTERVAL = 'INTERVAL'.freeze + CACHE = 'CACHE'.freeze + FILTER = 'FILTER'.freeze + SPLITS = 'SPLITS'.freeze + SPLITS_FILE = 'SPLITS_FILE'.freeze + SPLITALGO = 'SPLITALGO'.freeze + NUMREGIONS = 'NUMREGIONS'.freeze + REGION_REPLICATION = 'REGION_REPLICATION'.freeze + REGION_REPLICA_ID = 'REGION_REPLICA_ID'.freeze CONFIGURATION = org.apache.hadoop.hbase.HConstants::CONFIGURATION - ATTRIBUTES="ATTRIBUTES" - VISIBILITY="VISIBILITY" - AUTHORIZATIONS = "AUTHORIZATIONS" - SKIP_FLUSH = 'SKIP_FLUSH' - CONSISTENCY = "CONSISTENCY" - USER = 'USER' - TABLE = 'TABLE' - NAMESPACE = 'NAMESPACE' - TYPE = 'TYPE' - NONE = 'NONE' - VALUE = 'VALUE' - ENDPOINT_CLASSNAME = 'ENDPOINT_CLASSNAME' - CLUSTER_KEY = 'CLUSTER_KEY' - TABLE_CFS = 'TABLE_CFS' - NAMESPACES = 'NAMESPACES' - CONFIG = 'CONFIG' - DATA = 'DATA' - SERVER_NAME = 'SERVER_NAME' - LOCALITY_THRESHOLD = 'LOCALITY_THRESHOLD' - RESTORE_ACL = 'RESTORE_ACL' - FORMATTER = 'FORMATTER' - FORMATTER_CLASS = 'FORMATTER_CLASS' - POLICY = 'POLICY' - REGIONSERVER = 'REGIONSERVER' + ATTRIBUTES = 'ATTRIBUTES'.freeze + VISIBILITY = 'VISIBILITY'.freeze + AUTHORIZATIONS = 'AUTHORIZATIONS'.freeze + SKIP_FLUSH = 'SKIP_FLUSH'.freeze + CONSISTENCY = 'CONSISTENCY'.freeze + USER = 'USER'.freeze + TABLE = 'TABLE'.freeze + NAMESPACE = 'NAMESPACE'.freeze + TYPE = 'TYPE'.freeze + NONE = 'NONE'.freeze + VALUE = 'VALUE'.freeze + ENDPOINT_CLASSNAME = 
http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/hbase_constants.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase_constants.rb b/hbase-shell/src/main/ruby/hbase_constants.rb
index 55806cf..ebaae78 100644
--- a/hbase-shell/src/main/ruby/hbase_constants.rb
+++ b/hbase-shell/src/main/ruby/hbase_constants.rb
@@ -32,62 +32,62 @@ java_import('java.lang.Long') { |_package, name| "J#{name}" }
 java_import('java.lang.Boolean') { |_package, name| "J#{name}" }
 
 module HBaseConstants
-  COLUMN = "COLUMN"
-  COLUMNS = "COLUMNS"
-  TIMESTAMP = "TIMESTAMP"
-  TIMERANGE = "TIMERANGE"
+  COLUMN = 'COLUMN'.freeze
+  COLUMNS = 'COLUMNS'.freeze
+  TIMESTAMP = 'TIMESTAMP'.freeze
+  TIMERANGE = 'TIMERANGE'.freeze
   NAME = org.apache.hadoop.hbase.HConstants::NAME
   VERSIONS = org.apache.hadoop.hbase.HConstants::VERSIONS
   IN_MEMORY = org.apache.hadoop.hbase.HConstants::IN_MEMORY
   IN_MEMORY_COMPACTION = org.apache.hadoop.hbase.HColumnDescriptor::IN_MEMORY_COMPACTION
   METADATA = org.apache.hadoop.hbase.HConstants::METADATA
-  STOPROW = "STOPROW"
-  STARTROW = "STARTROW"
-  ROWPREFIXFILTER = "ROWPREFIXFILTER"
+  STOPROW = 'STOPROW'.freeze
+  STARTROW = 'STARTROW'.freeze
+  ROWPREFIXFILTER = 'ROWPREFIXFILTER'.freeze
   ENDROW = STOPROW
-  RAW = "RAW"
-  LIMIT = "LIMIT"
-  METHOD = "METHOD"
-  MAXLENGTH = "MAXLENGTH"
-  CACHE_BLOCKS = "CACHE_BLOCKS"
-  ALL_METRICS = "ALL_METRICS"
-  METRICS = "METRICS"
-  REVERSED = "REVERSED"
-  REPLICATION_SCOPE = "REPLICATION_SCOPE"
-  INTERVAL = 'INTERVAL'
-  CACHE = 'CACHE'
-  FILTER = 'FILTER'
-  SPLITS = 'SPLITS'
-  SPLITS_FILE = 'SPLITS_FILE'
-  SPLITALGO = 'SPLITALGO'
-  NUMREGIONS = 'NUMREGIONS'
-  REGION_REPLICATION = 'REGION_REPLICATION'
-  REGION_REPLICA_ID = 'REGION_REPLICA_ID'
+  RAW = 'RAW'.freeze
+  LIMIT = 'LIMIT'.freeze
+  METHOD = 'METHOD'.freeze
+  MAXLENGTH = 'MAXLENGTH'.freeze
+  CACHE_BLOCKS = 'CACHE_BLOCKS'.freeze
+  ALL_METRICS = 'ALL_METRICS'.freeze
+  METRICS = 'METRICS'.freeze
+  REVERSED = 'REVERSED'.freeze
+  REPLICATION_SCOPE = 'REPLICATION_SCOPE'.freeze
+  INTERVAL = 'INTERVAL'.freeze
+  CACHE = 'CACHE'.freeze
+  FILTER = 'FILTER'.freeze
+  SPLITS = 'SPLITS'.freeze
+  SPLITS_FILE = 'SPLITS_FILE'.freeze
+  SPLITALGO = 'SPLITALGO'.freeze
+  NUMREGIONS = 'NUMREGIONS'.freeze
+  REGION_REPLICATION = 'REGION_REPLICATION'.freeze
+  REGION_REPLICA_ID = 'REGION_REPLICA_ID'.freeze
   CONFIGURATION = org.apache.hadoop.hbase.HConstants::CONFIGURATION
-  ATTRIBUTES="ATTRIBUTES"
-  VISIBILITY="VISIBILITY"
-  AUTHORIZATIONS = "AUTHORIZATIONS"
-  SKIP_FLUSH = 'SKIP_FLUSH'
-  CONSISTENCY = "CONSISTENCY"
-  USER = 'USER'
-  TABLE = 'TABLE'
-  NAMESPACE = 'NAMESPACE'
-  TYPE = 'TYPE'
-  NONE = 'NONE'
-  VALUE = 'VALUE'
-  ENDPOINT_CLASSNAME = 'ENDPOINT_CLASSNAME'
-  CLUSTER_KEY = 'CLUSTER_KEY'
-  TABLE_CFS = 'TABLE_CFS'
-  NAMESPACES = 'NAMESPACES'
-  CONFIG = 'CONFIG'
-  DATA = 'DATA'
-  SERVER_NAME = 'SERVER_NAME'
-  LOCALITY_THRESHOLD = 'LOCALITY_THRESHOLD'
-  RESTORE_ACL = 'RESTORE_ACL'
-  FORMATTER = 'FORMATTER'
-  FORMATTER_CLASS = 'FORMATTER_CLASS'
-  POLICY = 'POLICY'
-  REGIONSERVER = 'REGIONSERVER'
+  ATTRIBUTES = 'ATTRIBUTES'.freeze
+  VISIBILITY = 'VISIBILITY'.freeze
+  AUTHORIZATIONS = 'AUTHORIZATIONS'.freeze
+  SKIP_FLUSH = 'SKIP_FLUSH'.freeze
+  CONSISTENCY = 'CONSISTENCY'.freeze
+  USER = 'USER'.freeze
+  TABLE = 'TABLE'.freeze
+  NAMESPACE = 'NAMESPACE'.freeze
+  TYPE = 'TYPE'.freeze
+  NONE = 'NONE'.freeze
+  VALUE = 'VALUE'.freeze
+  ENDPOINT_CLASSNAME = 'ENDPOINT_CLASSNAME'.freeze
+  CLUSTER_KEY = 'CLUSTER_KEY'.freeze
+  TABLE_CFS = 'TABLE_CFS'.freeze
+  NAMESPACES = 'NAMESPACES'.freeze
+  CONFIG = 'CONFIG'.freeze
+  DATA = 'DATA'.freeze
+  SERVER_NAME = 'SERVER_NAME'.freeze
+  LOCALITY_THRESHOLD = 'LOCALITY_THRESHOLD'.freeze
+  RESTORE_ACL = 'RESTORE_ACL'.freeze
+  FORMATTER = 'FORMATTER'.freeze
+  FORMATTER_CLASS = 'FORMATTER_CLASS'.freeze
+  POLICY = 'POLICY'.freeze
+  REGIONSERVER = 'REGIONSERVER'.freeze
 
   # Load constants from hbase java API
   def self.promote_constants(constants)
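The blanket .freeze on the HBaseConstants strings is more than style: a frozen literal cannot be mutated through a shared reference, so no shell command can silently corrupt a constant that every other command reads. A quick plain-Ruby illustration of the difference:

    PLAIN  = 'LIMIT'
    FROZEN = 'LIMIT'.freeze

    PLAIN << '_X'      # silently mutates the shared constant to "LIMIT_X"
    begin
      FROZEN << '_X'   # frozen strings refuse in-place modification
    rescue RuntimeError => e
      puts e.message   # prints a "can't modify frozen String" error
    end

(FrozenError, raised on newer Rubies, is a subclass of RuntimeError, so the rescue above works on old and new interpreters alike.)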
http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/irb/hirb.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/irb/hirb.rb b/hbase-shell/src/main/ruby/irb/hirb.rb
index 4d6d277..904a000 100644
--- a/hbase-shell/src/main/ruby/irb/hirb.rb
+++ b/hbase-shell/src/main/ruby/irb/hirb.rb
@@ -32,28 +32,25 @@ module IRB
     # down in IRB didn't seem to work. I think the worst thing that can
     # happen is the shell exiting because of failed IRB construction with
     # no error (though we're not blanking STDERR)
-      begin
-        # Map the '/dev/null' according to the running platform
-        # Under the Windows platform 'dev/null' is not fully compliant with unix,
-        # and the 'NUL' device needs to be used instead.
-        devnull = "/dev/null"
-        devnull = "NUL" if WINDOZE
-        f = File.open(devnull, "w")
-        $stdout = f
-        super
-      ensure
-        f.close()
-        $stdout = STDOUT
-      end
+
+      # Map the '/dev/null' according to the running platform
+      # Under the Windows platform 'dev/null' is not fully compliant with unix,
+      # and the 'NUL' device needs to be used instead.
+      devnull = '/dev/null'
+      devnull = 'NUL' if WINDOZE
+      f = File.open(devnull, 'w')
+      $stdout = f
+      super
+    ensure
+      f.close
+      $stdout = STDOUT
     end
 
     def output_value
       # Suppress output if last_value is 'nil'
       # Otherwise, when user types help, get ugly 'nil'
       # after all output.
-      if @context.last_value != nil
-        super
-      end
+      super unless @context.last_value.nil?
     end
   end
 end
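Dropping the begin/end wrapper in hirb.rb works because a Ruby method body is itself an implicit begin block, so rescue and ensure clauses can hang directly off def. A standalone sketch of the same redirect-and-restore idiom; silence_stdout is a made-up name, and File::NULL (available since Ruby 1.9.3) subsumes the manual '/dev/null' vs 'NUL' switch that hirb performs by hand:

    def silence_stdout
      f = File.open(File::NULL, 'w')  # portable null device, no platform check needed
      $stdout = f
      yield
    ensure                            # no explicit begin: def opens the block
      f.close if f
      $stdout = STDOUT
    end

    silence_stdout { puts 'discarded' }
    puts 'visible again'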
http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/shell.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/shell.rb b/hbase-shell/src/main/ruby/shell.rb
index aaf26b3..01f1145 100644
--- a/hbase-shell/src/main/ruby/shell.rb
+++ b/hbase-shell/src/main/ruby/shell.rb
@@ -29,7 +29,7 @@ module Shell
     @@command_groups
   end
 
-  def self.load_command(name, group, aliases=[])
+  def self.load_command(name, group, aliases = [])
     return if commands[name]
 
     # Register command in the group
@@ -39,7 +39,7 @@ module Shell
     # Load command
     begin
       require "shell/commands/#{name}"
-      klass_name = name.to_s.gsub(/(?:^|_)(.)/) { $1.upcase } # camelize
+      klass_name = name.to_s.gsub(/(?:^|_)(.)/) { Regexp.last_match(1).upcase } # camelize
       commands[name] = eval("Commands::#{klass_name}")
       aliases.each do |an_alias|
         commands[an_alias] = commands[name]
@@ -53,10 +53,10 @@ module Shell
     raise ArgumentError, "No :commands for group #{group}" unless opts[:commands]
 
     command_groups[group] = {
-      :commands => [],
-      :command_names => opts[:commands],
-      :full_name => opts[:full_name] || group,
-      :comment => opts[:comment]
+      commands: [],
+      command_names: opts[:commands],
+      full_name: opts[:full_name] || group,
+      comment: opts[:comment]
     }
 
     all_aliases = opts[:aliases] || {}
@@ -76,18 +76,18 @@ module Shell
     @debug = false
     attr_accessor :debug
 
-    def initialize(hbase, interactive=true)
+    def initialize(hbase, interactive = true)
       self.hbase = hbase
       self.interactive = interactive
     end
 
     # Returns Admin class from admin.rb
     def admin
-      @admin ||= hbase.admin()
+      @admin ||= hbase.admin
     end
 
     def hbase_taskmonitor
-      @hbase_taskmonitor ||= hbase.taskmonitor()
+      @hbase_taskmonitor ||= hbase.taskmonitor
     end
 
     def hbase_table(name)
@@ -95,23 +95,23 @@ module Shell
     end
 
     def hbase_replication_admin
-      @hbase_replication_admin ||= hbase.replication_admin()
+      @hbase_replication_admin ||= hbase.replication_admin
     end
 
     def hbase_security_admin
-      @hbase_security_admin ||= hbase.security_admin()
+      @hbase_security_admin ||= hbase.security_admin
     end
 
     def hbase_visibility_labels_admin
-      @hbase_visibility_labels_admin ||= hbase.visibility_labels_admin()
+      @hbase_visibility_labels_admin ||= hbase.visibility_labels_admin
     end
 
     def hbase_quotas_admin
-      @hbase_quotas_admin ||= hbase.quotas_admin()
+      @hbase_quotas_admin ||= hbase.quotas_admin
     end
 
     def hbase_rsgroup_admin
-      @rsgroup_admin ||= hbase.rsgroup_admin()
+      @rsgroup_admin ||= hbase.rsgroup_admin
     end
 
     def export_commands(where)
@@ -140,7 +140,7 @@ module Shell
     # Return value is only useful in non-interactive mode, for e.g. tests.
     def command(command, *args)
       ret = internal_command(command, :command, *args)
-      if self.interactive
+      if interactive
         return nil
       else
         return ret
@@ -151,8 +151,8 @@ module Shell
     # command - name of the command to call
     # method_name - name of the method on the command to call. Defaults to just 'command'
     # args - to be passed to the named method
-    def internal_command(command, method_name= :command, *args)
-      command_instance(command).command_safe(self.debug, method_name, *args)
+    def internal_command(command, method_name = :command, *args)
+      command_instance(command).command_safe(debug, method_name, *args)
     end
 
     def print_banner
@@ -168,12 +168,12 @@ module Shell
       puts "Command: #{command}"
       puts command_instance(command).help
       puts
-      return nil
+      nil
     end
 
     def help_command(command)
       puts command_instance(command).help
-      return nil
+      nil
     end
 
     def help_group(group_name)
@@ -185,7 +185,7 @@ module Shell
         puts group[:comment]
         puts
       end
-      return nil
+      nil
     end
 
     def help(command = nil)
@@ -200,23 +200,23 @@ module Shell
       puts
       puts 'COMMAND GROUPS:'
       ::Shell.command_groups.each do |name, group|
-        puts "  Group name: " + name
-        puts "  Commands: " + group[:command_names].sort.join(', ')
+        puts '  Group name: ' + name
+        puts '  Commands: ' + group[:command_names].sort.join(', ')
         puts
       end
       unless command
         puts 'SHELL USAGE:'
         help_footer
       end
-      return nil
+      nil
     end
 
     def help_header
-      return "HBase Shell, version #{org.apache.hadoop.hbase.util.VersionInfo.getVersion()}, " +
-        "r#{org.apache.hadoop.hbase.util.VersionInfo.getRevision()}, " +
-        "#{org.apache.hadoop.hbase.util.VersionInfo.getDate()}" + "\n" +
-        "Type 'help \"COMMAND\"', (e.g. 'help \"get\"' -- the quotes are necessary) for help on a specific command.\n" +
-        "Commands are grouped. Type 'help \"COMMAND_GROUP\"', (e.g. 'help \"general\"') for help on a command group."
+      "HBase Shell, version #{org.apache.hadoop.hbase.util.VersionInfo.getVersion}, " \
+        "r#{org.apache.hadoop.hbase.util.VersionInfo.getRevision}, " \
+        "#{org.apache.hadoop.hbase.util.VersionInfo.getDate}" + "\n" \
+        "Type 'help \"COMMAND\"', (e.g. 'help \"get\"' -- the quotes are necessary) for help on a specific command.\n" \
+        "Commands are grouped. Type 'help \"COMMAND_GROUP\"', (e.g. 'help \"general\"') for help on a command group."
     end
 
     def help_footer
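The camelize tweak in load_command above swaps the Perl-style $1 global for Regexp.last_match(1); both read the same MatchData, so behavior is unchanged, while the explicit call satisfies style checks. The mapping it performs, in isolation:

    def camelize(name)
      name.to_s.gsub(/(?:^|_)(.)/) { Regexp.last_match(1).upcase }
    end

    puts camelize('abort_procedure')  # => AbortProcedure (matches Commands::AbortProcedure)
    puts camelize('list_labels')      # => ListLabels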
@@ -253,8 +253,8 @@ require 'shell/commands'
 # Load all commands
 Shell.load_command_group(
   'general',
-  :full_name => 'GENERAL HBASE SHELL COMMANDS',
-  :commands => %w[
+  full_name: 'GENERAL HBASE SHELL COMMANDS',
+  commands: %w[
     status
     version
     table_help
@@ -265,8 +265,8 @@ Shell.load_command_group(
 Shell.load_command_group(
   'ddl',
-  :full_name => 'TABLES MANAGEMENT COMMANDS',
-  :commands => %w[
+  full_name: 'TABLES MANAGEMENT COMMANDS',
+  commands: %w[
     alter
     create
     describe
@@ -287,15 +287,15 @@ Shell.load_command_group(
     locate_region
     list_regions
   ],
-  :aliases => {
+  aliases: {
     'describe' => ['desc']
   }
 )
 
 Shell.load_command_group(
   'namespace',
-  :full_name => 'NAMESPACE MANAGEMENT COMMANDS',
-  :commands => %w[
+  full_name: 'NAMESPACE MANAGEMENT COMMANDS',
+  commands: %w[
     create_namespace
     drop_namespace
     alter_namespace
@@ -307,8 +307,8 @@ Shell.load_command_group(
 Shell.load_command_group(
   'dml',
-  :full_name => 'DATA MANIPULATION COMMANDS',
-  :commands => %w[
+  full_name: 'DATA MANIPULATION COMMANDS',
+  commands: %w[
     count
     delete
     deleteall
@@ -326,9 +326,9 @@ Shell.load_command_group(
 Shell.load_command_group(
   'tools',
-  :full_name => 'HBASE SURGERY TOOLS',
-  :comment => "WARNING: Above commands are for 'experts'-only as misuse can damage an install",
-  :commands => %w[
+  full_name: 'HBASE SURGERY TOOLS',
+  comment: "WARNING: Above commands are for 'experts'-only as misuse can damage an install",
+  commands: %w[
     assign
     balancer
     balance_switch
@@ -359,16 +359,16 @@ Shell.load_command_group(
     splitormerge_enabled
     clear_compaction_queues
   ],
-  # TODO remove older hlog_roll command
-  :aliases => {
+  # TODO: remove older hlog_roll command
+  aliases: {
     'wal_roll' => ['hlog_roll']
   }
 )
 
 Shell.load_command_group(
   'replication',
-  :full_name => 'CLUSTER REPLICATION TOOLS',
-  :commands => %w[
+  full_name: 'CLUSTER REPLICATION TOOLS',
+  commands: %w[
     add_peer
     remove_peer
     list_peers
@@ -393,8 +393,8 @@ Shell.load_command_group(
 Shell.load_command_group(
   'snapshots',
-  :full_name => 'CLUSTER SNAPSHOT TOOLS',
-  :commands => %w[
+  full_name: 'CLUSTER SNAPSHOT TOOLS',
+  commands: %w[
     snapshot
     clone_snapshot
     restore_snapshot
@@ -408,8 +408,8 @@ Shell.load_command_group(
 Shell.load_command_group(
   'configuration',
-  :full_name => 'ONLINE CONFIGURATION TOOLS',
-  :commands => %w[
+  full_name: 'ONLINE CONFIGURATION TOOLS',
+  commands: %w[
     update_config
     update_all_config
   ]
@@ -417,8 +417,8 @@ Shell.load_command_group(
 Shell.load_command_group(
   'quotas',
-  :full_name => 'CLUSTER QUOTAS TOOLS',
-  :commands => %w[
+  full_name: 'CLUSTER QUOTAS TOOLS',
+  commands: %w[
     set_quota
     list_quotas
     list_quota_table_sizes
@@ -428,9 +428,9 @@ Shell.load_command_group(
 Shell.load_command_group(
   'security',
-  :full_name => 'SECURITY TOOLS',
-  :comment => "NOTE: Above commands are only applicable if running with the AccessController coprocessor",
-  :commands => %w[
+  full_name: 'SECURITY TOOLS',
+  comment: 'NOTE: Above commands are only applicable if running with the AccessController coprocessor',
+  commands: %w[
     list_security_capabilities
     grant
     revoke
@@ -440,8 +440,8 @@ Shell.load_command_group(
 Shell.load_command_group(
   'procedures',
-  :full_name => 'PROCEDURES & LOCKS MANAGEMENT',
-  :commands => %w[
+  full_name: 'PROCEDURES & LOCKS MANAGEMENT',
+  commands: %w[
     abort_procedure
     list_procedures
     list_locks
@@ -450,9 +450,9 @@ Shell.load_command_group(
 Shell.load_command_group(
   'visibility labels',
-  :full_name => 'VISIBILITY LABEL TOOLS',
-  :comment => "NOTE: Above commands are only applicable if running with the VisibilityController coprocessor",
-  :commands => %w[
+  full_name: 'VISIBILITY LABEL TOOLS',
+  comment: 'NOTE: Above commands are only applicable if running with the VisibilityController coprocessor',
+  commands: %w[
     add_labels
     list_labels
     set_auths
@@ -464,10 +464,10 @@ Shell.load_command_group(
 Shell.load_command_group(
   'rsgroup',
-  :full_name => 'RSGroups',
-  :comment => "NOTE: The rsgroup Coprocessor Endpoint must be enabled on the Master else commands fail with:
+  full_name: 'RSGroups',
+  comment: "NOTE: The rsgroup Coprocessor Endpoint must be enabled on the Master else commands fail with:
   UnknownProtocolException: No registered Master Coprocessor Endpoint found for RSGroupAdminService",
-  :commands => %w[
+  commands: %w[
     list_rsgroups
     get_rsgroup
     add_rsgroup
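All of the load_command_group registrations above move from hash rockets on symbol keys to the Ruby 1.9 shorthand. The two spellings construct identical hashes, so nothing downstream changes:

    old_style = { :full_name => 'GENERAL HBASE SHELL COMMANDS', :commands => %w[status version] }
    new_style = { full_name: 'GENERAL HBASE SHELL COMMANDS', commands: %w[status version] }

    puts old_style == new_style  # => true

String keys such as 'wal_roll' keep the rocket, since the shorthand only works for symbol keys.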
http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/shell/commands.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/shell/commands.rb b/hbase-shell/src/main/ruby/shell/commands.rb
index 5f7a2e6..94b5667 100644
--- a/hbase-shell/src/main/ruby/shell/commands.rb
+++ b/hbase-shell/src/main/ruby/shell/commands.rb
@@ -22,12 +22,11 @@ require 'shell/formatter'
 module Shell
   module Commands
     class Command
-
       def initialize(shell)
         @shell = shell
       end
 
-      #wrap an execution of cmd to catch hbase exceptions
+      # wrap an execution of cmd to catch hbase exceptions
       # cmd - command name to execute
       # args - arguments to pass to the command
       def command_safe(debug, cmd = :command, *args)
@@ -35,7 +34,7 @@ module Shell
         # See count.rb for example.
         @start_time = Time.now
         # send is internal ruby method to call 'cmd' with *args
-        #(everything is a message, so this is just the formal semantics to support that idiom)
+        # (everything is a message, so this is just the formal semantics to support that idiom)
         translate_hbase_exceptions(*args) { send(cmd, *args) }
       rescue => e
         rootCause = e
@@ -57,7 +56,7 @@ module Shell
       ensure
         # If end_time is not already set by the command, use current time.
         @end_time ||= Time.now
-        formatter.output_str("Took %.4f seconds" % [@end_time - @start_time])
+        formatter.output_str(format('Took %.4f seconds', @end_time - @start_time))
       end
 
       # Convenience functions to get different admins
@@ -109,44 +108,40 @@ module Shell
         yield
       rescue => cause
         # let individual command handle exceptions first
-        if self.respond_to?(:handle_exceptions)
-          self.handle_exceptions(cause, *args)
-        end
+        handle_exceptions(cause, *args) if respond_to?(:handle_exceptions)
         # Global HBase exception handling below if not handled by respective command above
-        if cause.kind_of?(org.apache.hadoop.hbase.TableNotFoundException) then
+        if cause.is_a?(org.apache.hadoop.hbase.TableNotFoundException)
           raise "Unknown table #{args.first}!"
         end
-        if cause.kind_of?(org.apache.hadoop.hbase.UnknownRegionException) then
+        if cause.is_a?(org.apache.hadoop.hbase.UnknownRegionException)
           raise "Unknown region #{args.first}!"
         end
-        if cause.kind_of?(org.apache.hadoop.hbase.NamespaceNotFoundException) then
+        if cause.is_a?(org.apache.hadoop.hbase.NamespaceNotFoundException)
          raise "Unknown namespace #{args.first}!"
         end
-        if cause.kind_of?(org.apache.hadoop.hbase.snapshot.SnapshotDoesNotExistException) then
+        if cause.is_a?(org.apache.hadoop.hbase.snapshot.SnapshotDoesNotExistException)
           raise "Unknown snapshot #{args.first}!"
         end
-        if cause.kind_of?(org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException) then
+        if cause.is_a?(org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException)
           exceptions = cause.getCauses
           exceptions.each do |exception|
-            if exception.kind_of?(org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException) then
+            if exception.is_a?(org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException)
               valid_cols = table(args.first).get_all_columns.map { |c| c + '*' }
-              raise "Unknown column family! Valid column names: #{valid_cols.join(", ")}"
+              raise "Unknown column family! Valid column names: #{valid_cols.join(', ')}"
             end
           end
         end
-        if cause.kind_of?(org.apache.hadoop.hbase.TableExistsException) then
+        if cause.is_a?(org.apache.hadoop.hbase.TableExistsException)
           raise "Table already exists: #{args.first}!"
         end
         # To be safe, here only AccessDeniedException is considered. In future
         # we might support more in a more generic approach when possible.
-        if cause.kind_of?(org.apache.hadoop.hbase.security.AccessDeniedException) then
-          str = java.lang.String.new("#{cause}")
+        if cause.is_a?(org.apache.hadoop.hbase.security.AccessDeniedException)
+          str = java.lang.String.new(cause.to_s)
           # Error message is merged with stack trace, reference StringUtils.stringifyException
           # This is to parse and get the error message from the whole.
           strs = str.split("\n")
-          if strs.size > 0 then
-            raise "#{strs[0]}"
-          end
+          raise (strs[0]).to_s unless strs.empty?
         end
 
         # Throw the other exception which hasn't been handled above
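translate_hbase_exceptions above is a chain of is_a? tests that rewrites raw Java exceptions into short shell-friendly messages and re-raises anything it does not recognize. Reduced to plain Ruby, with a stand-in error class in place of the Java types, the shape is:

    class TableNotFound < StandardError; end  # stand-in for the Java exception class

    def translate_exceptions(name)
      yield
    rescue => cause
      raise "Unknown table #{name}!" if cause.is_a?(TableNotFound)
      raise  # unrecognized errors propagate unchanged
    end

    translate_exceptions('t1') { raise TableNotFound }
    # => RuntimeError: Unknown table t1!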
http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/shell/commands/abort_procedure.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/shell/commands/abort_procedure.rb b/hbase-shell/src/main/ruby/shell/commands/abort_procedure.rb
index e69e133..28c7d85 100644
--- a/hbase-shell/src/main/ruby/shell/commands/abort_procedure.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/abort_procedure.rb
@@ -21,7 +21,7 @@ module Shell
   module Commands
     class AbortProcedure < Command
       def help
-        return <<-EOF
+        <<-EOF
Given a procedure Id (and optional boolean may_interrupt_if_running parameter,
default is true), abort a procedure in hbase. Use with caution. Some procedures
might not be abortable. For experts only.
@@ -39,9 +39,9 @@ Examples:
 EOF
       end
 
-      def command(proc_id, may_interrupt_if_running=nil)
+      def command(proc_id, may_interrupt_if_running = nil)
         formatter.row([admin.abort_procedure?(proc_id, may_interrupt_if_running).to_s])
       end
     end
   end
-end
\ No newline at end of file
+end
http://git-wip-us.apache.org/repos/asf/hbase/blob/97a32318/hbase-shell/src/main/ruby/shell/commands/add_labels.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/shell/commands/add_labels.rb b/hbase-shell/src/main/ruby/shell/commands/add_labels.rb
index 7bde5fb..b021b92 100644
--- a/hbase-shell/src/main/ruby/shell/commands/add_labels.rb
+++ b/hbase-shell/src/main/ruby/shell/commands/add_labels.rb
@@ -20,7 +20,7 @@ module Shell
   module Commands
     class AddLabels < Command
      def help
-        return <<-EOF
+        <<-EOF
Add a set of visibility labels.
Syntax : add_labels [label1, label2]
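For reference, the two commands whose files close out this diff are driven from the shell like this (the procedure id and label values below are made up):

    hbase> abort_procedure 1234          # may_interrupt_if_running defaults to true
    hbase> abort_procedure 1234, false
    hbase> add_labels ['SECRET', 'PRIVATE']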
