This is an automated email from the ASF dual-hosted git repository.

nihaljain pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/branch-2 by this push:
     new 1c93e96fcf2 HBASE-29026 Replace some deprecated calls (#6658)
1c93e96fcf2 is described below

commit 1c93e96fcf298e063fb6c8f396e45c49b7fc714b
Author: Dávid Paksy <[email protected]>
AuthorDate: Thu Feb 20 07:11:20 2025 +0100

    HBASE-29026 Replace some deprecated calls (#6658)
    
    - Replaced the usage of the following deprecated methods:
      - java.net.URLEncoder.encode(String) -> java.net.URLEncoder.encode(String, String)
      - StringUtils.humanReadableInt(long) -> StringUtils.TraditionalBinaryPrefix.long2String(long, "", 1): for this, a new static util method is introduced: org.apache.hadoop.hbase.util.Strings.humanReadableInt
      - org.apache.hadoop.fs.FileSystem.getLength(Path) -> getFileStatus(Path).getLen()
      - org.apache.hadoop.hbase.ServerName.getStartcode() -> org.apache.hadoop.hbase.ServerName.getStartCode()
    - Also removed unused imports in the touched JSP files.
    
    (cherry picked from commit e074dabb08a8bd17c4921b2a25519dfefd05e4c6)
    
    Signed-off-by: Istvan Toth <[email protected]>
    Signed-off-by: Nihal Jain <[email protected]>
---
 .../java/org/apache/hadoop/hbase/util/Strings.java | 12 ++++++
 .../hadoop/hbase/mapreduce/PutSortReducer.java     |  4 +-
 .../hbase/mapreduce/TableInputFormatBase.java      |  3 +-
 .../hadoop/hbase/mapreduce/TextSortReducer.java    |  4 +-
 .../hadoop/hbase/snapshot/ExportSnapshot.java      | 25 ++++++------
 .../hadoop/hbase/ScanPerformanceEvaluation.java    | 46 +++++++++++-----------
 .../hbase/tmpl/master/MasterStatusTmpl.jamon       |  3 +-
 .../hadoop/hbase/io/hfile/HFileBlockIndex.java     |  6 +--
 .../hadoop/hbase/regionserver/wal/MetricsWAL.java  |  4 +-
 .../apache/hadoop/hbase/snapshot/SnapshotInfo.java |  4 +-
 .../resources/hbase-webapps/master/procedures.jsp  | 14 +------
 .../main/resources/hbase-webapps/master/quotas.jsp |  1 -
 .../resources/hbase-webapps/master/rsgroup.jsp     |  2 +-
 .../resources/hbase-webapps/master/snapshot.jsp    |  8 ++--
 .../hbase-webapps/master/snapshotsStats.jsp        | 20 +++++-----
 .../main/resources/hbase-webapps/master/table.jsp  | 22 +++++------
 .../hbase-webapps/master/tablesDetailed.jsp        |  2 -
 .../hbase-webapps/regionserver/region.jsp          |  3 +-
 .../regionserver/TestHRegionReplayEvents.java      |  6 +--
 .../compactions/MockStoreFileGenerator.java        |  4 +-
 .../hadoop/hbase/util/MultiThreadedAction.java     |  3 +-
 21 files changed, 96 insertions(+), 100 deletions(-)
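
For quick orientation before the diff, here is a minimal, self-contained Java sketch of the four replacement patterns listed in the commit message. It is illustrative only and not part of the patch; the wrapper class name DeprecatedCallPatterns is hypothetical, while every call shown is one actually used in the diff below.

    import java.io.IOException;
    import java.io.UnsupportedEncodingException;
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.ServerName;
    import org.apache.hadoop.hbase.util.Strings;

    public class DeprecatedCallPatterns {
      // Old: URLEncoder.encode(host) -- deprecated; it uses the platform default charset.
      static String encodeHost(String host) throws UnsupportedEncodingException {
        return URLEncoder.encode(host, StandardCharsets.UTF_8.toString());
      }

      // Old: org.apache.hadoop.util.StringUtils.humanReadableInt(bytes) -- deprecated.
      static String humanReadable(long bytes) {
        return Strings.humanReadableInt(bytes);
      }

      // Old: fs.getLength(path) -- deprecated.
      static long fileLength(FileSystem fs, Path path) throws IOException {
        return fs.getFileStatus(path).getLen();
      }

      // Old: sn.getStartcode() -- deprecated casing.
      static long startCode(ServerName sn) {
        return sn.getStartCode();
      }
    }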

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java
index 37a54abe00b..49f8d97e744 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java
@@ -134,4 +134,16 @@ public final class Strings {
   public static void applyURIQueriesToConf(URI uri, Configuration conf) {
     parseURIQueries(uri).forEach(conf::set);
   }
+
+  /**
+   * Note: This method was taken from org.apache.hadoop.util.StringUtils.humanReadableInt(long).
+   * Reason: that method got deprecated and this method provides an easy-to-understand usage of
+   * StringUtils.TraditionalBinaryPrefix.long2String. Given an integer, return a string that is in
+   * an approximate, but human readable format.
+   * @param number the number to format
+   * @return a human readable form of the integer
+   */
+  public static String humanReadableInt(long number) {
+    return org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.long2String(number, "", 1);
+  }
 }
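
A minimal usage sketch of the helper added above (assuming hbase-common is on the classpath; the exact output string is whatever Hadoop's TraditionalBinaryPrefix.long2String produces, so treat it as approximate):

    long curSize = 3L * 1024 * 1024;
    // Approximate, human-readable rendering of 3 MiB, as used in the status/log messages below.
    String readable = org.apache.hadoop.hbase.util.Strings.humanReadableInt(curSize);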
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
index b4061d6be6a..60c83e206f2 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
@@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
@@ -122,7 +122,7 @@ public class PutSortReducer
         }
       }
      context.setStatus("Read " + map.size() + " entries of " + map.getClass() + "("
-        + StringUtils.humanReadableInt(curSize) + ")");
+        + Strings.humanReadableInt(curSize) + ")");
       int index = 0;
       for (KeyValue kv : map) {
         context.write(row, kv);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index b02517451bc..7caf98a856b 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.net.DNS;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -171,7 +170,7 @@ public abstract class TableInputFormatBase extends InputFormat<ImmutableBytesWri
       throw new IOException(INITIALIZATION_ERROR, exception);
     }
     TableSplit tSplit = (TableSplit) split;
-    LOG.info("Input split length: " + StringUtils.humanReadableInt(tSplit.getLength()) + " bytes.");
+    LOG.info("Input split length: " + Strings.humanReadableInt(tSplit.getLength()) + " bytes.");
     final TableRecordReader trr =
      this.tableRecordReader != null ? this.tableRecordReader : new TableRecordReader();
     Scan sc = new Scan(this.scan);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
index b374aa86c01..0c5e220b2b9 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
@@ -35,10 +35,10 @@ import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.security.visibility.InvalidLabelException;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
@@ -187,7 +187,7 @@ public class TextSortReducer
         }
       }
      context.setStatus("Read " + kvs.size() + " entries of " + kvs.getClass() + "("
-        + StringUtils.humanReadableInt(curSize) + ")");
+        + Strings.humanReadableInt(curSize) + ")");
       int index = 0;
       for (KeyValue kv : kvs) {
         context.write(rowKey, kv);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 85ef9dc574a..6e4530eb491 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -58,6 +58,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HFileArchiveUtil;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
@@ -232,7 +233,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
       // Use the default block size of the outputFs if bigger
      int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);
       bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);
-      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));
+      LOG.info("Using bufferSize=" + Strings.humanReadableInt(bufferSize));
 
       for (Counter c : Counter.values()) {
         context.getCounter(c).increment(0);
@@ -333,10 +334,9 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
         long etime = EnvironmentEdgeManager.currentTime();
        LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);
-        LOG
-          .info("size=" + totalBytesWritten + " (" + StringUtils.humanReadableInt(totalBytesWritten)
-            + ")" + " time=" + StringUtils.formatTimeDiff(etime, stime) + String
-              .format(" %.3fM/sec", (totalBytesWritten / ((etime - stime) / 1000.0)) / 1048576.0));
+        LOG.info("size=" + totalBytesWritten + " (" + Strings.humanReadableInt(totalBytesWritten)
+          + ")" + " time=" + StringUtils.formatTimeDiff(etime, stime) + String.format(" %.3fM/sec",
+            (totalBytesWritten / ((etime - stime) / 1000.0)) / 1048576.0));
         context.getCounter(Counter.FILES_COPIED).increment(1);
 
         // Try to Preserve attributes
@@ -428,7 +428,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
      final Path outputPath, final FSDataOutputStream out, final long inputFileSize)
       throws IOException {
       final String statusMessage =
-        "copied %s/" + StringUtils.humanReadableInt(inputFileSize) + " (%.1f%%)";
+        "copied %s/" + Strings.humanReadableInt(inputFileSize) + " (%.1f%%)";
 
       try {
         byte[] buffer = new byte[bufferSize];
@@ -443,8 +443,8 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
           if (reportBytes >= REPORT_SIZE) {
             context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);
-            context.setStatus(
-              String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten),
+            context
+              .setStatus(String.format(statusMessage, Strings.humanReadableInt(totalBytesWritten),
                (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath
                + " to " + outputPath);
             reportBytes = 0;
@@ -452,10 +452,9 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
         }
 
         context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);
-        context
-          .setStatus(String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten),
-            (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath + " to "
-            + outputPath);
+        context.setStatus(String.format(statusMessage, Strings.humanReadableInt(totalBytesWritten),
+          (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath + " to "
+          + outputPath);
 
         return totalBytesWritten;
       } finally {
@@ -748,7 +747,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
     if (LOG.isDebugEnabled()) {
       for (int i = 0; i < sizeGroups.length; ++i) {
-        LOG.debug("export split=" + i + " size=" + StringUtils.humanReadableInt(sizeGroups[i]));
+        LOG.debug("export split=" + i + " size=" + Strings.humanReadableInt(sizeGroups[i]));
       }
     }
 
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
index af39a7521cd..aa188548c87 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
@@ -36,11 +36,11 @@ import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableMapper;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -125,8 +125,8 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
      .println("total time to open: " + fileOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
    System.out.println("total time to read: " + streamTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throghput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throghput  : " + Strings.humanReadableInt((long) throughput) + "B/s");
   }
 
   private Scan getScan() {
@@ -189,14 +189,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
     System.out.println("Scan metrics:\n" + metrics.getMetricsMap());
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) throughput) + "B/s");
    System.out.println("total rows  : " + numRows);
    System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
    System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");
   }
 
   public void testSnapshotScan() throws IOException {
@@ -246,14 +246,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
     System.out.println("Scan metrics:\n" + metrics.getMetricsMap());
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) throughput) + "B/s");
    System.out.println("total rows  : " + numRows);
    System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
    System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");
 
   }
 
@@ -311,14 +311,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
    System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) throughput) + "B/s");
    System.out.println("total rows  : " + numRows);
    System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
    System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");
   }
 
   public void testSnapshotScanMapReduce()
@@ -362,14 +362,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
    System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) throughput) + "B/s");
    System.out.println("total rows  : " + numRows);
    System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
    System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");
   }
 
   @Override
diff --git a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 5452d174a57..b57e76900df 100644
--- a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -64,6 +64,7 @@ org.apache.hadoop.hbase.util.CommonFSUtils;
 org.apache.hadoop.hbase.util.JvmVersion;
 org.apache.hadoop.hbase.util.PrettyPrinter;
 org.apache.hadoop.util.StringUtils;
+org.apache.hadoop.hbase.util.Strings;
 </%import>
 
 <%if format.equals("json") %>
@@ -786,7 +787,7 @@ AssignmentManager assignmentManager = master.getAssignmentManager();
         <td><% peerConfig.getReplicationEndpointImpl() %></td>
         <td><% peer.isEnabled() ? "ENABLED" : "DISABLED" %></td>
         <td><% peerConfig.isSerial() %></td>
-        <td><% peerConfig.getBandwidth() == 0? "UNLIMITED" : StringUtils.humanReadableInt(peerConfig.getBandwidth()) %></td>
+        <td><% peerConfig.getBandwidth() == 0? "UNLIMITED" : Strings.humanReadableInt(peerConfig.getBandwidth()) %></td>
         <td><% peerConfig.replicateAllUserTables() %></td>
         <td>
           <% peerConfig.getNamespaces() == null ? "" : ReplicationPeerConfigUtil.convertToString(peerConfig.getNamespaces()).replaceAll(";", "; ") %>
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
index 6f610af1972..00a14134c77 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
@@ -46,8 +46,8 @@ import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.ObjectIntPair;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -1091,8 +1091,8 @@ public class HFileBlockIndex {
        LOG.trace("Wrote a " + numLevels + "-level index with root level at pos "
          + rootLevelIndexPos + ", " + rootChunk.getNumEntries() + " root-level entries, "
           + totalNumEntries + " total entries, "
-          + StringUtils.humanReadableInt(this.totalBlockOnDiskSize) + " on-disk size, "
-          + StringUtils.humanReadableInt(totalBlockUncompressedSize) + " total uncompressed size.");
+          + Strings.humanReadableInt(this.totalBlockOnDiskSize) + " on-disk size, "
+          + Strings.humanReadableInt(totalBlockUncompressedSize) + " total uncompressed size.");
       }
       return rootLevelIndexPos;
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
index 89481161f4a..44241d92895 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
@@ -21,9 +21,9 @@ import java.io.IOException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.wal.WALKey;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -63,7 +63,7 @@ public class MetricsWAL implements WALActionsListener {
     if (time > 1000) {
       source.incrementSlowAppendCount();
       LOG.warn(String.format("%s took %d ms appending an edit to wal; len~=%s",
-        Thread.currentThread().getName(), time, StringUtils.humanReadableInt(size)));
+        Thread.currentThread().getName(), time, Strings.humanReadableInt(size)));
     }
   }
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index fb48b03d69a..151319e165a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.WALLink;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -525,7 +525,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
   }
 
   private String fileSizeToString(long size) {
-    return printSizeInBytes ? Long.toString(size) : StringUtils.humanReadableInt(size);
+    return printSizeInBytes ? Long.toString(size) : Strings.humanReadableInt(size);
   }
 
   @Override
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
index c25c5c3886b..accc9043e80 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
@@ -21,29 +21,17 @@
   import="static org.apache.commons.lang3.StringEscapeUtils.escapeXml"
   import="java.util.Collections"
   import="java.util.Comparator"
-  import="java.util.ArrayList"
   import="java.util.Date"
   import="java.util.List"
-  import="java.util.Set"
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv"
   import="org.apache.hadoop.hbase.procedure2.LockedResource"
   import="org.apache.hadoop.hbase.procedure2.Procedure"
   import="org.apache.hadoop.hbase.procedure2.ProcedureExecutor"
-  import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
-  import="org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix"
 %>
-<%@ page import="org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.master.assignment.OpenRegionProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.master.assignment.CloseRegionProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.metrics.OperationMetrics" %>
 <%@ page import="java.util.Map" %>
-<%@ page import="java.util.HashMap" %>
-<%@ page import="org.apache.hadoop.hbase.master.MetricsAssignmentManagerSource" %>
 <%@ page import="org.apache.hadoop.hbase.master.MetricsAssignmentManager" %>
 <%@ page import="org.apache.hadoop.hbase.procedure2.ProcedureMetrics" %>
-<%@ page import="org.apache.hadoop.hbase.metrics.Snapshot" %>
 <%@ page import="org.apache.hadoop.hbase.metrics.Histogram" %>
 <%@ page import="java.util.TreeMap" %>
 <%@ page import="org.apache.hadoop.hbase.metrics.impl.HistogramImpl" %>
@@ -90,7 +78,7 @@
       <h1>Procedure Time Statistics</h1>
     </div>
   </div>
-  <p>We list proceduces completed successfully of the following types only: ServerCrashProcedure, TransitRegionStateProcedure,
+  <p>We list procedures completed successfully of the following types only: ServerCrashProcedure, TransitRegionStateProcedure,
     OpenRegionProcedure, CloseRegionProcedure.</p>
   <table class="table table-striped" width="90%" >
     <tr>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp b/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp
index a52085b529f..575994f50c1 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp
@@ -21,7 +21,6 @@
   import="java.util.concurrent.TimeUnit"
   import="java.util.ArrayList"
   import="java.util.List"
-  import="org.apache.hadoop.conf.Configuration"
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.quotas.MasterQuotaManager"
   import="org.apache.hadoop.hbase.quotas.QuotaRetriever"
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp b/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp
index 6ba6f78a273..ec7ec5dda6b 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp
@@ -169,7 +169,7 @@
                      totalRequestsPerSecond += sl.getRequestCountPerSecond();
                     lastContact = (System.currentTimeMillis() - sl.getReportTimestamp())/1000;
                    }
-                   long startcode = serverName.getStartcode();
+                   long startcode = serverName.getStartCode();
                    int infoPort = master.getRegionServerInfoPort(serverName);
                   String url = "//" + serverName.getHostname() + ":" + infoPort + "/rs-status";%>
                    <tr>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
index e85cab95d7e..35b2967fc1c 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
@@ -26,7 +26,7 @@
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
   import="org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils"
-  import="org.apache.hadoop.util.StringUtils"
+  import="org.apache.hadoop.hbase.util.Strings"
   import="org.apache.hadoop.hbase.TableName"
 %>
 <%
@@ -134,14 +134,14 @@
   <div class="row">
     <div class="span12">
    <%= stats.getStoreFilesCount() %> HFiles (<%= stats.getArchivedStoreFilesCount() %> in archive),
-    total size <%= StringUtils.humanReadableInt(stats.getStoreFilesSize()) %>
+    total size <%= Strings.humanReadableInt(stats.getStoreFilesSize()) %>
     (<%= stats.getSharedStoreFilePercentage() %>&#37;
-    <%= StringUtils.humanReadableInt(stats.getSharedStoreFilesSize()) %> shared with the source
+    <%= Strings.humanReadableInt(stats.getSharedStoreFilesSize()) %> shared with the source
     table)
     </div>
     <div class="span12">
     <%= stats.getLogsCount() %> Logs, total size
-    <%= StringUtils.humanReadableInt(stats.getLogsSize()) %>
+    <%= Strings.humanReadableInt(stats.getLogsSize()) %>
     </div>
   </div>
   <% if (stats.isSnapshotCorrupted()) { %>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
index 6202d7409b5..bc913188282 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
@@ -28,7 +28,7 @@
   import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
   import="org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils"
   import="org.apache.hadoop.hbase.TableName"
-  import="org.apache.hadoop.util.StringUtils"
+  import="org.apache.hadoop.hbase.util.Strings"
   import="org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription"
 %>
 <%@ page import="org.apache.hadoop.hbase.util.PrettyPrinter" %>
@@ -99,18 +99,18 @@
       <td>
        <%= SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDesc.getTtl(), snapshotDesc.getCreationTime(), System.currentTimeMillis()) ? "Yes" : "No" %>
       </td>
-      <td><%= StringUtils.humanReadableInt(stats.getSharedStoreFilesSize()) %></td>
-      <td><%= StringUtils.humanReadableInt(stats.getMobStoreFilesSize())  %></td>
-      <td><%= StringUtils.humanReadableInt(stats.getArchivedStoreFileSize()) %>
-        (<%= StringUtils.humanReadableInt(stats.getNonSharedArchivedStoreFilesSize()) %>)</td>
+      <td><%= Strings.humanReadableInt(stats.getSharedStoreFilesSize()) %></td>
+      <td><%= Strings.humanReadableInt(stats.getMobStoreFilesSize())  %></td>
+      <td><%= Strings.humanReadableInt(stats.getArchivedStoreFileSize()) %>
+        (<%= Strings.humanReadableInt(stats.getNonSharedArchivedStoreFilesSize()) %>)</td>
     </tr>
     <% } %>
     <p><%= snapshots.size() %> snapshot(s) in set.</p>
-    <p>Total Storefile Size: <%= StringUtils.humanReadableInt(totalSize) %></p>
-    <p>Total Shared Storefile Size: <%= StringUtils.humanReadableInt(totalSharedSize.get()) %>,
-       Total Mob Storefile Size: <%= StringUtils.humanReadableInt(totalMobSize.get()) %>,
-       Total Archived Storefile Size: <%= StringUtils.humanReadableInt(totalArchivedSize.get()) %>
-       (<%= StringUtils.humanReadableInt(totalUnsharedArchivedSize) %>)</p>
+    <p>Total Storefile Size: <%= Strings.humanReadableInt(totalSize) %></p>
+    <p>Total Shared Storefile Size: <%= Strings.humanReadableInt(totalSharedSize.get()) %>,
+       Total Mob Storefile Size: <%= Strings.humanReadableInt(totalMobSize.get()) %>,
+       Total Archived Storefile Size: <%= Strings.humanReadableInt(totalArchivedSize.get()) %>
+       (<%= Strings.humanReadableInt(totalUnsharedArchivedSize) %>)</p>
    <p>Shared Storefile Size is the Storefile size shared between snapshots and active tables.
       Mob Storefile Size is the Mob Storefile size shared between snapshots and active tables.
        Archived Storefile Size is the Storefile size in Archive.
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
index fdec6849de0..725353f9b86 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
@@ -76,6 +76,8 @@
 <%@ page import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota" %>
 <%@ page import="java.net.URLEncoder" %>
 <%@ page import="java.util.stream.Collectors" %>
+<%@ page import="java.nio.charset.StandardCharsets" %>
+<%@ page import="java.io.UnsupportedEncodingException" %>
 <%!
   /**
   * @return An empty region load stamped with the passed in <code>regionInfo</code>
@@ -115,7 +117,7 @@
    * @return an <td> tag contents server name links to server rs-status page.
    */
  private static String buildRegionDeployedServerTag(RegionInfo regionInfo, HMaster master,
-    Map<RegionInfo, ServerName> regionsToServer) {
+                                                     Map<RegionInfo, ServerName> regionsToServer) throws UnsupportedEncodingException {
     ServerName serverName = regionsToServer.get(regionInfo);
 
     if (serverName == null) {
@@ -123,7 +125,7 @@
     }
 
     String hostName = serverName.getHostname();
-    String hostNameEncoded = URLEncoder.encode(hostName);
+    String hostNameEncoded = URLEncoder.encode(hostName, StandardCharsets.UTF_8.toString());
     // This port might be wrong if RS actually ended up using something else.
     int serverInfoPort = master.getRegionServerInfoPort(serverName);
    String urlRegionServer = "//" + hostNameEncoded + ":" + serverInfoPort + "/rs-status";
@@ -135,9 +137,9 @@
   /**
    * @return an <p> tag guide user to see all region messages.
    */
-  private static String moreRegionsToRender(int numRegionsRendered, int numRegions, String fqtn) {
+  private static String moreRegionsToRender(int numRegionsRendered, int numRegions, String fqtn) throws UnsupportedEncodingException {
     if (numRegions > numRegionsRendered) {
-      String allRegionsUrl = "?name=" + URLEncoder.encode(fqtn) + "&numRegions=all";
+      String allRegionsUrl = "?name=" + URLEncoder.encode(fqtn, StandardCharsets.UTF_8.toString()) + "&numRegions=all";
 
       return "This table has <b>" + numRegions
        + "</b> regions in total, in order to improve the page load time, only <b>"
@@ -348,7 +350,7 @@
               if (metaLocation != null) {
                ServerMetrics sl = master.getServerManager().getLoad(metaLocation);
                // The host name portion should be safe, but I don't know how we handle IDNs so err on the side of failing safely.
-                hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation);
+                hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8.toString()) + ":" + master.getRegionServerInfoPort(metaLocation);
                 if (sl != null) {
                   Map<byte[], RegionMetrics> map = sl.getRegionMetrics();
                   if (map.containsKey(meta.getRegionName())) {
@@ -418,7 +420,7 @@
 
                if (metaLocation != null) {
                 ServerMetrics sl = master.getServerManager().getLoad(metaLocation);
-                 hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation);
+                 hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8.toString()) + ":" + master.getRegionServerInfoPort(metaLocation);
                  if (sl != null) {
                    Map<byte[], RegionMetrics> map = sl.getRegionMetrics();
                    if (map.containsKey(meta.getRegionName())) {
@@ -471,7 +473,7 @@
 
               if (metaLocation != null) {
                ServerMetrics sl = master.getServerManager().getLoad(metaLocation);
-                hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation);
+                hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8.toString()) + ":" + master.getRegionServerInfoPort(metaLocation);
                 if (sl != null) {
                   Map<byte[], RegionMetrics> map = sl.getRegionMetrics();
                   if (map.containsKey(meta.getRegionName())) {
@@ -1038,11 +1040,9 @@
           numRegionsRendered = 0;
           for (Map.Entry<RegionInfo, RegionMetrics> hriEntry : entryList) {
             RegionInfo regionInfo = hriEntry.getKey();
-            ServerName addr = regionsToServer.get(regionInfo);
             RegionMetrics load = hriEntry.getValue();
             float locality = 0.0f;
             float localityForSsd = 0.0f;
-            String state = "N/A";
             if (load != null) {
               locality = load.getDataLocality();
               localityForSsd = load.getDataLocalityForSsd();
@@ -1126,11 +1126,11 @@
   <%
     for (Map.Entry<ServerName, Integer> rdEntry : regDistribution.entrySet()) {
       ServerName addr = rdEntry.getKey();
-      String url = "//" + URLEncoder.encode(addr.getHostname()) + ":"
+      String url = "//" + URLEncoder.encode(addr.getHostname(), StandardCharsets.UTF_8.toString()) + ":"
         + master.getRegionServerInfoPort(addr) + "/rs-status";
   %>
       <tr>
-        <td><a href="<%= url %>"><%= StringEscapeUtils.escapeHtml4(addr.getHostname().toString())
+        <td><a href="<%= url %>"><%= StringEscapeUtils.escapeHtml4(addr.getHostname())
           + ":" + master.getRegionServerInfoPort(addr) %></a></td>
         <td><%= rdEntry.getValue()%></td>
        <td><%= primaryRegDistribution.get(addr) == null ? 0 : primaryRegDistribution.get(addr)%></td>
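
One note on the URLEncoder changes above: the String-charset overload is used, presumably because branch-2 still supports Java 8; it throws the checked UnsupportedEncodingException, which is why the JSP helper methods now declare it. On Java 10+ a Charset overload exists that avoids the checked exception entirely, roughly:

    // Java 10+ only; hostName stands in for whatever value is being encoded.
    String encoded = java.net.URLEncoder.encode(hostName, java.nio.charset.StandardCharsets.UTF_8);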
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp b/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp
index 7e0d623e686..b2f3938f56f 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp
@@ -19,10 +19,8 @@
 --%>
 <%@ page contentType="text/html;charset=UTF-8"
          import="static org.apache.commons.lang3.StringEscapeUtils.escapeXml"
-         import="java.io.IOException"
          import="java.util.ArrayList"
          import="java.util.List"
-         import="java.util.Map"
 %>
 <%@ page import="org.apache.hadoop.hbase.client.TableDescriptor" %>
 <%@ page import="org.apache.hadoop.hbase.master.HMaster" %>
diff --git a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
index f44b00301ac..1d98dd0225a 100644
--- a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
@@ -29,6 +29,7 @@
   import="org.apache.hadoop.hbase.regionserver.HRegion"
   import="org.apache.hadoop.hbase.regionserver.HStore"
 %>
+<%@ page import="java.nio.charset.StandardCharsets" %>
 <%
   String regionName = request.getParameter("name");
  HRegionServer rs = (HRegionServer) getServletContext().getAttribute(HRegionServer.REGIONSERVER);
@@ -89,7 +90,7 @@
             count++; %>
          <tr>
           <td><a href="storeFile.jsp?name=<%= sf.getEncodedPath() %>"><%= sf.getPath() %></a></td>
-           <td><%= (int) (fs.getLength(sf.getPath()) / 1024 / 1024) %></td>
+           <td><%= (int) (fs.getFileStatus(sf.getPath()).getLen() / 1024 / 1024) %></td>
            <td><%= new Date(sf.getModificationTimestamp()) %></td>
           <td><%= String.format("%,1d", sf.getFileInfo().getHFileInfo().getLenOfBiggestCell()) %></td>
           <td><%= sf.getFileInfo().getHFileInfo().getKeyOfBiggestCell() %></td>
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
index 4c7384a34ed..b278adc4773 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
@@ -72,6 +72,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
 import org.apache.hadoop.hbase.wal.NoEOFWALStreamReader;
 import org.apache.hadoop.hbase.wal.WAL;
@@ -80,7 +81,6 @@ import org.apache.hadoop.hbase.wal.WALFactory;
 import org.apache.hadoop.hbase.wal.WALKeyImpl;
 import org.apache.hadoop.hbase.wal.WALSplitUtil.MutationReplay;
 import org.apache.hadoop.hbase.wal.WALStreamReader;
-import org.apache.hadoop.util.StringUtils;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -388,7 +388,7 @@ public class TestHRegionReplayEvents {
           // assert that the store memstore is smaller now
           long newStoreMemstoreSize = store.getMemStoreSize().getHeapSize();
           LOG.info("Memstore size reduced by:"
-            + StringUtils.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
+            + Strings.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
           assertTrue(storeMemstoreSize > newStoreMemstoreSize);
 
         } else if (flushDesc.getAction() == FlushAction.COMMIT_FLUSH) {
@@ -489,7 +489,7 @@ public class TestHRegionReplayEvents {
           // assert that the store memstore is smaller now
           long newStoreMemstoreSize = store.getMemStoreSize().getHeapSize();
           LOG.info("Memstore size reduced by:"
-            + StringUtils.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
+            + Strings.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
           assertTrue(storeMemstoreSize > newStoreMemstoreSize);
           verifyData(secondaryRegion, 0, lastReplayed + 1, cq, families);
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
index 1638331c288..9bc2f6a5773 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
@@ -27,7 +27,7 @@ import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileReader;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.hbase.util.Strings;
 
 import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
 
@@ -71,7 +71,7 @@ class MockStoreFileGenerator {
     // this when selection takes this into account
     when(mockSf.getReader()).thenReturn(reader);
    String toString = MoreObjects.toStringHelper("MockStoreFile").add("isReference", false)
-      .add("fileSize", StringUtils.humanReadableInt(sizeInBytes)).add("seqId", seqId)
+      .add("fileSize", Strings.humanReadableInt(sizeInBytes)).add("seqId", seqId)
       .add("path", stringPath).toString();
     when(mockSf.toString()).thenReturn(toString);
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
index 8d5efd2fb95..a42506cfd7c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
@@ -39,7 +39,6 @@ import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
-import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -204,7 +203,7 @@ public abstract class MultiThreadedAction {
          double averageKeysPerSecond = (time > 0) ? (numKeys * 1000 / time) : 0;
 
           LOG.info(threadsLeft + "Keys=" + numKeys + ", cols="
-            + StringUtils.humanReadableInt(numCols.get()) + ", time=" + formatTime(time)
+            + Strings.humanReadableInt(numCols.get()) + ", time=" + formatTime(time)
             + ((numKeys > 0 && time > 0)
              ? (" Overall: [" + "keys/s= " + numKeys * 1000 / time + ", latency="
                + String.format("%.2f", (double) totalOpTime / (double) numKeys) + " ms]")

