This is an automated email from the ASF dual-hosted git repository.

nihaljain pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git


The following commit(s) were added to refs/heads/master by this push:
     new 2daf846c97d HBASE-29026 Replace some deprecated calls (#6585)
2daf846c97d is described below

commit 2daf846c97db3372678c6ed7b00ac97fcab43d9e
Author: Dávid Paksy <[email protected]>
AuthorDate: Mon Jan 27 11:23:39 2025 +0100

    HBASE-29026 Replace some deprecated calls (#6585)
    
    - Replaced the usage of the following deprecated methods:
      - java.net.URLEncoder.encode(String) -> java.net.URLEncoder.encode(String, Charset)
      - StringUtils.humanReadableInt(long) -> StringUtils.TraditionalBinaryPrefix.long2String(long, "", 1):
        For this a new static util method is introduced: org.apache.hadoop.hbase.util.Strings.humanReadableInt
      - org.apache.hadoop.fs.FileSystem.getLength(Path) -> getFileStatus(Path).getLen()
      - org.apache.hadoop.hbase.ServerName.getStartcode() -> org.apache.hadoop.hbase.ServerName.getStartCode()
    - Also removed unused imports in the touched JSP files.
    
    Signed-off-by: Istvan Toth <[email protected]>
    Signed-off-by: Nihal Jain <[email protected]>
---
 .../java/org/apache/hadoop/hbase/util/Strings.java | 12 ++++++
 .../hadoop/hbase/ScanPerformanceEvaluation.java    | 46 +++++++++++-----------
 .../hadoop/hbase/util/MultiThreadedAction.java     |  3 +-
 .../hadoop/hbase/mapreduce/PutSortReducer.java     |  4 +-
 .../hbase/mapreduce/TableInputFormatBase.java      |  3 +-
 .../hadoop/hbase/mapreduce/TextSortReducer.java    |  4 +-
 .../hadoop/hbase/snapshot/ExportSnapshot.java      | 25 ++++++------
 .../hbase/tmpl/master/MasterStatusTmpl.jamon       |  3 +-
 .../hadoop/hbase/io/hfile/HFileBlockIndex.java     |  6 +--
 .../hadoop/hbase/regionserver/wal/MetricsWAL.java  |  4 +-
 .../apache/hadoop/hbase/snapshot/SnapshotInfo.java |  4 +-
 .../resources/hbase-webapps/master/procedures.jsp  | 14 +------
 .../main/resources/hbase-webapps/master/quotas.jsp |  1 -
 .../resources/hbase-webapps/master/rsgroup.jsp     |  4 +-
 .../resources/hbase-webapps/master/snapshot.jsp    |  8 ++--
 .../hbase-webapps/master/snapshotsStats.jsp        | 20 +++++-----
 .../main/resources/hbase-webapps/master/table.jsp  | 19 +++++----
 .../hbase-webapps/master/tablesDetailed.jsp        |  2 -
 .../hbase-webapps/regionserver/region.jsp          |  6 +--
 .../regionserver/TestHRegionReplayEvents.java      |  6 +--
 .../compactions/MockStoreFileGenerator.java        |  4 +-
 21 files changed, 95 insertions(+), 103 deletions(-)
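
The four migrations named in the commit message share a common before/after
shape. The sketch below is illustrative only; the class, method, variable
names and sample arguments are invented for this note and are not code from
the patch:

    import java.io.IOException;
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.ServerName;
    import org.apache.hadoop.hbase.util.Strings;

    public class DeprecatedCallMigrations {
      void migrate(FileSystem fs, Path path, ServerName sn) throws IOException {
        // 1) URLEncoder: name the charset instead of relying on the platform default.
        //    Before: URLEncoder.encode("rs-host")  (deprecated)
        String encoded = URLEncoder.encode("rs-host", StandardCharsets.UTF_8);

        // 2) Size formatting: the new HBase wrapper introduced by this commit.
        //    Before: org.apache.hadoop.util.StringUtils.humanReadableInt(n)  (deprecated)
        String size = Strings.humanReadableInt(123456789L);

        // 3) File length: go through the FileStatus instead.
        //    Before: fs.getLength(path)  (deprecated)
        long len = fs.getFileStatus(path).getLen();

        // 4) Start code: the camel-cased accessor.
        //    Before: sn.getStartcode()  (deprecated)
        long startCode = sn.getStartCode();
      }
    }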

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java
index b5d760bf0d7..6759603f3aa 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Strings.java
@@ -135,4 +135,16 @@ public final class Strings {
   public static void applyURIQueriesToConf(URI uri, Configuration conf) {
     parseURIQueries(uri).forEach(conf::set);
   }
+
+  /**
+   * Note: This method was taken from org.apache.hadoop.util.StringUtils.humanReadableInt(long).
+   * Reason: that method got deprecated and this method provides an easy-to-understand usage of
+   * StringUtils.TraditionalBinaryPrefix.long2String. Given an integer, return a string that is in
+   * an approximate, but human readable format.
+   * @param number the number to format
+   * @return a human readable form of the integer
+   */
+  public static String humanReadableInt(long number) {
+    return org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.long2String(number, "", 1);
+  }
 }
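
In use, the new helper reads like the deprecated Hadoop method it replaces.
A minimal sketch (the sample values are invented; the exact rendering of the
result comes from Hadoop's TraditionalBinaryPrefix, so it may vary by Hadoop
version):

    import org.apache.hadoop.hbase.util.Strings;

    public class HumanReadableIntDemo {
      public static void main(String[] args) {
        // Delegates to TraditionalBinaryPrefix.long2String(number, "", 1):
        // one decimal place, 1024-based ("traditional binary") prefixes.
        System.out.println(Strings.humanReadableInt(10L * 1024 * 1024)); // ten mebibytes, scaled to the mega prefix
        System.out.println(Strings.humanReadableInt(512)); // below 1k, printed unscaled
      }
    }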
diff --git a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
index 983af877b98..889d769dbc4 100644
--- a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/ScanPerformanceEvaluation.java
@@ -36,11 +36,11 @@ import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.mapreduce.TableMapper;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Counters;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -125,8 +125,8 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
       .println("total time to open: " + fileOpenTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
     System.out.println("total time to read: " + streamTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throghput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throghput  : " + Strings.humanReadableInt((long) throughput) + "B/s");
   }
 
   private Scan getScan() {
@@ -189,14 +189,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
     System.out.println("Scan metrics:\n" + metrics.getMetricsMap());
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) throughput) + "B/s");
     System.out.println("total rows  : " + numRows);
     System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
     System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");
   }
 
   public void testSnapshotScan() throws IOException {
@@ -246,14 +246,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
     System.out.println("Scan metrics:\n" + metrics.getMetricsMap());
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) throughput) + "B/s");
     System.out.println("total rows  : " + numRows);
     System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
     System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");
 
   }
 
@@ -311,14 +311,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
     System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) throughput) + "B/s");
     System.out.println("total rows  : " + numRows);
     System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
     System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");
   }
 
   public void testSnapshotScanMapReduce()
@@ -362,14 +362,14 @@ public class ScanPerformanceEvaluation extends AbstractHBaseTool {
     System.out.println("total time to scan: " + scanTimer.elapsed(TimeUnit.MILLISECONDS) + " ms");
 
     System.out.println(
-      "total bytes: " + totalBytes + " bytes (" + StringUtils.humanReadableInt(totalBytes) + ")");
-    System.out.println("throughput  : " + StringUtils.humanReadableInt((long) throughput) + "B/s");
+      "total bytes: " + totalBytes + " bytes (" + Strings.humanReadableInt(totalBytes) + ")");
+    System.out.println("throughput  : " + Strings.humanReadableInt((long) throughput) + "B/s");
     System.out.println("total rows  : " + numRows);
     System.out
-      .println("throughput  : " + StringUtils.humanReadableInt((long) throughputRows) + " rows/s");
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputRows) + " rows/s");
     System.out.println("total cells : " + numCells);
-    System.out.println(
-      "throughput  : " + StringUtils.humanReadableInt((long) throughputCells) + " cells/s");
+    System.out
+      .println("throughput  : " + Strings.humanReadableInt((long) throughputCells) + " cells/s");
   }
 
   @Override
diff --git a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
index 6eb27c5be8f..dbe22dc0549 100644
--- a/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
+++ b/hbase-diagnostics/src/main/java/org/apache/hadoop/hbase/util/MultiThreadedAction.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.util.test.LoadTestDataGenerator;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -206,7 +205,7 @@ public abstract class MultiThreadedAction {
           double averageKeysPerSecond = (time > 0) ? (numKeys * 1000.0 / time) : 0;
 
           LOG.info(threadsLeft + "Keys=" + numKeys + ", cols="
-            + StringUtils.humanReadableInt(numCols.get()) + ", time=" + formatTime(time)
+            + Strings.humanReadableInt(numCols.get()) + ", time=" + formatTime(time)
             + ((numKeys > 0 && time > 0)
               ? (" Overall: [" + "keys/s= " + (numKeys * 1000.0 / time) + ", latency="
                 + String.format("%.2f", (double) totalOpTime / (double) numKeys) + " ms]")
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
index 4cf7bcd9ff2..cd24e01f134 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/PutSortReducer.java
@@ -37,8 +37,8 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
@@ -122,7 +122,7 @@ public class PutSortReducer
         }
       }
       context.setStatus("Read " + map.size() + " entries of " + map.getClass() + "("
-        + StringUtils.humanReadableInt(curSize) + ")");
+        + Strings.humanReadableInt(curSize) + ")");
       int index = 0;
       for (KeyValue kv : map) {
         context.write(row, kv);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index 7d172375c10..11ea6e58770 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.net.DNS;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -171,7 +170,7 @@ public abstract class TableInputFormatBase extends InputFormat<ImmutableBytesWri
       throw new IOException(INITIALIZATION_ERROR, exception);
     }
     TableSplit tSplit = (TableSplit) split;
-    LOG.info("Input split length: " + StringUtils.humanReadableInt(tSplit.getLength()) + " bytes.");
+    LOG.info("Input split length: " + Strings.humanReadableInt(tSplit.getLength()) + " bytes.");
     final TableRecordReader trr =
       this.tableRecordReader != null ? this.tableRecordReader : new TableRecordReader();
     Scan sc = new Scan(this.scan);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
index b374aa86c01..0c5e220b2b9 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TextSortReducer.java
@@ -35,10 +35,10 @@ import org.apache.hadoop.hbase.TagType;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.security.visibility.InvalidLabelException;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.Reducer;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
@@ -187,7 +187,7 @@ public class TextSortReducer
         }
       }
       context.setStatus("Read " + kvs.size() + " entries of " + kvs.getClass() + "("
-        + StringUtils.humanReadableInt(curSize) + ")");
+        + Strings.humanReadableInt(curSize) + ")");
       int index = 0;
       for (KeyValue kv : kvs) {
         context.write(rowKey, kv);
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index 6f70eefd3b7..42d10ca8d46 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.HFileArchiveUtil;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
@@ -236,7 +237,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
       // Use the default block size of the outputFs if bigger
       int defaultBlockSize = Math.max((int) outputFs.getDefaultBlockSize(outputRoot), BUFFER_SIZE);
       bufferSize = conf.getInt(CONF_BUFFER_SIZE, defaultBlockSize);
-      LOG.info("Using bufferSize=" + StringUtils.humanReadableInt(bufferSize));
+      LOG.info("Using bufferSize=" + Strings.humanReadableInt(bufferSize));
       reportSize = conf.getInt(CONF_REPORT_SIZE, REPORT_SIZE);
 
       for (Counter c : Counter.values()) {
@@ -338,10 +339,9 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
         long etime = EnvironmentEdgeManager.currentTime();
         LOG.info("copy completed for input=" + inputPath + " output=" + outputPath);
-        LOG
-          .info("size=" + totalBytesWritten + " (" + StringUtils.humanReadableInt(totalBytesWritten)
-            + ")" + " time=" + StringUtils.formatTimeDiff(etime, stime) + String
-              .format(" %.3fM/sec", (totalBytesWritten / ((etime - stime) / 1000.0)) / 1048576.0));
+        LOG.info("size=" + totalBytesWritten + " (" + Strings.humanReadableInt(totalBytesWritten)
+          + ")" + " time=" + StringUtils.formatTimeDiff(etime, stime) + String.format(" %.3fM/sec",
+            (totalBytesWritten / ((etime - stime) / 1000.0)) / 1048576.0));
         context.getCounter(Counter.FILES_COPIED).increment(1);
 
         // Try to Preserve attributes
@@ -433,7 +433,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
       final Path outputPath, final FSDataOutputStream out, final long inputFileSize)
       throws IOException {
       final String statusMessage =
-        "copied %s/" + StringUtils.humanReadableInt(inputFileSize) + " (%.1f%%)";
+        "copied %s/" + Strings.humanReadableInt(inputFileSize) + " (%.1f%%)";
 
       try {
         byte[] buffer = new byte[bufferSize];
@@ -448,8 +448,8 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
           if (reportBytes >= reportSize) {
             context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);
-            context.setStatus(
-              String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten),
+            context
+              .setStatus(String.format(statusMessage, Strings.humanReadableInt(totalBytesWritten),
                 (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath
                 + " to " + outputPath);
             reportBytes = 0;
@@ -457,10 +457,9 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
         }
 
         context.getCounter(Counter.BYTES_COPIED).increment(reportBytes);
-        context
-          .setStatus(String.format(statusMessage, StringUtils.humanReadableInt(totalBytesWritten),
-            (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath + " to "
-            + outputPath);
+        context.setStatus(String.format(statusMessage, Strings.humanReadableInt(totalBytesWritten),
+          (totalBytesWritten / (float) inputFileSize) * 100.0f) + " from " + inputPath + " to "
+          + outputPath);
 
         return totalBytesWritten;
       } finally {
@@ -760,7 +759,7 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
 
     if (LOG.isDebugEnabled()) {
       for (int i = 0; i < sizeGroups.length; ++i) {
-        LOG.debug("export split=" + i + " size=" + StringUtils.humanReadableInt(sizeGroups[i]));
+        LOG.debug("export split=" + i + " size=" + Strings.humanReadableInt(sizeGroups[i]));
       }
     }
 
diff --git a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index d6668232594..04387e12d54 100644
--- a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -63,6 +63,7 @@ org.apache.hadoop.hbase.util.CommonFSUtils;
 org.apache.hadoop.hbase.util.JvmVersion;
 org.apache.hadoop.hbase.util.PrettyPrinter;
 org.apache.hadoop.util.StringUtils;
+org.apache.hadoop.hbase.util.Strings;
 </%import>
 
 <%if format.equals("json") %>
@@ -789,7 +790,7 @@ AssignmentManager assignmentManager = master.getAssignmentManager();
         <td><% peerConfig.isSerial() %></td>
         <td><% peerConfig.getRemoteWALDir() == null ? "" : peerConfig.getRemoteWALDir() %>
         <td><% peer.getSyncReplicationState() %>
-        <td><% peerConfig.getBandwidth() == 0? "UNLIMITED" : StringUtils.humanReadableInt(peerConfig.getBandwidth()) %></td>
+        <td><% peerConfig.getBandwidth() == 0? "UNLIMITED" : Strings.humanReadableInt(peerConfig.getBandwidth()) %></td>
         <td><% peerConfig.replicateAllUserTables() %></td>
         <td>
            <% peerConfig.getNamespaces() == null ? "" : ReplicationPeerConfigUtil.convertToString(peerConfig.getNamespaces()).replaceAll(";", "; ") %>
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
index 6f610af1972..00a14134c77 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java
@@ -46,8 +46,8 @@ import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.ObjectIntPair;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -1091,8 +1091,8 @@ public class HFileBlockIndex {
         LOG.trace("Wrote a " + numLevels + "-level index with root level at pos "
           + rootLevelIndexPos + ", " + rootChunk.getNumEntries() + " root-level entries, "
           + totalNumEntries + " total entries, "
-          + StringUtils.humanReadableInt(this.totalBlockOnDiskSize) + " on-disk size, "
-          + StringUtils.humanReadableInt(totalBlockUncompressedSize) + " total uncompressed size.");
+          + Strings.humanReadableInt(this.totalBlockOnDiskSize) + " on-disk size, "
+          + Strings.humanReadableInt(totalBlockUncompressedSize) + " total uncompressed size.");
       }
       return rootLevelIndexPos;
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
index 89481161f4a..44241d92895 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWAL.java
@@ -21,9 +21,9 @@ import java.io.IOException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.wal.WALKey;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -63,7 +63,7 @@ public class MetricsWAL implements WALActionsListener {
     if (time > 1000) {
       source.incrementSlowAppendCount();
       LOG.warn(String.format("%s took %d ms appending an edit to wal; len~=%s",
-        Thread.currentThread().getName(), time, StringUtils.humanReadableInt(size)));
+        Thread.currentThread().getName(), time, Strings.humanReadableInt(size)));
     }
   }
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
index 25e7f3ebd33..7a7d4424176 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotInfo.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.WALLink;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -521,7 +521,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
   }
 
   private String fileSizeToString(long size) {
-    return printSizeInBytes ? Long.toString(size) : StringUtils.humanReadableInt(size);
+    return printSizeInBytes ? Long.toString(size) : Strings.humanReadableInt(size);
   }
 
   @Override
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
index c25c5c3886b..accc9043e80 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/procedures.jsp
@@ -21,29 +21,17 @@
   import="static org.apache.commons.lang3.StringEscapeUtils.escapeXml"
   import="java.util.Collections"
   import="java.util.Comparator"
-  import="java.util.ArrayList"
   import="java.util.Date"
   import="java.util.List"
-  import="java.util.Set"
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv"
   import="org.apache.hadoop.hbase.procedure2.LockedResource"
   import="org.apache.hadoop.hbase.procedure2.Procedure"
   import="org.apache.hadoop.hbase.procedure2.ProcedureExecutor"
-  import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
-  import="org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix"
 %>
-<%@ page import="org.apache.hadoop.hbase.master.procedure.ServerCrashProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.master.assignment.TransitRegionStateProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.master.assignment.OpenRegionProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.master.assignment.CloseRegionProcedure" %>
-<%@ page import="org.apache.hadoop.hbase.metrics.OperationMetrics" %>
 <%@ page import="java.util.Map" %>
-<%@ page import="java.util.HashMap" %>
-<%@ page import="org.apache.hadoop.hbase.master.MetricsAssignmentManagerSource" %>
 <%@ page import="org.apache.hadoop.hbase.master.MetricsAssignmentManager" %>
 <%@ page import="org.apache.hadoop.hbase.procedure2.ProcedureMetrics" %>
-<%@ page import="org.apache.hadoop.hbase.metrics.Snapshot" %>
 <%@ page import="org.apache.hadoop.hbase.metrics.Histogram" %>
 <%@ page import="java.util.TreeMap" %>
 <%@ page import="org.apache.hadoop.hbase.metrics.impl.HistogramImpl" %>
@@ -90,7 +78,7 @@
       <h1>Procedure Time Statistics</h1>
     </div>
   </div>
-  <p>We list proceduces completed successfully of the following types only: ServerCrashProcedure, TransitRegionStateProcedure,
+  <p>We list procedures completed successfully of the following types only: ServerCrashProcedure, TransitRegionStateProcedure,
    OpenRegionProcedure, CloseRegionProcedure.</p>
   <table class="table table-striped" width="90%" >
     <tr>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp b/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp
index 52a92552432..1c5bfb6647b 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/quotas.jsp
@@ -21,7 +21,6 @@
   import="java.util.concurrent.TimeUnit"
   import="java.util.ArrayList"
   import="java.util.List"
-  import="org.apache.hadoop.conf.Configuration"
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.quotas.MasterQuotaManager"
   import="org.apache.hadoop.hbase.quotas.QuotaRetriever"
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp b/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp
index 02fc1bfc39e..c86c9902ea1 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/rsgroup.jsp
@@ -24,8 +24,6 @@
   import="java.util.List"
   import="java.util.Map"
   import="java.util.function.Function"
-  import="java.util.regex.Pattern"
-  import="java.util.stream.Stream"
   import="java.util.stream.Collectors"
   import="org.apache.hadoop.hbase.ServerName"
   import="org.apache.hadoop.hbase.TableName"
@@ -169,7 +167,7 @@
                      totalRequestsPerSecond += sl.getRequestCountPerSecond();
                      lastContact = (System.currentTimeMillis() - sl.getReportTimestamp())/1000;
                    }
-                   long startcode = serverName.getStartcode();
+                   long startcode = serverName.getStartCode();
                    int infoPort = master.getRegionServerInfoPort(serverName);
                    String url = "//" + serverName.getHostname() + ":" + infoPort + "/rs-status";%>
                    <tr>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
index e85cab95d7e..35b2967fc1c 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshot.jsp
@@ -26,7 +26,7 @@
   import="org.apache.hadoop.hbase.master.HMaster"
   import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
   import="org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils"
-  import="org.apache.hadoop.util.StringUtils"
+  import="org.apache.hadoop.hbase.util.Strings"
   import="org.apache.hadoop.hbase.TableName"
 %>
 <%
@@ -134,14 +134,14 @@
   <div class="row">
     <div class="span12">
     <%= stats.getStoreFilesCount() %> HFiles (<%= stats.getArchivedStoreFilesCount() %> in archive),
-    total size <%= StringUtils.humanReadableInt(stats.getStoreFilesSize()) %>
+    total size <%= Strings.humanReadableInt(stats.getStoreFilesSize()) %>
     (<%= stats.getSharedStoreFilePercentage() %>&#37;
-    <%= StringUtils.humanReadableInt(stats.getSharedStoreFilesSize()) %> shared with the source
+    <%= Strings.humanReadableInt(stats.getSharedStoreFilesSize()) %> shared with the source
     table)
     </div>
     <div class="span12">
     <%= stats.getLogsCount() %> Logs, total size
-    <%= StringUtils.humanReadableInt(stats.getLogsSize()) %>
+    <%= Strings.humanReadableInt(stats.getLogsSize()) %>
     </div>
   </div>
   <% if (stats.isSnapshotCorrupted()) { %>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
index 6202d7409b5..bc913188282 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/snapshotsStats.jsp
@@ -28,7 +28,7 @@
   import="org.apache.hadoop.hbase.snapshot.SnapshotInfo"
   import="org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils"
   import="org.apache.hadoop.hbase.TableName"
-  import="org.apache.hadoop.util.StringUtils"
+  import="org.apache.hadoop.hbase.util.Strings"
   import="org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription"
 %>
 <%@ page import="org.apache.hadoop.hbase.util.PrettyPrinter" %>
@@ -99,18 +99,18 @@
       <td>
         <%= SnapshotDescriptionUtils.isExpiredSnapshot(snapshotDesc.getTtl(), snapshotDesc.getCreationTime(), System.currentTimeMillis()) ? "Yes" : "No" %>
       </td>
-      <td><%= StringUtils.humanReadableInt(stats.getSharedStoreFilesSize()) %></td>
-      <td><%= StringUtils.humanReadableInt(stats.getMobStoreFilesSize())  %></td>
-      <td><%= StringUtils.humanReadableInt(stats.getArchivedStoreFileSize()) %>
-        (<%= StringUtils.humanReadableInt(stats.getNonSharedArchivedStoreFilesSize()) %>)</td>
+      <td><%= Strings.humanReadableInt(stats.getSharedStoreFilesSize()) %></td>
+      <td><%= Strings.humanReadableInt(stats.getMobStoreFilesSize())  %></td>
+      <td><%= Strings.humanReadableInt(stats.getArchivedStoreFileSize()) %>
+        (<%= Strings.humanReadableInt(stats.getNonSharedArchivedStoreFilesSize()) %>)</td>
     </tr>
     <% } %>
     <p><%= snapshots.size() %> snapshot(s) in set.</p>
-    <p>Total Storefile Size: <%= StringUtils.humanReadableInt(totalSize) %></p>
-    <p>Total Shared Storefile Size: <%= StringUtils.humanReadableInt(totalSharedSize.get()) %>,
-       Total Mob Storefile Size: <%= StringUtils.humanReadableInt(totalMobSize.get()) %>,
-       Total Archived Storefile Size: <%= StringUtils.humanReadableInt(totalArchivedSize.get()) %>
-       (<%= StringUtils.humanReadableInt(totalUnsharedArchivedSize) %>)</p>
+    <p>Total Storefile Size: <%= Strings.humanReadableInt(totalSize) %></p>
+    <p>Total Shared Storefile Size: <%= Strings.humanReadableInt(totalSharedSize.get()) %>,
+       Total Mob Storefile Size: <%= Strings.humanReadableInt(totalMobSize.get()) %>,
+       Total Archived Storefile Size: <%= Strings.humanReadableInt(totalArchivedSize.get()) %>
+       (<%= Strings.humanReadableInt(totalUnsharedArchivedSize) %>)</p>
     <p>Shared Storefile Size is the Storefile size shared between snapshots and active tables.
        Mob Storefile Size is the Mob Storefile size shared between snapshots and active tables.
        Archived Storefile Size is the Storefile size in Archive.
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
index c4c0e620450..9d1bf203961 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
@@ -71,6 +71,7 @@
 <%@ page import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas" %>
 <%@ page import="org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota" %>
 <%@ page import="java.util.stream.Collectors" %>
+<%@ page import="java.nio.charset.StandardCharsets" %>
 <%!
   /**
    * @return An empty region load stamped with the passed in <code>regionInfo</code>
@@ -110,7 +111,7 @@
    * @return an <td> tag contents server name links to server rs-status page.
    */
   private static String buildRegionDeployedServerTag(RegionInfo regionInfo, HMaster master,
-    Map<RegionInfo, ServerName> regionsToServer) {
+                                                     Map<RegionInfo, ServerName> regionsToServer) {
     ServerName serverName = regionsToServer.get(regionInfo);
 
     if (serverName == null) {
@@ -118,7 +119,7 @@
     }
 
     String hostName = serverName.getHostname();
-    String hostNameEncoded = URLEncoder.encode(hostName);
+    String hostNameEncoded = URLEncoder.encode(hostName, StandardCharsets.UTF_8);
     // This port might be wrong if RS actually ended up using something else.
     int serverInfoPort = master.getRegionServerInfoPort(serverName);
     String urlRegionServer = "//" + hostNameEncoded + ":" + serverInfoPort + "/rs-status";
@@ -132,7 +133,7 @@
    */
   private static String moreRegionsToRender(int numRegionsRendered, int numRegions, String fqtn) {
     if (numRegions > numRegionsRendered) {
-      String allRegionsUrl = "?name=" + URLEncoder.encode(fqtn) + "&numRegions=all";
+      String allRegionsUrl = "?name=" + URLEncoder.encode(fqtn, StandardCharsets.UTF_8) + "&numRegions=all";
 
       return "This table has <b>" + numRegions
         + "</b> regions in total, in order to improve the page load time, only <b>"
@@ -345,7 +346,7 @@
                  if (metaLocation != null) {
                    ServerMetrics sl = master.getServerManager().getLoad(metaLocation);
                    // The host name portion should be safe, but I don't know how we handle IDNs so err on the side of failing safely.
-                    hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation);
+                    hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8) + ":" + master.getRegionServerInfoPort(metaLocation);
                    if (sl != null) {
                      Map<byte[], RegionMetrics> map = sl.getRegionMetrics();
                      if (map.containsKey(meta.getRegionName())) {
@@ -415,7 +416,7 @@
 
                   if (metaLocation != null) {
                     ServerMetrics sl = master.getServerManager().getLoad(metaLocation);
-                     hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation);
+                     hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8) + ":" + master.getRegionServerInfoPort(metaLocation);
                     if (sl != null) {
                       Map<byte[], RegionMetrics> map = sl.getRegionMetrics();
                       if (map.containsKey(meta.getRegionName())) {
@@ -468,7 +469,7 @@
 
                  if (metaLocation != null) {
                    ServerMetrics sl = master.getServerManager().getLoad(metaLocation);
-                    hostAndPort = URLEncoder.encode(metaLocation.getHostname()) + ":" + master.getRegionServerInfoPort(metaLocation);
+                    hostAndPort = URLEncoder.encode(metaLocation.getHostname(), StandardCharsets.UTF_8) + ":" + master.getRegionServerInfoPort(metaLocation);
                    if (sl != null) {
                      Map<byte[], RegionMetrics> map = sl.getRegionMetrics();
                      if (map.containsKey(meta.getRegionName())) {
@@ -1054,11 +1055,9 @@
                 numRegionsRendered = 0;
                 for (Map.Entry<RegionInfo, RegionMetrics> hriEntry : entryList) {
                   RegionInfo regionInfo = hriEntry.getKey();
-                  ServerName addr = regionsToServer.get(regionInfo);
                   RegionMetrics load = hriEntry.getValue();
                   float locality = 0.0f;
                   float localityForSsd = 0.0f;
-                  String state = "N/A";
                   if (load != null) {
                     locality = load.getDataLocality();
                     localityForSsd = load.getDataLocalityForSsd();
@@ -1146,11 +1145,11 @@
   <%
     for (Map.Entry<ServerName, Integer> rdEntry : regDistribution.entrySet()) {
       ServerName addr = rdEntry.getKey();
-      String url = "//" + URLEncoder.encode(addr.getHostname()) + ":"
+      String url = "//" + URLEncoder.encode(addr.getHostname(), StandardCharsets.UTF_8) + ":"
        + master.getRegionServerInfoPort(addr) + "/rs-status";
   %>
       <tr>
-        <td><a href="<%= url %>"><%= StringEscapeUtils.escapeHtml4(addr.getHostname().toString())
+        <td><a href="<%= url %>"><%= StringEscapeUtils.escapeHtml4(addr.getHostname())
          + ":" + master.getRegionServerInfoPort(addr) %></a></td>
        <td><%= rdEntry.getValue()%></td>
        <td><%= primaryRegDistribution.get(addr) == null ? 0 : primaryRegDistribution.get(addr)%></td>
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp b/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp
index 7e0d623e686..b2f3938f56f 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/tablesDetailed.jsp
@@ -19,10 +19,8 @@
 --%>
 <%@ page contentType="text/html;charset=UTF-8"
          import="static org.apache.commons.lang3.StringEscapeUtils.escapeXml"
-         import="java.io.IOException"
          import="java.util.ArrayList"
          import="java.util.List"
-         import="java.util.Map"
 %>
 <%@ page import="org.apache.hadoop.hbase.client.TableDescriptor" %>
 <%@ page import="org.apache.hadoop.hbase.master.HMaster" %>
diff --git a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
index c6084f74e90..a251d335b79 100644
--- a/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/regionserver/region.jsp
@@ -25,7 +25,6 @@
   import="org.apache.hadoop.fs.FileSystem"
   import="org.apache.hadoop.fs.FileStatus"
   import="org.apache.hadoop.fs.Path"
-  import="org.apache.hadoop.hbase.HConstants"
   import="org.apache.hadoop.hbase.client.RegionInfo"
   import="org.apache.hadoop.hbase.client.RegionInfoDisplay"
   import="org.apache.hadoop.hbase.mob.MobUtils"
@@ -35,6 +34,7 @@
   import="org.apache.hadoop.hbase.regionserver.HRegion"
   import="org.apache.hadoop.hbase.regionserver.HStore"
 %>
+<%@ page import="java.nio.charset.StandardCharsets" %>
 <%
   String regionName = request.getParameter("name");
   HRegionServer rs = (HRegionServer) getServletContext().getAttribute(HRegionServer.REGIONSERVER);
@@ -95,7 +95,7 @@
             count ++; %>
          <tr>
            <td><a href="storeFile.jsp?name=<%= sf.getEncodedPath() %>"><%= sf.getPath() %></a></td>
-           <td><%= (int) (fs.getLength(sf.getPath()) / 1024 / 1024) %></td>
+           <td><%= (int) (fs.getFileStatus(sf.getPath()).getLen() / 1024 / 1024) %></td>
           <td><%= new Date(sf.getModificationTimestamp()) %></td>
           <td><%= String.format("%,1d", sf.getFileInfo().getHFileInfo().getLenOfBiggestCell()) %></td>
           <td><%= sf.getFileInfo().getHFileInfo().getKeyOfBiggestCell() %></td>
@@ -130,7 +130,7 @@
               mobCnt ++;
               FileStatus status = rs.getFileSystem().getFileStatus(mobPath);
               String mobPathStr = mobPath.toString();
-               String encodedStr = URLEncoder.encode(mobPathStr, HConstants.UTF8_ENCODING); %>
+               String encodedStr = URLEncoder.encode(mobPathStr, StandardCharsets.UTF_8); %>
 
               <tr>
                 <td><a href="storeFile.jsp?name=<%= encodedStr%>"><%= mobPathStr%></a></td>
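
A detail of the region.jsp change above that is easy to miss: the String-based
overload URLEncoder.encode(String, String) declares the checked
UnsupportedEncodingException, while the Charset overload does not, so callers
no longer need to handle an exception that can never fire for UTF-8. A minimal
sketch (hypothetical names, not code from the patch):

    import java.io.UnsupportedEncodingException;
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;

    class EncodeOverloads {
      static String before(String s) throws UnsupportedEncodingException {
        return URLEncoder.encode(s, "UTF-8"); // checked exception, although UTF-8 always exists
      }

      static String after(String s) {
        return URLEncoder.encode(s, StandardCharsets.UTF_8); // no checked exception to propagate
      }
    }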
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
index dd09a82ad80..3647a4e47ad 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionReplayEvents.java
@@ -73,6 +73,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.util.Strings;
 import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
 import org.apache.hadoop.hbase.wal.NoEOFWALStreamReader;
 import org.apache.hadoop.hbase.wal.WAL;
@@ -81,7 +82,6 @@ import org.apache.hadoop.hbase.wal.WALFactory;
 import org.apache.hadoop.hbase.wal.WALKeyImpl;
 import org.apache.hadoop.hbase.wal.WALSplitUtil.MutationReplay;
 import org.apache.hadoop.hbase.wal.WALStreamReader;
-import org.apache.hadoop.util.StringUtils;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -390,7 +390,7 @@ public class TestHRegionReplayEvents {
           // assert that the store memstore is smaller now
           long newStoreMemstoreSize = store.getMemStoreSize().getHeapSize();
           LOG.info("Memstore size reduced by:"
-            + StringUtils.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
+            + Strings.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
           assertTrue(storeMemstoreSize > newStoreMemstoreSize);
 
         } else if (flushDesc.getAction() == FlushAction.COMMIT_FLUSH) {
@@ -490,7 +490,7 @@ public class TestHRegionReplayEvents {
           // assert that the store memstore is smaller now
           long newStoreMemstoreSize = store.getMemStoreSize().getHeapSize();
           LOG.info("Memstore size reduced by:"
-            + StringUtils.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
+            + Strings.humanReadableInt(newStoreMemstoreSize - storeMemstoreSize));
           assertTrue(storeMemstoreSize > newStoreMemstoreSize);
           verifyData(secondaryRegion, 0, lastReplayed + 1, cq, families);
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
index 1638331c288..9bc2f6a5773 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/MockStoreFileGenerator.java
@@ -27,7 +27,7 @@ import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileReader;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.hbase.util.Strings;
 
 import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
 
@@ -71,7 +71,7 @@ class MockStoreFileGenerator {
     // this when selection takes this into account
     when(mockSf.getReader()).thenReturn(reader);
     String toString = MoreObjects.toStringHelper("MockStoreFile").add("isReference", false)
-      .add("fileSize", StringUtils.humanReadableInt(sizeInBytes)).add("seqId", seqId)
+      .add("fileSize", Strings.humanReadableInt(sizeInBytes)).add("seqId", seqId)
       .add("path", stringPath).toString();
     when(mockSf.toString()).thenReturn(toString);
 

