HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/946456c6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/946456c6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/946456c6

Branch: refs/heads/HDFS-7285
Commit: 946456c6d88780abe0251b098dd771e9e1e93ab3
Parents: 18fb421
Author: Tsuyoshi Ozawa <oz...@apache.org>
Authored: Thu Feb 19 12:46:46 2015 +0900
Committer: Tsuyoshi Ozawa <oz...@apache.org>
Committed: Thu Feb 19 13:06:53 2015 +0900

----------------------------------------------------------------------
 .../classification/tools/StabilityOptions.java  |  5 +--
 .../AltKerberosAuthenticationHandler.java       |  6 ++--
 .../authentication/util/TestKerberosUtil.java   | 14 ++++----
 hadoop-common-project/hadoop-common/CHANGES.txt |  2 ++
 .../org/apache/hadoop/conf/Configuration.java   |  3 +-
 .../org/apache/hadoop/crypto/CipherSuite.java   |  3 +-
 .../hadoop/crypto/key/JavaKeyStoreProvider.java |  3 +-
 .../java/org/apache/hadoop/fs/FileSystem.java   |  7 ++--
 .../apache/hadoop/fs/permission/AclEntry.java   |  6 ++--
 .../org/apache/hadoop/fs/shell/find/Name.java   |  5 +--
 .../io/compress/CompressionCodecFactory.java    |  6 ++--
 .../hadoop/metrics2/impl/MetricsConfig.java     |  7 ++--
 .../hadoop/metrics2/impl/MetricsSystemImpl.java |  2 +-
 .../apache/hadoop/security/SecurityUtil.java    |  5 +--
 .../hadoop/security/WhitelistBasedResolver.java |  4 ++-
 .../security/ssl/FileBasedKeyStoresFactory.java |  4 ++-
 .../apache/hadoop/security/ssl/SSLFactory.java  |  3 +-
 .../security/ssl/SSLHostnameVerifier.java       | 10 +++---
 .../DelegationTokenAuthenticationHandler.java   |  3 +-
 .../web/DelegationTokenAuthenticator.java       |  5 +--
 .../org/apache/hadoop/util/StringUtils.java     |  2 +-
 .../java/org/apache/hadoop/ipc/TestIPC.java     |  3 +-
 .../java/org/apache/hadoop/ipc/TestSaslRPC.java |  3 +-
 .../hadoop/security/TestSecurityUtil.java       |  6 ++--
 .../security/TestUserGroupInformation.java      |  5 +--
 .../hadoop/test/TimedOutTestsListener.java      |  4 ++-
 .../org/apache/hadoop/util/TestWinUtils.java    |  7 ++--
 .../java/org/apache/hadoop/nfs/NfsExports.java  |  5 +--
 .../server/CheckUploadContentTypeFilter.java    |  4 ++-
 .../hadoop/fs/http/server/FSOperations.java     |  4 ++-
 .../http/server/HttpFSParametersProvider.java   |  4 ++-
 .../org/apache/hadoop/lib/server/Server.java    |  3 +-
 .../service/hadoop/FileSystemAccessService.java |  5 +--
 .../org/apache/hadoop/lib/wsrs/EnumParam.java   |  3 +-
 .../apache/hadoop/lib/wsrs/EnumSetParam.java    |  3 +-
 .../hadoop/lib/wsrs/ParametersProvider.java     |  3 +-
 .../org/apache/hadoop/hdfs/StorageType.java     |  3 +-
 .../org/apache/hadoop/hdfs/XAttrHelper.java     | 21 ++++++++----
 .../hadoop/hdfs/protocol/HdfsConstants.java     |  3 +-
 .../BlockStoragePolicySuite.java                |  4 ++-
 .../hdfs/server/common/HdfsServerConstants.java |  5 +--
 .../hdfs/server/datanode/StorageLocation.java   |  4 ++-
 .../hdfs/server/namenode/FSEditLogOp.java       |  3 +-
 .../namenode/QuotaByStorageTypeEntry.java       |  3 +-
 .../hdfs/server/namenode/SecondaryNameNode.java |  2 +-
 .../org/apache/hadoop/hdfs/tools/GetConf.java   | 17 +++++-----
 .../OfflineEditsVisitorFactory.java             |  7 ++--
 .../offlineImageViewer/FSImageHandler.java      |  3 +-
 .../org/apache/hadoop/hdfs/web/AuthFilter.java  |  3 +-
 .../org/apache/hadoop/hdfs/web/ParamFilter.java |  3 +-
 .../hadoop/hdfs/web/WebHdfsFileSystem.java      |  5 +--
 .../hadoop/hdfs/web/resources/EnumParam.java    |  3 +-
 .../hadoop/hdfs/web/resources/EnumSetParam.java |  4 ++-
 .../namenode/snapshot/TestSnapshotManager.java  |  5 +--
 .../jobhistory/JobHistoryEventHandler.java      |  4 ++-
 .../mapreduce/v2/app/webapp/AppController.java  |  2 +-
 .../apache/hadoop/mapreduce/TypeConverter.java  |  3 +-
 .../apache/hadoop/mapreduce/v2/util/MRApps.java |  5 +--
 .../hadoop/mapreduce/TestTypeConverter.java     |  4 ++-
 .../java/org/apache/hadoop/mapred/Task.java     |  3 +-
 .../counters/FileSystemCounterGroup.java        |  2 +-
 .../mapreduce/filecache/DistributedCache.java   |  4 +--
 .../hadoop/mapreduce/lib/db/DBInputFormat.java  |  4 ++-
 .../org/apache/hadoop/mapreduce/tools/CLI.java  |  8 +++--
 .../java/org/apache/hadoop/fs/TestDFSIO.java    | 20 ++++++------
 .../org/apache/hadoop/fs/TestFileSystem.java    |  6 +++-
 .../org/apache/hadoop/fs/slive/Constants.java   |  6 ++--
 .../apache/hadoop/fs/slive/OperationData.java   |  4 ++-
 .../apache/hadoop/fs/slive/OperationOutput.java |  4 ++-
 .../org/apache/hadoop/fs/slive/SliveTest.java   |  3 +-
 .../java/org/apache/hadoop/io/FileBench.java    | 17 ++++++----
 .../org/apache/hadoop/mapred/TestMapRed.java    |  3 +-
 .../apache/hadoop/examples/DBCountPageView.java |  3 +-
 .../plugin/versioninfo/VersionInfoMojo.java     |  4 ++-
 .../fs/azure/AzureNativeFileSystemStore.java    |  4 +--
 .../apache/hadoop/tools/util/DistCpUtils.java   | 11 ++++---
 .../java/org/apache/hadoop/tools/DistCpV1.java  |  5 ++-
 .../gridmix/GridmixJobSubmissionPolicy.java     |  3 +-
 .../hadoop/tools/rumen/HadoopLogsAnalyzer.java  | 34 +++++++++++---------
 .../apache/hadoop/tools/rumen/JobBuilder.java   |  3 +-
 .../apache/hadoop/tools/rumen/LoggedTask.java   |  3 +-
 .../hadoop/tools/rumen/LoggedTaskAttempt.java   |  3 +-
 .../apache/hadoop/streaming/Environment.java    |  2 +-
 .../hadoop/yarn/client/cli/ApplicationCLI.java  |  5 +--
 .../apache/hadoop/yarn/client/cli/NodeCLI.java  |  4 ++-
 .../impl/pb/GetApplicationsRequestPBImpl.java   |  6 ++--
 .../pb/ApplicationSubmissionContextPBImpl.java  |  3 +-
 .../hadoop/yarn/webapp/hamlet/HamletGen.java    |  5 +--
 .../webapp/AHSWebServices.java                  |  4 ++-
 .../timeline/webapp/TimelineWebServices.java    |  3 +-
 .../hadoop/yarn/server/webapp/WebServices.java  | 10 +++---
 .../server/resourcemanager/ClientRMService.java |  3 +-
 .../resource/ResourceWeights.java               |  3 +-
 .../CapacitySchedulerConfiguration.java         |  6 ++--
 .../fair/FairSchedulerConfiguration.java        |  3 +-
 .../scheduler/fair/SchedulingPolicy.java        |  3 +-
 .../resourcemanager/webapp/NodesPage.java       |  3 +-
 .../resourcemanager/webapp/RMWebServices.java   | 19 +++++++----
 98 files changed, 331 insertions(+), 191 deletions(-)
----------------------------------------------------------------------
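
Background for the change summarized above: String.toLowerCase()/toUpperCase() without an explicit locale use the JVM's default locale, so fixed ASCII keywords such as enum names, option strings, and hostnames can be mangled on systems running under locales with special casing rules (the classic case is Turkish dotted/dotless I). The snippet below is a minimal, hypothetical sketch of that failure mode; the class name is illustrative and not part of this patch.

    import java.util.Locale;

    public class LocaleCaseDemo {
      public static void main(String[] args) {
        // Under a Turkish locale, 'I' lower-cases to the dotless U+0131,
        // so a comparison against the ASCII keyword "title" would fail.
        System.out.println("TITLE".toLowerCase(new Locale("tr", "TR"))); // tıtle
        // An explicit locale keeps ASCII case conversion stable, which is
        // the pattern this patch applies at each affected call site.
        System.out.println("TITLE".toLowerCase(Locale.ENGLISH));         // title
      }
    }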


http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
index dbce31e..657dbce 100644
--- a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
+++ b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
@@ -21,6 +21,7 @@ import com.sun.javadoc.DocErrorReporter;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 class StabilityOptions {
   public static final String STABLE_OPTION = "-stable";
@@ -28,7 +29,7 @@ class StabilityOptions {
   public static final String UNSTABLE_OPTION = "-unstable";
 
   public static Integer optionLength(String option) {
-    String opt = option.toLowerCase();
+    String opt = option.toLowerCase(Locale.ENGLISH);
     if (opt.equals(UNSTABLE_OPTION)) return 1;
     if (opt.equals(EVOLVING_OPTION)) return 1;
     if (opt.equals(STABLE_OPTION)) return 1;
@@ -38,7 +39,7 @@ class StabilityOptions {
   public static void validOptions(String[][] options,
       DocErrorReporter reporter) {
     for (int i = 0; i < options.length; i++) {
-      String opt = options[i][0].toLowerCase();
+      String opt = options[i][0].toLowerCase(Locale.ENGLISH);
       if (opt.equals(UNSTABLE_OPTION)) {
        RootDocProcessor.stability = UNSTABLE_OPTION;
       } else if (opt.equals(EVOLVING_OPTION)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
index 987330f..dae3b50 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
@@ -14,6 +14,7 @@
 package org.apache.hadoop.security.authentication.server;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Properties;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
@@ -68,7 +69,8 @@ public abstract class AltKerberosAuthenticationHandler
             NON_BROWSER_USER_AGENTS, NON_BROWSER_USER_AGENTS_DEFAULT)
             .split("\\W*,\\W*");
     for (int i = 0; i < nonBrowserUserAgents.length; i++) {
-        nonBrowserUserAgents[i] = nonBrowserUserAgents[i].toLowerCase();
+        nonBrowserUserAgents[i] =
+            nonBrowserUserAgents[i].toLowerCase(Locale.ENGLISH);
     }
   }
 
@@ -120,7 +122,7 @@ public abstract class AltKerberosAuthenticationHandler
     if (userAgent == null) {
       return false;
     }
-    userAgent = userAgent.toLowerCase();
+    userAgent = userAgent.toLowerCase(Locale.ENGLISH);
     boolean isBrowser = true;
     for (String nonBrowserUserAgent : nonBrowserUserAgents) {
         if (userAgent.contains(nonBrowserUserAgent)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
index b0e8f04..89e07d1 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 import org.apache.directory.server.kerberos.shared.keytab.Keytab;
@@ -58,24 +59,25 @@ public class TestKerberosUtil {
 
     // send null hostname
     Assert.assertEquals("When no hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, null));
     // send empty hostname
     Assert.assertEquals("When empty hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, ""));
     // send 0.0.0.0 hostname
     Assert.assertEquals("When 0.0.0.0 hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, "0.0.0.0"));
     // send uppercase hostname
     Assert.assertEquals("When uppercase hostname is sent",
-        service + "/" + testHost.toLowerCase(),
+        service + "/" + testHost.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, testHost));
     // send lowercase hostname
     Assert.assertEquals("When lowercase hostname is sent",
-        service + "/" + testHost.toLowerCase(),
-        KerberosUtil.getServicePrincipal(service, testHost.toLowerCase()));
+        service + "/" + testHost.toLowerCase(Locale.ENGLISH),
+        KerberosUtil.getServicePrincipal(
+            service, testHost.toLowerCase(Locale.ENGLISH)));
   }
   
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 7a065d5..e6d560a 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -405,6 +405,8 @@ Trunk (Unreleased)
 
     HADOOP-11585. Fix formatting in Tracing.md (Masatake Iwasaki via aw)
 
+    HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 02654b7..5909e62 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -46,6 +46,7 @@ import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.ListIterator;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
@@ -1451,7 +1452,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
       return defaultValue;
     }
 
-    valueString = valueString.toLowerCase();
+    valueString = valueString.toLowerCase(Locale.ENGLISH);
 
     if ("true".equals(valueString))
       return true;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
index c9355d7..c5601eb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.crypto;
 
+import java.util.Locale;
 import org.apache.hadoop.classification.InterfaceAudience;
 
 /**
@@ -97,7 +98,7 @@ public enum CipherSuite {
     String[] parts = name.split("/");
     StringBuilder suffix = new StringBuilder();
     for (String part : parts) {
-      suffix.append(".").append(part.toLowerCase());
+      suffix.append(".").append(part.toLowerCase(Locale.ENGLISH));
     }
     
     return suffix.toString();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
index bfec1ef..9e09b6e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
@@ -53,6 +53,7 @@ import java.util.Date;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReadWriteLock;
@@ -422,7 +423,7 @@ public class JavaKeyStoreProvider extends KeyProvider {
   @Override
   public KeyVersion createKey(String name, byte[] material,
                                Options options) throws IOException {
-    Preconditions.checkArgument(name.equals(name.toLowerCase()),
+    Preconditions.checkArgument(name.equals(name.toLowerCase(Locale.ENGLISH)),
         "Uppercase key names are unsupported: %s", name);
     writeLock.lock();
     try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index cfa5198..f52ecad 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -33,6 +33,7 @@ import java.util.IdentityHashMap;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.ServiceLoader;
@@ -2795,8 +2796,10 @@ public abstract class FileSystem extends Configured implements Closeable {
       }
 
       Key(URI uri, Configuration conf, long unique) throws IOException {
-        scheme = uri.getScheme()==null?"":uri.getScheme().toLowerCase();
-        authority = uri.getAuthority()==null?"":uri.getAuthority().toLowerCase();
+        scheme = uri.getScheme() == null ?
+            "" : uri.getScheme().toLowerCase(Locale.ENGLISH);
+        authority = uri.getAuthority() == null ?
+            "" : uri.getAuthority().toLowerCase(Locale.ENGLISH);
         this.unique = unique;
         
         this.ugi = UserGroupInformation.getCurrentUser();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
index b9def64..6397564 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.fs.permission;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
+import java.util.Locale;
 
 import com.google.common.base.Objects;
 
@@ -106,7 +107,7 @@ public class AclEntry {
       sb.append("default:");
     }
     if (type != null) {
-      sb.append(type.toString().toLowerCase());
+      sb.append(type.toString().toLowerCase(Locale.ENGLISH));
     }
     sb.append(':');
     if (name != null) {
@@ -263,7 +264,8 @@ public class AclEntry {
 
     AclEntryType aclType = null;
     try {
-      aclType = Enum.valueOf(AclEntryType.class, split[index].toUpperCase());
+      aclType = Enum.valueOf(
+          AclEntryType.class, split[index].toUpperCase(Locale.ENGLISH));
       builder.setType(aclType);
       index++;
     } catch (IllegalArgumentException iae) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
index 88314c6..4c937ef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.fs.shell.find;
 
 import java.io.IOException;
 import java.util.Deque;
+import java.util.Locale;
 
 import org.apache.hadoop.fs.GlobPattern;
 import org.apache.hadoop.fs.shell.PathData;
@@ -73,7 +74,7 @@ final class Name extends BaseExpression {
   public void prepare() throws IOException {
     String argPattern = getArgument(1);
     if (!caseSensitive) {
-      argPattern = argPattern.toLowerCase();
+      argPattern = argPattern.toLowerCase(Locale.ENGLISH);
     }
     globPattern = new GlobPattern(argPattern);
   }
@@ -82,7 +83,7 @@ final class Name extends BaseExpression {
   public Result apply(PathData item, int depth) throws IOException {
     String name = getPath(item).getName();
     if (!caseSensitive) {
-      name = name.toLowerCase();
+      name = name.toLowerCase(Locale.ENGLISH);
     }
     if (globPattern.matches(name)) {
       return Result.PASS;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
index eb35759..6d16823 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
@@ -65,10 +65,10 @@ public class CompressionCodecFactory {
     codecsByClassName.put(codec.getClass().getCanonicalName(), codec);
 
     String codecName = codec.getClass().getSimpleName();
-    codecsByName.put(codecName.toLowerCase(), codec);
+    codecsByName.put(codecName.toLowerCase(Locale.ENGLISH), codec);
     if (codecName.endsWith("Codec")) {
       codecName = codecName.substring(0, codecName.length() - "Codec".length());
-      codecsByName.put(codecName.toLowerCase(), codec);
+      codecsByName.put(codecName.toLowerCase(Locale.ENGLISH), codec);
     }
   }
 
@@ -240,7 +240,7 @@ public class CompressionCodecFactory {
       CompressionCodec codec = getCodecByClassName(codecName);
       if (codec == null) {
         // trying to get the codec by name in case the name was specified instead a class
-        codec = codecsByName.get(codecName.toLowerCase());
+        codec = codecsByName.get(codecName.toLowerCase(Locale.ENGLISH));
       }
       return codec;
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
index 167205e..f87ef4d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
@@ -85,12 +85,13 @@ class MetricsConfig extends SubsetConfiguration {
   private ClassLoader pluginLoader;
 
   MetricsConfig(Configuration c, String prefix) {
-    super(c, prefix.toLowerCase(Locale.US), ".");
+    super(c, prefix.toLowerCase(Locale.ENGLISH), ".");
   }
 
   static MetricsConfig create(String prefix) {
-    return loadFirst(prefix, "hadoop-metrics2-"+ prefix.toLowerCase(Locale.US)
-                     +".properties", DEFAULT_FILE_NAME);
+    return loadFirst(prefix, "hadoop-metrics2" + "-"
+        + prefix.toLowerCase(Locale.ENGLISH)
+        +".properties", DEFAULT_FILE_NAME);
   }
 
   static MetricsConfig create(String prefix, String... fileNames) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
index 32b00f3..8964934 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
@@ -617,6 +617,6 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource {
     String m = System.getProperty(MS_INIT_MODE_KEY);
     String m2 = m == null ? System.getenv(MS_INIT_MODE_KEY) : m;
     return InitMode.valueOf((m2 == null ? InitMode.NORMAL.name() : m2)
-                            .toUpperCase(Locale.US));
+                            .toUpperCase(Locale.ENGLISH));
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index 7cbee26..355ea91 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -182,7 +182,8 @@ public class SecurityUtil {
     if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) {
       fqdn = getLocalHostName();
     }
-    return components[0] + "/" + fqdn.toLowerCase(Locale.US) + "@" + components[2];
+    return components[0] + "/" + fqdn.toLowerCase(Locale.ENGLISH) + "@"
+        + components[2];
   }
   
   static String getLocalHostName() throws UnknownHostException {
@@ -379,7 +380,7 @@ public class SecurityUtil {
       }
       host = addr.getAddress().getHostAddress();
     } else {
-      host = addr.getHostName().toLowerCase();
+      host = addr.getHostName().toLowerCase(Locale.ENGLISH);
     }
     return new Text(host + ":" + addr.getPort());
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
index dc0815e..59d1492 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.security;
 
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -138,7 +139,8 @@ public class WhitelistBasedResolver extends SaslPropertiesResolver {
         QualityOfProtection.PRIVACY.toString());
 
     for (int i=0; i < qop.length; i++) {
-      qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase()).getSaslQop();
+      qop[i] = QualityOfProtection.valueOf(
+          qop[i].toUpperCase(Locale.ENGLISH)).getSaslQop();
     }
 
     saslProps.put(Sasl.QOP, StringUtils.join(",", qop));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
index 4b81e17..50f96d6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
@@ -33,6 +33,7 @@ import java.io.InputStream;
 import java.security.GeneralSecurityException;
 import java.security.KeyStore;
 import java.text.MessageFormat;
+import java.util.Locale;
 
 /**
  * {@link KeyStoresFactory} implementation that reads the certificates from
@@ -94,7 +95,8 @@ public class FileBasedKeyStoresFactory implements KeyStoresFactory {
   @VisibleForTesting
   public static String resolvePropertyName(SSLFactory.Mode mode,
                                            String template) {
-    return MessageFormat.format(template, mode.toString().toLowerCase());
+    return MessageFormat.format(
+        template, mode.toString().toLowerCase(Locale.ENGLISH));
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
index bbea33b..370f09f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
@@ -33,6 +33,7 @@ import javax.net.ssl.SSLSocketFactory;
 import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.security.GeneralSecurityException;
+import java.util.Locale;
 
 /**
  * Factory that creates SSLEngine and SSLSocketFactory instances using
@@ -138,7 +139,7 @@ public class SSLFactory implements ConnectionConfigurator {
   private HostnameVerifier getHostnameVerifier(Configuration conf)
       throws GeneralSecurityException, IOException {
     return getHostnameVerifier(conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").
-        trim().toUpperCase());
+        trim().toUpperCase(Locale.ENGLISH));
   }
 
   public static HostnameVerifier getHostnameVerifier(String verifier)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
index dd5e67b..7a905f1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
@@ -41,6 +41,7 @@ import java.util.Collection;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Set;
 import java.util.StringTokenizer;
 import java.util.TreeSet;
@@ -365,7 +366,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
             buf.append('<');
             for (int i = 0; i < hosts.length; i++) {
                 String h = hosts[i];
-                h = h != null ? h.trim().toLowerCase() : "";
+                h = h != null ? h.trim().toLowerCase(Locale.ENGLISH) : "";
                 hosts[i] = h;
                 if (i > 0) {
                     buf.append('/');
@@ -406,7 +407,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
             out:
             for (Iterator<String> it = names.iterator(); it.hasNext();) {
                 // Don't trim the CN, though!
-                final String cn = it.next().toLowerCase();
+                final String cn = it.next().toLowerCase(Locale.ENGLISH);
                 // Store CN in StringBuffer in case we need to report an error.
                 buf.append(" <");
                 buf.append(cn);
@@ -424,7 +425,8 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
                                      acceptableCountryWildcard(cn);
 
                 for (int i = 0; i < hosts.length; i++) {
-                    final String hostName = hosts[i].trim().toLowerCase();
+                    final String hostName =
+                        hosts[i].trim().toLowerCase(Locale.ENGLISH);
                     if (doWildcard) {
                         match = hostName.endsWith(cn.substring(1));
                         if (match && strictWithSubDomains) {
@@ -479,7 +481,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
         }
 
         public static boolean isLocalhost(String host) {
-            host = host != null ? host.trim().toLowerCase() : "";
+            host = host != null ? host.trim().toLowerCase(Locale.ENGLISH) : "";
             if (host.startsWith("::1")) {
                 int x = host.lastIndexOf('%');
                 if (x >= 0) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
index c18b5d3..f1bcd5d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
@@ -23,6 +23,7 @@ import java.text.MessageFormat;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
@@ -169,7 +170,7 @@ public abstract class DelegationTokenAuthenticationHandler
     boolean requestContinues = true;
     String op = ServletUtils.getParameter(request,
         KerberosDelegationTokenAuthenticator.OP_PARAM);
-    op = (op != null) ? op.toUpperCase() : null;
+    op = (op != null) ? op.toUpperCase(Locale.ENGLISH) : null;
     if (DELEGATION_TOKEN_OPS.contains(op) &&
         !request.getMethod().equals("OPTIONS")) {
       KerberosDelegationTokenAuthenticator.DelegationTokenOperation dtOp =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
index d93f7ac..5d826b7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
@@ -37,6 +37,7 @@ import java.net.InetSocketAddress;
 import java.net.URL;
 import java.net.URLEncoder;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 
 /**
@@ -286,8 +287,8 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
     HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
     if (hasResponse) {
       String contentType = conn.getHeaderField(CONTENT_TYPE);
-      contentType = (contentType != null) ? contentType.toLowerCase()
-                                          : null;
+      contentType = (contentType != null) ?
+          contentType.toLowerCase(Locale.ENGLISH) : null;
       if (contentType != null &&
           contentType.contains(APPLICATION_JSON_MIME)) {
         try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
index ff8edc3..c1acc7e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
@@ -901,7 +901,7 @@ public class StringUtils {
    */
   public static String camelize(String s) {
     StringBuilder sb = new StringBuilder();
-    String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR, '_');
+    String[] words = split(s.toLowerCase(Locale.ENGLISH), ESCAPE_CHAR, '_');
 
     for (String word : words)
       sb.append(org.apache.commons.lang.StringUtils.capitalize(word));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
index eb19f48..e52cb26 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
@@ -43,6 +43,7 @@ import java.net.SocketTimeoutException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Random;
 import java.util.concurrent.BrokenBarrierException;
 import java.util.concurrent.CountDownLatch;
@@ -1296,7 +1297,7 @@ public class TestIPC {
     
     StringBuilder hexString = new StringBuilder();
     
-    for (String line : hexdump.toUpperCase().split("\n")) {
+    for (String line : hexdump.toUpperCase(Locale.ENGLISH).split("\n")) {
       hexString.append(line.substring(0, LAST_HEX_COL).replace(" ", ""));
     }
     return StringUtils.hexStringToByte(hexString.toString());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
index 903990b..9b25b77 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
@@ -41,6 +41,7 @@ import java.security.Security;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.regex.Pattern;
@@ -181,7 +182,7 @@ public class TestSaslRPC {
     StringBuilder sb = new StringBuilder();
     int i = 0;
     for (QualityOfProtection qop:qops){
-     sb.append(qop.name().toLowerCase());
+     sb.append(qop.name().toLowerCase(Locale.ENGLISH));
      if (++i < qops.length){
        sb.append(",");
      }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
index 4616c90..1d68f8a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
@@ -103,13 +103,13 @@ public class TestSecurityUtil {
     String realm = "@REALM";
     String principalInConf = service + SecurityUtil.HOSTNAME_PATTERN + realm;
     String hostname = "FooHost";
-    String principal = service + hostname.toLowerCase() + realm;
+    String principal = service + hostname.toLowerCase(Locale.ENGLISH) + realm;
     verify(principalInConf, hostname, principal);
   }
 
   @Test
   public void testLocalHostNameForNullOrWild() throws Exception {
-    String local = SecurityUtil.getLocalHostName().toLowerCase(Locale.US);
+    String local = SecurityUtil.getLocalHostName().toLowerCase(Locale.ENGLISH);
     assertEquals("hdfs/" + local + "@REALM",
                  SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", (String)null));
     assertEquals("hdfs/" + local + "@REALM",
@@ -260,7 +260,7 @@ public class TestSecurityUtil {
     //LOG.info("address:"+addr+" host:"+host+" ip:"+ip+" port:"+port);
 
     SecurityUtil.setTokenServiceUseIp(useIp);
-    String serviceHost = useIp ? ip : host.toLowerCase();
+    String serviceHost = useIp ? ip : host.toLowerCase(Locale.ENGLISH);
     
     Token<?> token = new Token<TokenIdentifier>();
     Text service = new Text(serviceHost+":"+port);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
index 48b9b99..d77d9b5 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
@@ -41,6 +41,7 @@ import java.security.PrivilegedExceptionAction;
 import java.util.Collection;
 import java.util.ConcurrentModificationException;
 import java.util.LinkedHashSet;
+import java.util.Locale;
 import java.util.Set;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS;
@@ -213,7 +214,7 @@ public class TestUserGroupInformation {
         userName = userName.substring(sp + 1);
       }
       // user names are case insensitive on Windows. Make consistent
-      userName = userName.toLowerCase();
+      userName = userName.toLowerCase(Locale.ENGLISH);
     }
     // get the groups
     pp = Runtime.getRuntime().exec(Shell.WINDOWS ?
@@ -233,7 +234,7 @@ public class TestUserGroupInformation {
     String loginUserName = login.getShortUserName();
     if(Shell.WINDOWS) {
       // user names are case insensitive on Windows. Make consistent
-      loginUserName = loginUserName.toLowerCase();
+      loginUserName = loginUserName.toLowerCase(Locale.ENGLISH);
     }
     assertEquals(userName, loginUserName);
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
index 220ab1d..4ebf29a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
@@ -27,6 +27,7 @@ import java.lang.management.ThreadMXBean;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.Locale;
 import java.util.Map;
 
 import org.junit.runner.notification.Failure;
@@ -93,7 +94,8 @@ public class TimedOutTestsListener extends RunListener {
           thread.getPriority(),
           thread.getId(),
           Thread.State.WAITING.equals(thread.getState()) ? 
-              "in Object.wait()" : thread.getState().name().toLowerCase(),
+              "in Object.wait()" :
+              thread.getState().name().toLowerCase(Locale.ENGLISH),
           Thread.State.WAITING.equals(thread.getState()) ? 
               "WAITING (on object monitor)" : thread.getState()));
       for (StackTraceElement stackTraceElement : e.getValue()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
index 2d4e442..708fc4c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java
@@ -27,6 +27,7 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.util.Locale;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
@@ -382,8 +383,10 @@ public class TestWinUtils {
   private void assertOwners(File file, String expectedUser,
       String expectedGroup) throws IOException {
     String [] args = lsF(file).trim().split("[\\|]");
-    assertEquals(expectedUser.toLowerCase(), args[2].toLowerCase());
-    assertEquals(expectedGroup.toLowerCase(), args[3].toLowerCase());
+    assertEquals(expectedUser.toLowerCase(Locale.ENGLISH),
+        args[2].toLowerCase(Locale.ENGLISH));
+    assertEquals(expectedGroup.toLowerCase(Locale.ENGLISH),
+        args[3].toLowerCase(Locale.ENGLISH));
   }
 
   @Test (timeout = 30000)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
index b617ae5..cef8f99 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.nfs;
 import java.net.InetAddress;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 import org.apache.commons.logging.Log;
@@ -359,10 +360,10 @@ public class NfsExports {
     AccessPrivilege privilege = AccessPrivilege.READ_ONLY;
     switch (parts.length) {
     case 1:
-      host = parts[0].toLowerCase().trim();
+      host = parts[0].toLowerCase(Locale.ENGLISH).trim();
       break;
     case 2:
-      host = parts[0].toLowerCase().trim();
+      host = parts[0].toLowerCase(Locale.ENGLISH).trim();
       String option = parts[1].trim();
       if ("rw".equalsIgnoreCase(option)) {
         privilege = AccessPrivilege.READ_WRITE;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
index 836b4ce..7074ba2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
@@ -32,6 +32,7 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import java.io.IOException;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Set;
 
 /**
@@ -82,7 +83,8 @@ public class CheckUploadContentTypeFilter implements Filter {
     String method = httpReq.getMethod();
     if (method.equals("PUT") || method.equals("POST")) {
       String op = httpReq.getParameter(HttpFSFileSystem.OP_PARAM);
-      if (op != null && UPLOAD_OPERATIONS.contains(op.toUpperCase())) {
+      if (op != null &&
+          UPLOAD_OPERATIONS.contains(op.toUpperCase(Locale.ENGLISH))) {
         if ("true".equalsIgnoreCase(httpReq.getParameter(HttpFSParametersProvider.DataParam.NAME))) {
           String contentType = httpReq.getContentType();
           contentTypeOK =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
index e7d92f5..4b72a51 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
@@ -43,6 +43,7 @@ import java.io.OutputStream;
 import java.util.EnumSet;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -527,7 +528,8 @@ public class FSOperations {
     @Override
     public JSONObject execute(FileSystem fs) throws IOException {
       boolean deleted = fs.delete(path, recursive);
-      return toJSON(HttpFSFileSystem.DELETE_JSON.toLowerCase(), deleted);
+      return toJSON(
+          HttpFSFileSystem.DELETE_JSON.toLowerCase(Locale.ENGLISH), deleted);
     }
 
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
index 9b0be9b..fb06667 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.lib.wsrs.StringParam;
 
 import javax.ws.rs.ext.Provider;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 import java.util.regex.Pattern;
 
@@ -167,7 +168,8 @@ public class HttpFSParametersProvider extends ParametersProvider {
      */
     public OperationParam(String operation) {
       super(NAME, HttpFSFileSystem.Operation.class,
-            HttpFSFileSystem.Operation.valueOf(operation.toUpperCase()));
+            HttpFSFileSystem.Operation.valueOf(
+                operation.toUpperCase(Locale.ENGLISH)));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
index 5c1bb4f..e2ef7a1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
@@ -36,6 +36,7 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Properties;
 
@@ -202,7 +203,7 @@ public class Server {
    * @param config server configuration.
    */
   public Server(String name, String homeDir, String configDir, String logDir, String tempDir, Configuration config) {
-    this.name = Check.notEmpty(name, "name").trim().toLowerCase();
+    this.name = Check.notEmpty(name, "name").trim().toLowerCase(Locale.ENGLISH);
     this.homeDir = Check.notEmpty(homeDir, "homeDir");
     this.configDir = Check.notEmpty(configDir, "configDir");
     this.logDir = Check.notEmpty(logDir, "logDir");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
index ccb15a3..fd2e822 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java
@@ -43,6 +43,7 @@ import java.net.URI;
 import java.security.PrivilegedExceptionAction;
 import java.util.Collection;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -254,7 +255,7 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
   private Set<String> toLowerCase(Collection<String> collection) {
     Set<String> set = new HashSet<String>();
     for (String value : collection) {
-      set.add(value.toLowerCase());
+      set.add(value.toLowerCase(Locale.ENGLISH));
     }
     return set;
   }
@@ -300,7 +301,7 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
 
   protected void validateNamenode(String namenode) throws FileSystemAccessException {
     if (nameNodeWhitelist.size() > 0 && !nameNodeWhitelist.contains("*")) {
-      if (!nameNodeWhitelist.contains(namenode.toLowerCase())) {
+      if (!nameNodeWhitelist.contains(namenode.toLowerCase(Locale.ENGLISH))) {
         throw new FileSystemAccessException(FileSystemAccessException.ERROR.H05, namenode, "not in whitelist");
       }
     }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
index 8baef67..08eb60d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.util.StringUtils;
 
 import java.util.Arrays;
+import java.util.Locale;
 
 @InterfaceAudience.Private
 public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
@@ -34,7 +35,7 @@ public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
 
   @Override
   protected E parse(String str) throws Exception {
-    return Enum.valueOf(klass, str.toUpperCase());
+    return Enum.valueOf(klass, str.toUpperCase(Locale.ENGLISH));
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java
index 8d79b71..25158fd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.lib.wsrs;
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.Iterator;
+import java.util.Locale;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 
@@ -37,7 +38,7 @@ public abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>>
     final EnumSet<E> set = EnumSet.noneOf(klass);
     if (!str.isEmpty()) {
       for (String sub : str.split(",")) {
-        set.add(Enum.valueOf(klass, sub.trim().toUpperCase()));
+        set.add(Enum.valueOf(klass, sub.trim().toUpperCase(Locale.ENGLISH)));
       }
     }
     return set;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
index 4703a90..9857244 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
@@ -33,6 +33,7 @@ import java.lang.reflect.Type;
 import java.text.MessageFormat;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 /**
@@ -70,7 +71,7 @@ public class ParametersProvider
     }
     Enum op;
     try {
-      op = Enum.valueOf(enumClass, str.toUpperCase());
+      op = Enum.valueOf(enumClass, str.toUpperCase(Locale.ENGLISH));
     } catch (IllegalArgumentException ex) {
       throw new IllegalArgumentException(
         MessageFormat.format("Invalid Operation [{0}]", str));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java
index a26ed91..745e44b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hdfs;
 import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -78,7 +79,7 @@ public enum StorageType {
   }
 
   public static StorageType parseStorageType(String s) {
-    return StorageType.valueOf(s.toUpperCase());
+    return StorageType.valueOf(s.toUpperCase(Locale.ENGLISH));
   }
 
   private static List<StorageType> getNonTransientTypes() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
index 04364ccf..1b5b8eb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs;
 
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -57,16 +58,22 @@ public class XAttrHelper {
     }
     
     NameSpace ns;
-    final String prefix = name.substring(0, prefixIndex).toLowerCase();
-    if (prefix.equals(NameSpace.USER.toString().toLowerCase())) {
+    final String prefix = name.substring(0, prefixIndex)
+        .toLowerCase(Locale.ENGLISH);
+    if (prefix.equals(
+        NameSpace.USER.toString().toLowerCase(Locale.ENGLISH))) {
       ns = NameSpace.USER;
-    } else if (prefix.equals(NameSpace.TRUSTED.toString().toLowerCase())) {
+    } else if (prefix.equals(
+        NameSpace.TRUSTED.toString().toLowerCase(Locale.ENGLISH))) {
       ns = NameSpace.TRUSTED;
-    } else if (prefix.equals(NameSpace.SYSTEM.toString().toLowerCase())) {
+    } else if (prefix.equals(
+        NameSpace.SYSTEM.toString().toLowerCase(Locale.ENGLISH))) {
       ns = NameSpace.SYSTEM;
-    } else if (prefix.equals(NameSpace.SECURITY.toString().toLowerCase())) {
+    } else if (prefix.equals(
+        NameSpace.SECURITY.toString().toLowerCase(Locale.ENGLISH))) {
       ns = NameSpace.SECURITY;
-    } else if (prefix.equals(NameSpace.RAW.toString().toLowerCase())) {
+    } else if (prefix.equals(
+        NameSpace.RAW.toString().toLowerCase(Locale.ENGLISH))) {
       ns = NameSpace.RAW;
     } else {
       throw new HadoopIllegalArgumentException("An XAttr name must be " +
@@ -145,7 +152,7 @@ public class XAttrHelper {
     }
     
     String namespace = xAttr.getNameSpace().toString();
-    return namespace.toLowerCase() + "." + xAttr.getName();
+    return namespace.toLowerCase(Locale.ENGLISH) + "." + xAttr.getName();
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
index 54da8eb..1769794 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.protocol;
 
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -98,7 +99,7 @@ public class HdfsConstants {
 
     /** Covert the given String to a RollingUpgradeAction. */
     public static RollingUpgradeAction fromString(String s) {
-      return MAP.get(s.toUpperCase());
+      return MAP.get(s.toUpperCase(Locale.ENGLISH));
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java
index ce87b06..a0eddad 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java
@@ -30,6 +30,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.List;
+import java.util.Locale;
 
 /** A collection of block storage policies. */
 public class BlockStoragePolicySuite {
@@ -131,7 +132,8 @@ public class BlockStoragePolicySuite {
   }
 
   public static String buildXAttrName() {
-    return XAttrNS.toString().toLowerCase() + "." + STORAGE_POLICY_XATTR_NAME;
+    return XAttrNS.toString().toLowerCase(Locale.ENGLISH) + "."
+        + STORAGE_POLICY_XATTR_NAME;
   }
 
   public static XAttr buildXAttr(byte policyId) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
index 9bba2c9..3674d2c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.common;
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.Locale;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -53,7 +54,7 @@ public final class HdfsServerConstants {
 
     public String getOptionString() {
       return StartupOption.ROLLINGUPGRADE.getName() + " "
-          + name().toLowerCase();
+          + name().toLowerCase(Locale.ENGLISH);
     }
 
     public boolean matches(StartupOption option) {
@@ -76,7 +77,7 @@ public final class HdfsServerConstants {
     public static String getAllOptionString() {
       final StringBuilder b = new StringBuilder("<");
       for(RollingUpgradeStartupOption opt : VALUES) {
-        b.append(opt.name().toLowerCase()).append("|");
+        b.append(opt.name().toLowerCase(Locale.ENGLISH)).append("|");
       }
       b.setCharAt(b.length() - 1, '>');
       return b.toString();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
index feb5ac9..2b2da5c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hdfs.server.datanode;
 
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 import java.io.File;
@@ -88,7 +89,8 @@ public class StorageLocation {
       String classString = matcher.group(1);
       location = matcher.group(2);
       if (!classString.isEmpty()) {
-        storageType = StorageType.valueOf(classString.toUpperCase());
+        storageType = StorageType.valueOf(
+            classString.toUpperCase(Locale.ENGLISH));
       }
     }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
index dab10d3..3dcce5f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
@@ -75,6 +75,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.EnumMap;
+import java.util.Locale;
 import java.util.List;
 import java.util.zip.CheckedInputStream;
 import java.util.zip.Checksum;
@@ -4348,7 +4349,7 @@ public abstract class FSEditLogOp {
 
     public RollingUpgradeOp(FSEditLogOpCodes code, String name) {
       super(code);
-      this.name = name.toUpperCase();
+      this.name = name.toUpperCase(Locale.ENGLISH);
     }
 
     static RollingUpgradeOp getStartInstance(OpInstanceCache cache) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
index d115acc..fe185f6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.namenode;
 
 import com.google.common.base.Objects;
 import org.apache.hadoop.hdfs.StorageType;
+import java.util.Locale;
 
  public class QuotaByStorageTypeEntry {
    private StorageType type;
@@ -53,7 +54,7 @@ import org.apache.hadoop.hdfs.StorageType;
    public String toString() {
      StringBuilder sb = new StringBuilder();
      assert (type != null);
-     sb.append(type.toString().toLowerCase());
+     sb.append(type.toString().toLowerCase(Locale.ENGLISH));
      sb.append(':');
      sb.append(quota);
      return sb.toString();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
index 83e6426..1157bb8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
@@ -587,7 +587,7 @@ public class SecondaryNameNode implements Runnable,
       return 0;
     }
     
-    String cmd = opts.getCommand().toString().toLowerCase();
+    String cmd = opts.getCommand().toString().toLowerCase(Locale.ENGLISH);
     
     int exitCode = 0;
     try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
index 92a16cd..c380901 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
@@ -24,6 +24,7 @@ import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -79,19 +80,19 @@ public class GetConf extends Configured implements Tool {
     private static final Map<String, CommandHandler> map;
     static  {
       map = new HashMap<String, CommandHandler>();
-      map.put(NAMENODE.getName().toLowerCase(), 
+      map.put(NAMENODE.getName().toLowerCase(Locale.ENGLISH),
           new NameNodesCommandHandler());
-      map.put(SECONDARY.getName().toLowerCase(),
+      map.put(SECONDARY.getName().toLowerCase(Locale.ENGLISH),
           new SecondaryNameNodesCommandHandler());
-      map.put(BACKUP.getName().toLowerCase(), 
+      map.put(BACKUP.getName().toLowerCase(Locale.ENGLISH),
           new BackupNodesCommandHandler());
-      map.put(INCLUDE_FILE.getName().toLowerCase(), 
+      map.put(INCLUDE_FILE.getName().toLowerCase(Locale.ENGLISH),
           new CommandHandler(DFSConfigKeys.DFS_HOSTS));
-      map.put(EXCLUDE_FILE.getName().toLowerCase(),
+      map.put(EXCLUDE_FILE.getName().toLowerCase(Locale.ENGLISH),
           new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
-      map.put(NNRPCADDRESSES.getName().toLowerCase(),
+      map.put(NNRPCADDRESSES.getName().toLowerCase(Locale.ENGLISH),
           new NNRpcAddressesCommandHandler());
-      map.put(CONFKEY.getName().toLowerCase(),
+      map.put(CONFKEY.getName().toLowerCase(Locale.ENGLISH),
           new PrintConfKeyCommandHandler());
     }
     
@@ -116,7 +117,7 @@ public class GetConf extends Configured implements Tool {
     }
     
     public static CommandHandler getHandler(String cmd) {
-      return map.get(cmd.toLowerCase());
+      return map.get(cmd.toLowerCase(Locale.ENGLISH));
     }
   }
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
index c4b8424..aa542d3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.tools.offlineEditsViewer;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.util.Locale;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -43,7 +44,7 @@ public class OfflineEditsVisitorFactory {
    */
   static public OfflineEditsVisitor getEditsVisitor(String filename,
     String processor, boolean printToScreen) throws IOException {
-    if(processor.toLowerCase().equals("binary")) {
+    if(processor.toLowerCase(Locale.ENGLISH).equals("binary")) {
       return new BinaryEditsVisitor(filename);
     }
     OfflineEditsVisitor vis;
@@ -59,9 +60,9 @@ public class OfflineEditsVisitorFactory {
         outs[1] = System.out;
         out = new TeeOutputStream(outs);
       }
-      if(processor.toLowerCase().equals("xml")) {
+      if(processor.toLowerCase(Locale.ENGLISH).equals("xml")) {
         vis = new XmlEditsVisitor(out);
-      } else if(processor.toLowerCase().equals("stats")) {
+      } else if(processor.toLowerCase(Locale.ENGLISH).equals("stats")) {
         vis = new StatisticsEditsVisitor(out);
       } else {
         throw new IOException("Unknown proccesor " + processor +

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
index 43fcd69..aa36516 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hdfs.web.JsonUtil;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
@@ -141,7 +142,7 @@ class FSImageHandler extends SimpleChannelInboundHandler<HttpRequest> {
   private static String getOp(QueryStringDecoder decoder) {
     Map<String, List<String>> parameters = decoder.parameters();
     return parameters.containsKey("op")
-            ? parameters.get("op").get(0).toUpperCase() : null;
+            ? parameters.get("op").get(0).toUpperCase(Locale.ENGLISH) : null;
   }
 
   private static String getPath(QueryStringDecoder decoder)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
index b6ff4b6..1a0aaeb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
@@ -23,6 +23,7 @@ import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Properties;
 
@@ -96,7 +97,7 @@ public class AuthFilter extends AuthenticationFilter {
 
     final Map<String, List<String>> m = new HashMap<String, List<String>>();
     for(Map.Entry<String, String[]> entry : original.entrySet()) {
-      final String key = entry.getKey().toLowerCase();
+      final String key = entry.getKey().toLowerCase(Locale.ENGLISH);
       List<String> strings = m.get(key);
       if (strings == null) {
         strings = new ArrayList<String>();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
index 2ae3445..6a18377 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.web;
 
 import java.net.URI;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import javax.ws.rs.core.MultivaluedMap;
@@ -75,7 +76,7 @@ public class ParamFilter implements ResourceFilter {
       final MultivaluedMap<String, String> parameters) {
     UriBuilder b = UriBuilder.fromUri(uri).replaceQuery("");
     for(Map.Entry<String, List<String>> e : parameters.entrySet()) {
-      final String key = e.getKey().toLowerCase();
+      final String key = e.getKey().toLowerCase(Locale.ENGLISH);
       for(String v : e.getValue()) {
         b = b.queryParam(key, v);
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 938f7c7..b1026c0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -32,6 +32,7 @@ import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.StringTokenizer;
 
@@ -1242,7 +1243,7 @@ public class WebHdfsFileSystem extends FileSystem
     if (query == null) {
       return url;
     }
-    final String lower = query.toLowerCase();
+    final String lower = query.toLowerCase(Locale.ENGLISH);
     if (!lower.startsWith(OFFSET_PARAM_PREFIX)
         && !lower.contains("&" + OFFSET_PARAM_PREFIX)) {
       return url;
@@ -1253,7 +1254,7 @@ public class WebHdfsFileSystem extends FileSystem
     for(final StringTokenizer st = new StringTokenizer(query, "&");
         st.hasMoreTokens();) {
       final String token = st.nextToken();
-      if (!token.toLowerCase().startsWith(OFFSET_PARAM_PREFIX)) {
+      if (!token.toLowerCase(Locale.ENGLISH).startsWith(OFFSET_PARAM_PREFIX)) {
         if (b == null) {
           b = new StringBuilder("?").append(token);
         } else {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
index 1703e3b..6b4ec2c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.web.resources;
 
 import java.util.Arrays;
+import java.util.Locale;
 
 abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>> {
   EnumParam(final Domain<E> domain, final E value) {
@@ -40,7 +41,7 @@ abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>
 
     @Override
     final E parse(final String str) {
-      return Enum.valueOf(enumClass, str.toUpperCase());
+      return Enum.valueOf(enumClass, str.toUpperCase(Locale.ENGLISH));
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
index 5adb5a6..23ba16c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.web.resources;
 import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.Iterator;
+import java.util.Locale;
 
 abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSetParam.Domain<E>> {
   /** Convert an EnumSet to a string of comma separated values. */
@@ -82,7 +83,8 @@ abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSet
           i = j > 0 ? j + 1 : 0;
           j = str.indexOf(',', i);
           final String sub = j >= 0? str.substring(i, j): str.substring(i);
-          set.add(Enum.valueOf(enumClass, sub.trim().toUpperCase()));
+          set.add(
+              Enum.valueOf(enumClass, sub.trim().toUpperCase(Locale.ENGLISH)));
         }
       }
       return set;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/946456c6/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
index ac6acf9..db8f290 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java
@@ -25,6 +25,7 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
 
 import java.util.ArrayList;
+import java.util.Locale;
 
 import org.apache.hadoop.hdfs.protocol.SnapshotException;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
@@ -70,7 +71,7 @@ public class TestSnapshotManager {
       Assert.fail("Expected SnapshotException not thrown");
     } catch (SnapshotException se) {
       Assert.assertTrue(
-          se.getMessage().toLowerCase().contains("rollover"));
+          se.getMessage().toLowerCase(Locale.ENGLISH).contains("rollover"));
     }
 
     // Delete a snapshot to free up a slot.
@@ -86,7 +87,7 @@ public class TestSnapshotManager {
       Assert.fail("Expected SnapshotException not thrown");
     } catch (SnapshotException se) {
       Assert.assertTrue(
-          se.getMessage().toLowerCase().contains("rollover"));
+          se.getMessage().toLowerCase(Locale.ENGLISH).contains("rollover"));
     }
   }
 }
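
The same two-argument overloads appear at every call site above. A hypothetical helper (the class and method names below are illustrative only, not part of this commit) sketches how the conversion could be centralized so new call sites cannot fall back to the default locale by accident:

    import java.util.Locale;

    // Illustrative only: a small utility that pins case conversion to
    // Locale.ENGLISH so callers never pick up the JVM default locale.
    final class EnglishCase {
      private EnglishCase() {}

      static String lower(String s) {
        return s.toLowerCase(Locale.ENGLISH);
      }

      static String upper(String s) {
        return s.toUpperCase(Locale.ENGLISH);
      }
    }

With such a helper, a call like StorageType.valueOf(s.toUpperCase(Locale.ENGLISH)) would read StorageType.valueOf(EnglishCase.upper(s)), and the locale choice would live in one place.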
