HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d1c6accb Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d1c6accb Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d1c6accb Branch: refs/heads/YARN-2928 Commit: d1c6accb6f87b08975175580e15f1ff1fe29ab04 Parents: b442aee Author: Tsuyoshi Ozawa <oz...@apache.org> Authored: Tue Mar 3 14:12:34 2015 +0900 Committer: Tsuyoshi Ozawa <oz...@apache.org> Committed: Tue Mar 3 14:17:52 2015 +0900 ---------------------------------------------------------------------- .../classification/tools/StabilityOptions.java | 5 ++- .../AltKerberosAuthenticationHandler.java | 6 ++- .../authentication/util/TestKerberosUtil.java | 14 ++++--- hadoop-common-project/hadoop-common/CHANGES.txt | 2 + .../org/apache/hadoop/conf/Configuration.java | 6 +-- .../org/apache/hadoop/crypto/CipherSuite.java | 3 +- .../hadoop/crypto/key/JavaKeyStoreProvider.java | 3 +- .../java/org/apache/hadoop/fs/FileSystem.java | 7 +++- .../java/org/apache/hadoop/fs/StorageType.java | 3 +- .../apache/hadoop/fs/permission/AclEntry.java | 5 ++- .../apache/hadoop/fs/shell/XAttrCommands.java | 2 +- .../org/apache/hadoop/fs/shell/find/Name.java | 5 ++- .../io/compress/CompressionCodecFactory.java | 7 ++-- .../hadoop/metrics2/impl/MetricsConfig.java | 7 ++-- .../hadoop/metrics2/impl/MetricsSystemImpl.java | 5 ++- .../hadoop/security/SaslPropertiesResolver.java | 3 +- .../apache/hadoop/security/SecurityUtil.java | 12 +++--- .../hadoop/security/WhitelistBasedResolver.java | 3 +- .../security/ssl/FileBasedKeyStoresFactory.java | 4 +- .../apache/hadoop/security/ssl/SSLFactory.java | 5 ++- .../security/ssl/SSLHostnameVerifier.java | 10 +++-- .../DelegationTokenAuthenticationHandler.java | 3 +- .../web/DelegationTokenAuthenticator.java | 3 +- .../apache/hadoop/util/ComparableVersion.java | 3 +- .../org/apache/hadoop/util/StringUtils.java | 40 +++++++++++++++++++- 
.../hadoop/fs/FileSystemContractBaseTest.java | 4 +- .../java/org/apache/hadoop/ipc/TestIPC.java | 2 +- .../java/org/apache/hadoop/ipc/TestSaslRPC.java | 2 +- .../hadoop/security/TestSecurityUtil.java | 10 +++-- .../security/TestUserGroupInformation.java | 5 ++- .../hadoop/test/TimedOutTestsListener.java | 6 ++- .../org/apache/hadoop/util/TestStringUtils.java | 21 ++++++++++ .../org/apache/hadoop/util/TestWinUtils.java | 6 ++- .../java/org/apache/hadoop/nfs/NfsExports.java | 5 ++- .../server/CheckUploadContentTypeFilter.java | 4 +- .../hadoop/fs/http/server/FSOperations.java | 7 +++- .../http/server/HttpFSParametersProvider.java | 4 +- .../org/apache/hadoop/lib/server/Server.java | 3 +- .../service/hadoop/FileSystemAccessService.java | 6 ++- .../org/apache/hadoop/lib/wsrs/EnumParam.java | 2 +- .../apache/hadoop/lib/wsrs/EnumSetParam.java | 3 +- .../hadoop/lib/wsrs/ParametersProvider.java | 3 +- .../org/apache/hadoop/hdfs/XAttrHelper.java | 19 ++++++---- .../hadoop/hdfs/protocol/HdfsConstants.java | 3 +- .../BlockStoragePolicySuite.java | 4 +- .../hdfs/server/common/HdfsServerConstants.java | 5 ++- .../hdfs/server/datanode/StorageLocation.java | 4 +- .../hdfs/server/namenode/FSEditLogOp.java | 3 +- .../namenode/QuotaByStorageTypeEntry.java | 3 +- .../hdfs/server/namenode/SecondaryNameNode.java | 2 +- .../org/apache/hadoop/hdfs/tools/GetConf.java | 17 +++++---- .../OfflineEditsVisitorFactory.java | 7 ++-- .../offlineImageViewer/FSImageHandler.java | 4 +- .../org/apache/hadoop/hdfs/web/AuthFilter.java | 3 +- .../org/apache/hadoop/hdfs/web/ParamFilter.java | 3 +- .../hadoop/hdfs/web/WebHdfsFileSystem.java | 5 ++- .../hadoop/hdfs/web/resources/EnumParam.java | 3 +- .../hadoop/hdfs/web/resources/EnumSetParam.java | 3 +- .../namenode/snapshot/TestSnapshotManager.java | 6 +-- .../jobhistory/JobHistoryEventHandler.java | 3 +- .../mapreduce/v2/app/webapp/AppController.java | 6 +-- .../apache/hadoop/mapreduce/TypeConverter.java | 3 +- 
.../apache/hadoop/mapreduce/v2/util/MRApps.java | 4 +- .../hadoop/mapreduce/TestTypeConverter.java | 6 ++- .../java/org/apache/hadoop/mapred/Task.java | 2 +- .../counters/FileSystemCounterGroup.java | 4 +- .../mapreduce/filecache/DistributedCache.java | 4 +- .../hadoop/mapreduce/lib/db/DBInputFormat.java | 5 ++- .../org/apache/hadoop/mapreduce/tools/CLI.java | 9 +++-- .../java/org/apache/hadoop/fs/TestDFSIO.java | 18 ++++----- .../org/apache/hadoop/fs/TestFileSystem.java | 4 +- .../org/apache/hadoop/fs/slive/Constants.java | 6 ++- .../apache/hadoop/fs/slive/OperationData.java | 3 +- .../apache/hadoop/fs/slive/OperationOutput.java | 4 +- .../org/apache/hadoop/fs/slive/SliveTest.java | 3 +- .../java/org/apache/hadoop/io/FileBench.java | 17 +++++---- .../org/apache/hadoop/mapred/TestMapRed.java | 3 +- .../apache/hadoop/examples/DBCountPageView.java | 2 +- .../plugin/versioninfo/VersionInfoMojo.java | 4 +- .../fs/azure/AzureNativeFileSystemStore.java | 4 +- .../apache/hadoop/tools/util/DistCpUtils.java | 12 ++++-- .../java/org/apache/hadoop/tools/DistCpV1.java | 4 +- .../gridmix/GridmixJobSubmissionPolicy.java | 3 +- .../TestSwiftFileSystemExtendedContract.java | 4 +- .../hadoop/tools/rumen/HadoopLogsAnalyzer.java | 33 ++++++++-------- .../apache/hadoop/tools/rumen/JobBuilder.java | 2 +- .../apache/hadoop/tools/rumen/LoggedTask.java | 3 +- .../hadoop/tools/rumen/LoggedTaskAttempt.java | 3 +- .../apache/hadoop/streaming/Environment.java | 3 +- .../hadoop/yarn/client/cli/ApplicationCLI.java | 7 ++-- .../apache/hadoop/yarn/client/cli/NodeCLI.java | 3 +- .../impl/pb/GetApplicationsRequestPBImpl.java | 6 ++- .../pb/ApplicationSubmissionContextPBImpl.java | 3 +- .../org/apache/hadoop/yarn/util/FSDownload.java | 6 +-- .../hadoop/yarn/webapp/hamlet/HamletGen.java | 6 +-- .../registry/client/binding/RegistryUtils.java | 3 +- .../webapp/AHSWebServices.java | 4 +- .../timeline/webapp/TimelineWebServices.java | 3 +- .../hadoop/yarn/server/webapp/WebServices.java | 18 +++++---- 
.../server/resourcemanager/ClientRMService.java | 3 +- .../resource/ResourceWeights.java | 3 +- .../CapacitySchedulerConfiguration.java | 4 +- .../fair/FairSchedulerConfiguration.java | 3 +- .../scheduler/fair/SchedulingPolicy.java | 3 +- .../resourcemanager/webapp/NodesPage.java | 2 +- .../resourcemanager/webapp/RMWebServices.java | 20 ++++++---- 106 files changed, 407 insertions(+), 224 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java index dbce31e..657dbce 100644 --- a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java +++ b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java @@ -21,6 +21,7 @@ import com.sun.javadoc.DocErrorReporter; import java.util.ArrayList; import java.util.List; +import java.util.Locale; class StabilityOptions { public static final String STABLE_OPTION = "-stable"; @@ -28,7 +29,7 @@ class StabilityOptions { public static final String UNSTABLE_OPTION = "-unstable"; public static Integer optionLength(String option) { - String opt = option.toLowerCase(); + String opt = option.toLowerCase(Locale.ENGLISH); if (opt.equals(UNSTABLE_OPTION)) return 1; if (opt.equals(EVOLVING_OPTION)) return 1; if (opt.equals(STABLE_OPTION)) return 1; @@ -38,7 +39,7 @@ class StabilityOptions { public static void validOptions(String[][] options, DocErrorReporter reporter) { for (int i = 0; i < options.length; i++) { - 
String opt = options[i][0].toLowerCase(); + String opt = options[i][0].toLowerCase(Locale.ENGLISH); if (opt.equals(UNSTABLE_OPTION)) { RootDocProcessor.stability = UNSTABLE_OPTION; } else if (opt.equals(EVOLVING_OPTION)) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java index 987330f..dae3b50 100644 --- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java +++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java @@ -14,6 +14,7 @@ package org.apache.hadoop.security.authentication.server; import java.io.IOException; +import java.util.Locale; import java.util.Properties; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; @@ -68,7 +69,8 @@ public abstract class AltKerberosAuthenticationHandler NON_BROWSER_USER_AGENTS, NON_BROWSER_USER_AGENTS_DEFAULT) .split("\\W*,\\W*"); for (int i = 0; i < nonBrowserUserAgents.length; i++) { - nonBrowserUserAgents[i] = nonBrowserUserAgents[i].toLowerCase(); + nonBrowserUserAgents[i] = + nonBrowserUserAgents[i].toLowerCase(Locale.ENGLISH); } } @@ -120,7 +122,7 @@ public abstract class AltKerberosAuthenticationHandler if (userAgent == null) { return false; } - userAgent = userAgent.toLowerCase(); + userAgent = userAgent.toLowerCase(Locale.ENGLISH); boolean isBrowser = true; for (String nonBrowserUserAgent : nonBrowserUserAgents) { if 
(userAgent.contains(nonBrowserUserAgent)) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java index b0e8f04..89e07d1 100644 --- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java +++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Locale; import java.util.regex.Pattern; import org.apache.directory.server.kerberos.shared.keytab.Keytab; @@ -58,24 +59,25 @@ public class TestKerberosUtil { // send null hostname Assert.assertEquals("When no hostname is sent", - service + "/" + localHostname.toLowerCase(), + service + "/" + localHostname.toLowerCase(Locale.ENGLISH), KerberosUtil.getServicePrincipal(service, null)); // send empty hostname Assert.assertEquals("When empty hostname is sent", - service + "/" + localHostname.toLowerCase(), + service + "/" + localHostname.toLowerCase(Locale.ENGLISH), KerberosUtil.getServicePrincipal(service, "")); // send 0.0.0.0 hostname Assert.assertEquals("When 0.0.0.0 hostname is sent", - service + "/" + localHostname.toLowerCase(), + service + "/" + localHostname.toLowerCase(Locale.ENGLISH), KerberosUtil.getServicePrincipal(service, "0.0.0.0")); // send uppercase hostname Assert.assertEquals("When uppercase hostname is sent", - service + "/" + testHost.toLowerCase(), + service + "/" + testHost.toLowerCase(Locale.ENGLISH), 
KerberosUtil.getServicePrincipal(service, testHost)); // send lowercase hostname Assert.assertEquals("When lowercase hostname is sent", - service + "/" + testHost.toLowerCase(), - KerberosUtil.getServicePrincipal(service, testHost.toLowerCase())); + service + "/" + testHost.toLowerCase(Locale.ENGLISH), + KerberosUtil.getServicePrincipal( + service, testHost.toLowerCase(Locale.ENGLISH))); } @Test http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/CHANGES.txt ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index ebe23c7..11785f2 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -409,6 +409,8 @@ Trunk (Unreleased) HADOOP-10774. Update KerberosTestUtils for hadoop-auth tests when using IBM Java (sangamesh via aw) + HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa) + OPTIMIZATIONS HADOOP-7761. Improve the performance of raw comparisons. 
(todd) http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java index 02654b7..753f515 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java @@ -1451,11 +1451,9 @@ public class Configuration implements Iterable<Map.Entry<String,String>>, return defaultValue; } - valueString = valueString.toLowerCase(); - - if ("true".equals(valueString)) + if (StringUtils.equalsIgnoreCase("true", valueString)) return true; - else if ("false".equals(valueString)) + else if (StringUtils.equalsIgnoreCase("false", valueString)) return false; else return defaultValue; } http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java index c9355d7..a811aa7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java @@ -19,6 +19,7 @@ package org.apache.hadoop.crypto; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.util.StringUtils; /** * Defines properties of a CipherSuite. 
Modeled after the ciphers in @@ -97,7 +98,7 @@ public enum CipherSuite { String[] parts = name.split("/"); StringBuilder suffix = new StringBuilder(); for (String part : parts) { - suffix.append(".").append(part.toLowerCase()); + suffix.append(".").append(StringUtils.toLowerCase(part)); } return suffix.toString(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java index bfec1ef..c0d510d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java @@ -28,6 +28,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.security.ProviderUtils; +import org.apache.hadoop.util.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -422,7 +423,7 @@ public class JavaKeyStoreProvider extends KeyProvider { @Override public KeyVersion createKey(String name, byte[] material, Options options) throws IOException { - Preconditions.checkArgument(name.equals(name.toLowerCase()), + Preconditions.checkArgument(name.equals(StringUtils.toLowerCase(name)), "Uppercase key names are unsupported: %s", name); writeLock.lock(); try { http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java ---------------------------------------------------------------------- diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java index cfa5198..42434f1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java @@ -65,6 +65,7 @@ import org.apache.hadoop.util.DataChecksum; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.ShutdownHookManager; +import org.apache.hadoop.util.StringUtils; import com.google.common.annotations.VisibleForTesting; @@ -2795,8 +2796,10 @@ public abstract class FileSystem extends Configured implements Closeable { } Key(URI uri, Configuration conf, long unique) throws IOException { - scheme = uri.getScheme()==null?"":uri.getScheme().toLowerCase(); - authority = uri.getAuthority()==null?"":uri.getAuthority().toLowerCase(); + scheme = uri.getScheme()==null ? + "" : StringUtils.toLowerCase(uri.getScheme()); + authority = uri.getAuthority()==null ? 
+ "" : StringUtils.toLowerCase(uri.getAuthority()); this.unique = unique; this.ugi = UserGroupInformation.getCurrentUser(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java index e306502..68069d7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java @@ -24,6 +24,7 @@ import java.util.List; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.util.StringUtils; /** * Defines the types of supported storage media. 
The default storage @@ -78,7 +79,7 @@ public enum StorageType { } public static StorageType parseStorageType(String s) { - return StorageType.valueOf(s.toUpperCase()); + return StorageType.valueOf(StringUtils.toUpperCase(s)); } private static List<StorageType> getNonTransientTypes() { http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java index b9def64..45402f8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java @@ -106,7 +106,7 @@ public class AclEntry { sb.append("default:"); } if (type != null) { - sb.append(type.toString().toLowerCase()); + sb.append(StringUtils.toLowerCase(type.toString())); } sb.append(':'); if (name != null) { @@ -263,7 +263,8 @@ public class AclEntry { AclEntryType aclType = null; try { - aclType = Enum.valueOf(AclEntryType.class, split[index].toUpperCase()); + aclType = Enum.valueOf( + AclEntryType.class, StringUtils.toUpperCase(split[index])); builder.setType(aclType); index++; } catch (IllegalArgumentException iae) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java index 4efda87..d55c80b 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java @@ -79,7 +79,7 @@ class XAttrCommands extends FsCommand { String en = StringUtils.popOptionWithArgument("-e", args); if (en != null) { try { - encoding = enValueOfFunc.apply(en.toUpperCase(Locale.ENGLISH)); + encoding = enValueOfFunc.apply(StringUtils.toUpperCase(en)); } catch (IllegalArgumentException e) { throw new IllegalArgumentException( "Invalid/unsupported encoding option specified: " + en); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java index 88314c6..c89daa9 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java @@ -22,6 +22,7 @@ import java.util.Deque; import org.apache.hadoop.fs.GlobPattern; import org.apache.hadoop.fs.shell.PathData; +import org.apache.hadoop.util.StringUtils; /** * Implements the -name expression for the @@ -73,7 +74,7 @@ final class Name extends BaseExpression { public void prepare() throws IOException { String argPattern = getArgument(1); if (!caseSensitive) { - argPattern = argPattern.toLowerCase(); + argPattern = StringUtils.toLowerCase(argPattern); } globPattern = new GlobPattern(argPattern); } @@ -82,7 +83,7 @@ final class Name extends BaseExpression { public Result apply(PathData item, int depth) throws IOException { String name = getPath(item).getName(); if (!caseSensitive) { - name = name.toLowerCase(); + name = 
StringUtils.toLowerCase(name); } if (globPattern.matches(name)) { return Result.PASS; http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java index 7476a15..8fff75d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java @@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.Path; import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.util.StringUtils; /** * A factory that will find the correct codec for a given filename. 
@@ -66,10 +67,10 @@ public class CompressionCodecFactory { codecsByClassName.put(codec.getClass().getCanonicalName(), codec); String codecName = codec.getClass().getSimpleName(); - codecsByName.put(codecName.toLowerCase(), codec); + codecsByName.put(StringUtils.toLowerCase(codecName), codec); if (codecName.endsWith("Codec")) { codecName = codecName.substring(0, codecName.length() - "Codec".length()); - codecsByName.put(codecName.toLowerCase(), codec); + codecsByName.put(StringUtils.toLowerCase(codecName), codec); } } @@ -246,7 +247,7 @@ public class CompressionCodecFactory { if (codec == null) { // trying to get the codec by name in case the name was specified // instead a class - codec = codecsByName.get(codecName.toLowerCase()); + codec = codecsByName.get(StringUtils.toLowerCase(codecName)); } return codec; } http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java index 167205e..cbe60b5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java @@ -44,6 +44,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsFilter; import org.apache.hadoop.metrics2.MetricsPlugin; import org.apache.hadoop.metrics2.filter.GlobFilter; +import org.apache.hadoop.util.StringUtils; /** * Metrics configuration for MetricsSystemImpl @@ -85,12 +86,12 @@ class MetricsConfig extends SubsetConfiguration { private ClassLoader pluginLoader; MetricsConfig(Configuration c, String prefix) { - super(c, 
prefix.toLowerCase(Locale.US), "."); + super(c, StringUtils.toLowerCase(prefix), "."); } static MetricsConfig create(String prefix) { - return loadFirst(prefix, "hadoop-metrics2-"+ prefix.toLowerCase(Locale.US) - +".properties", DEFAULT_FILE_NAME); + return loadFirst(prefix, "hadoop-metrics2-" + + StringUtils.toLowerCase(prefix) + ".properties", DEFAULT_FILE_NAME); } static MetricsConfig create(String prefix, String... fileNames) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java index 32b00f3..a94d814 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java @@ -61,6 +61,7 @@ import org.apache.hadoop.metrics2.lib.MetricsRegistry; import org.apache.hadoop.metrics2.lib.MetricsSourceBuilder; import org.apache.hadoop.metrics2.lib.MutableStat; import org.apache.hadoop.metrics2.util.MBeans; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; /** @@ -616,7 +617,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource { LOG.debug("from environment variable: "+ System.getenv(MS_INIT_MODE_KEY)); String m = System.getProperty(MS_INIT_MODE_KEY); String m2 = m == null ? System.getenv(MS_INIT_MODE_KEY) : m; - return InitMode.valueOf((m2 == null ? InitMode.NORMAL.name() : m2) - .toUpperCase(Locale.US)); + return InitMode.valueOf( + StringUtils.toUpperCase((m2 == null ? 
InitMode.NORMAL.name() : m2))); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java index 0b49cfb..305443c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java @@ -66,7 +66,8 @@ public class SaslPropertiesResolver implements Configurable{ CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION, QualityOfProtection.AUTHENTICATION.toString()); for (int i=0; i < qop.length; i++) { - qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase(Locale.ENGLISH)).getSaslQop(); + qop[i] = QualityOfProtection.valueOf( + StringUtils.toUpperCase(qop[i])).getSaslQop(); } properties.put(Sasl.QOP, StringUtils.join(",", qop)); properties.put(Sasl.SERVER_AUTH, "true"); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java index 7cbee26..eddf98d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java @@ -27,7 +27,6 @@ import java.security.PrivilegedAction; import 
java.security.PrivilegedExceptionAction; import java.util.Arrays; import java.util.List; -import java.util.Locale; import java.util.ServiceLoader; import javax.security.auth.kerberos.KerberosPrincipal; @@ -44,6 +43,7 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenInfo; +import org.apache.hadoop.util.StringUtils; //this will need to be replaced someday when there is a suitable replacement @@ -182,7 +182,8 @@ public class SecurityUtil { if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) { fqdn = getLocalHostName(); } - return components[0] + "/" + fqdn.toLowerCase(Locale.US) + "@" + components[2]; + return components[0] + "/" + + StringUtils.toLowerCase(fqdn) + "@" + components[2]; } static String getLocalHostName() throws UnknownHostException { @@ -379,7 +380,7 @@ public class SecurityUtil { } host = addr.getAddress().getHostAddress(); } else { - host = addr.getHostName().toLowerCase(); + host = StringUtils.toLowerCase(addr.getHostName()); } return new Text(host + ":" + addr.getPort()); } @@ -606,7 +607,8 @@ public class SecurityUtil { public static AuthenticationMethod getAuthenticationMethod(Configuration conf) { String value = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple"); try { - return Enum.valueOf(AuthenticationMethod.class, value.toUpperCase(Locale.ENGLISH)); + return Enum.valueOf(AuthenticationMethod.class, + StringUtils.toUpperCase(value)); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("Invalid attribute value for " + HADOOP_SECURITY_AUTHENTICATION + " of " + value); @@ -619,7 +621,7 @@ public class SecurityUtil { authenticationMethod = AuthenticationMethod.SIMPLE; } conf.set(HADOOP_SECURITY_AUTHENTICATION, - authenticationMethod.toString().toLowerCase(Locale.ENGLISH)); + StringUtils.toLowerCase(authenticationMethod.toString())); } /* 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java index dc0815e..8d4df64 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java @@ -138,7 +138,8 @@ public class WhitelistBasedResolver extends SaslPropertiesResolver { QualityOfProtection.PRIVACY.toString()); for (int i=0; i < qop.length; i++) { - qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase()).getSaslQop(); + qop[i] = QualityOfProtection.valueOf( + StringUtils.toUpperCase(qop[i])).getSaslQop(); } saslProps.put(Sasl.QOP, StringUtils.join(",", qop)); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java index 4b81e17..609c71f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java @@ -23,6 +23,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import 
org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.util.StringUtils; import javax.net.ssl.KeyManager; import javax.net.ssl.KeyManagerFactory; @@ -94,7 +95,8 @@ public class FileBasedKeyStoresFactory implements KeyStoresFactory { @VisibleForTesting public static String resolvePropertyName(SSLFactory.Mode mode, String template) { - return MessageFormat.format(template, mode.toString().toLowerCase()); + return MessageFormat.format( + template, StringUtils.toLowerCase(mode.toString())); } /** http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java index bbea33b..edec347 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java @@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.authentication.client.ConnectionConfigurator; import org.apache.hadoop.util.ReflectionUtils; +import org.apache.hadoop.util.StringUtils; import static org.apache.hadoop.util.PlatformName.IBM_JAVA; import javax.net.ssl.HostnameVerifier; @@ -137,8 +138,8 @@ public class SSLFactory implements ConnectionConfigurator { private HostnameVerifier getHostnameVerifier(Configuration conf) throws GeneralSecurityException, IOException { - return getHostnameVerifier(conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT"). 
- trim().toUpperCase()); + return getHostnameVerifier(StringUtils.toUpperCase( + conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").trim())); } public static HostnameVerifier getHostnameVerifier(String verifier) http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java index dd5e67b..b5ef2b2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java @@ -52,6 +52,7 @@ import javax.net.ssl.SSLSocket; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.util.StringUtils; /** ************************************************************************ @@ -365,7 +366,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { buf.append('<'); for (int i = 0; i < hosts.length; i++) { String h = hosts[i]; - h = h != null ? h.trim().toLowerCase() : ""; + h = h != null ? StringUtils.toLowerCase(h.trim()) : ""; hosts[i] = h; if (i > 0) { buf.append('/'); @@ -406,7 +407,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { out: for (Iterator<String> it = names.iterator(); it.hasNext();) { // Don't trim the CN, though! - final String cn = it.next().toLowerCase(); + final String cn = StringUtils.toLowerCase(it.next()); // Store CN in StringBuffer in case we need to report an error. 
buf.append(" <"); buf.append(cn); @@ -424,7 +425,8 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { acceptableCountryWildcard(cn); for (int i = 0; i < hosts.length; i++) { - final String hostName = hosts[i].trim().toLowerCase(); + final String hostName = + StringUtils.toLowerCase(hosts[i].trim()); if (doWildcard) { match = hostName.endsWith(cn.substring(1)); if (match && strictWithSubDomains) { @@ -479,7 +481,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier { } public static boolean isLocalhost(String host) { - host = host != null ? host.trim().toLowerCase() : ""; + host = host != null ? StringUtils.toLowerCase(host.trim()) : ""; if (host.startsWith("::1")) { int x = host.lastIndexOf('%'); if (x >= 0) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java index c18b5d3..c498f70 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java @@ -47,6 +47,7 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager; import org.apache.hadoop.util.HttpExceptionUtils; +import org.apache.hadoop.util.StringUtils; 
import org.codehaus.jackson.map.ObjectMapper; import com.google.common.annotations.VisibleForTesting; @@ -169,7 +170,7 @@ public abstract class DelegationTokenAuthenticationHandler boolean requestContinues = true; String op = ServletUtils.getParameter(request, KerberosDelegationTokenAuthenticator.OP_PARAM); - op = (op != null) ? op.toUpperCase() : null; + op = (op != null) ? StringUtils.toUpperCase(op) : null; if (DELEGATION_TOKEN_OPS.contains(op) && !request.getMethod().equals("OPTIONS")) { KerberosDelegationTokenAuthenticator.DelegationTokenOperation dtOp = http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java index d93f7ac..8a3a57f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java @@ -27,6 +27,7 @@ import org.apache.hadoop.security.authentication.client.ConnectionConfigurator; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier; import org.apache.hadoop.util.HttpExceptionUtils; +import org.apache.hadoop.util.StringUtils; import org.codehaus.jackson.map.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -286,7 +287,7 @@ public abstract class DelegationTokenAuthenticator implements Authenticator { HttpExceptionUtils.validateResponse(conn, 
HttpURLConnection.HTTP_OK); if (hasResponse) { String contentType = conn.getHeaderField(CONTENT_TYPE); - contentType = (contentType != null) ? contentType.toLowerCase() + contentType = (contentType != null) ? StringUtils.toLowerCase(contentType) : null; if (contentType != null && contentType.contains(APPLICATION_JSON_MIME)) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java index 65d85f7..9d34518 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java @@ -37,7 +37,6 @@ import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.ListIterator; -import java.util.Locale; import java.util.Properties; import java.util.Stack; @@ -363,7 +362,7 @@ public class ComparableVersion items = new ListItem(); - version = version.toLowerCase( Locale.ENGLISH ); + version = StringUtils.toLowerCase(version); ListItem list = items; http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java index ff8edc3..fc4b0ab 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java @@ -18,6 +18,7 @@ package org.apache.hadoop.util; +import com.google.common.base.Preconditions; import java.io.PrintWriter; import java.io.StringWriter; import java.net.URI; @@ -901,7 +902,7 @@ public class StringUtils { */ public static String camelize(String s) { StringBuilder sb = new StringBuilder(); - String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR, '_'); + String[] words = split(StringUtils.toLowerCase(s), ESCAPE_CHAR, '_'); for (String word : words) sb.append(org.apache.commons.lang.StringUtils.capitalize(word)); @@ -1032,4 +1033,41 @@ public class StringUtils { } return null; } + + /** + * Converts all of the characters in this String to lower case with + * Locale.ENGLISH. + * + * @param str string to be converted + * @return the str, converted to lowercase. + */ + public static String toLowerCase(String str) { + return str.toLowerCase(Locale.ENGLISH); + } + + /** + * Converts all of the characters in this String to upper case with + * Locale.ENGLISH. + * + * @param str string to be converted + * @return the str, converted to uppercase. + */ + public static String toUpperCase(String str) { + return str.toUpperCase(Locale.ENGLISH); + } + + /** + * Compare strings locale-freely by using String#equalsIgnoreCase. + * + * @param s1 Non-null string to be compared + * @param s2 string to be compared; may be null + * @return true if the two strings are equal ignoring case; false otherwise. 
+ */ + public static boolean equalsIgnoreCase(String s1, String s2) { + Preconditions.checkNotNull(s1); + // don't check non-null against s2 to make the semantics same as + // s1.equals(s2) + return s1.equalsIgnoreCase(s2); + } + } http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java index e2005be..2ca81e9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java @@ -20,7 +20,6 @@ package org.apache.hadoop.fs; import java.io.FileNotFoundException; import java.io.IOException; -import java.util.Locale; import junit.framework.TestCase; @@ -28,6 +27,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.util.StringUtils; /** * <p> @@ -527,7 +527,7 @@ public abstract class FileSystemContractBaseTest extends TestCase { } String mixedCaseFilename = "/test/UPPER.TXT"; Path upper = path(mixedCaseFilename); - Path lower = path(mixedCaseFilename.toLowerCase(Locale.ENGLISH)); + Path lower = path(StringUtils.toLowerCase(mixedCaseFilename)); assertFalse("File exists" + upper, fs.exists(upper)); assertFalse("File exists" + lower, fs.exists(lower)); FSDataOutputStream out = fs.create(upper); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java 
---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java index eb19f48..b443011 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java @@ -1296,7 +1296,7 @@ public class TestIPC { StringBuilder hexString = new StringBuilder(); - for (String line : hexdump.toUpperCase().split("\n")) { + for (String line : StringUtils.toUpperCase(hexdump).split("\n")) { hexString.append(line.substring(0, LAST_HEX_COL).replace(" ", "")); } return StringUtils.hexStringToByte(hexString.toString()); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java index 903990b..f6ab380 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java @@ -181,7 +181,7 @@ public class TestSaslRPC { StringBuilder sb = new StringBuilder(); int i = 0; for (QualityOfProtection qop:qops){ - sb.append(qop.name().toLowerCase()); + sb.append(org.apache.hadoop.util.StringUtils.toLowerCase(qop.name())); if (++i < qops.length){ sb.append(","); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java ---------------------------------------------------------------------- diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java index 4616c90..e523e18 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java @@ -18,13 +18,13 @@ package org.apache.hadoop.security; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION; import static org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.*; + import static org.junit.Assert.*; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.URI; -import java.util.Locale; import javax.security.auth.kerberos.KerberosPrincipal; @@ -33,6 +33,7 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; +import org.apache.hadoop.util.StringUtils; import org.junit.BeforeClass; import org.junit.Test; import org.mockito.Mockito; @@ -103,13 +104,14 @@ public class TestSecurityUtil { String realm = "@REALM"; String principalInConf = service + SecurityUtil.HOSTNAME_PATTERN + realm; String hostname = "FooHost"; - String principal = service + hostname.toLowerCase() + realm; + String principal = + service + StringUtils.toLowerCase(hostname) + realm; verify(principalInConf, hostname, principal); } @Test public void testLocalHostNameForNullOrWild() throws Exception { - String local = SecurityUtil.getLocalHostName().toLowerCase(Locale.US); + String local = StringUtils.toLowerCase(SecurityUtil.getLocalHostName()); assertEquals("hdfs/" + local + "@REALM", SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", (String)null)); assertEquals("hdfs/" + local + "@REALM", @@ -260,7 +262,7 @@ public 
class TestSecurityUtil { //LOG.info("address:"+addr+" host:"+host+" ip:"+ip+" port:"+port); SecurityUtil.setTokenServiceUseIp(useIp); - String serviceHost = useIp ? ip : host.toLowerCase(); + String serviceHost = useIp ? ip : StringUtils.toLowerCase(host); Token<?> token = new Token<TokenIdentifier>(); Text service = new Text(serviceHost+":"+port); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java index 48b9b99..5b8eac6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java @@ -26,6 +26,7 @@ import org.apache.hadoop.security.authentication.util.KerberosName; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.util.Shell; +import org.apache.hadoop.util.StringUtils; import org.junit.*; import javax.security.auth.Subject; @@ -213,7 +214,7 @@ public class TestUserGroupInformation { userName = userName.substring(sp + 1); } // user names are case insensitive on Windows. Make consistent - userName = userName.toLowerCase(); + userName = StringUtils.toLowerCase(userName); } // get the groups pp = Runtime.getRuntime().exec(Shell.WINDOWS ? @@ -233,7 +234,7 @@ public class TestUserGroupInformation { String loginUserName = login.getShortUserName(); if(Shell.WINDOWS) { // user names are case insensitive on Windows. 
Make consistent - loginUserName = loginUserName.toLowerCase(); + loginUserName = StringUtils.toLowerCase(loginUserName); } assertEquals(userName, loginUserName); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java index 220ab1d..1bdeddb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java @@ -29,6 +29,7 @@ import java.text.SimpleDateFormat; import java.util.Date; import java.util.Map; +import org.apache.hadoop.util.StringUtils; import org.junit.runner.notification.Failure; import org.junit.runner.notification.RunListener; @@ -93,8 +94,9 @@ public class TimedOutTestsListener extends RunListener { thread.getPriority(), thread.getId(), Thread.State.WAITING.equals(thread.getState()) ? - "in Object.wait()" : thread.getState().name().toLowerCase(), - Thread.State.WAITING.equals(thread.getState()) ? + "in Object.wait()" : + StringUtils.toLowerCase(thread.getState().name()), + Thread.State.WAITING.equals(thread.getState()) ? 
"WAITING (on object monitor)" : thread.getState())); for (StackTraceElement stackTraceElement : e.getValue()) { dump.append("\n at "); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java index 0c930d4..515c3e0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java @@ -18,10 +18,12 @@ package org.apache.hadoop.util; +import java.util.Locale; import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.long2String; import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.string2long; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -412,6 +414,25 @@ public class TestStringUtils extends UnitTestcaseTimeLimit { assertTrue(col.containsAll(Arrays.asList(new String[]{"foo","bar","baz","blah"}))); } + @Test + public void testLowerAndUpperStrings() { + Locale defaultLocale = Locale.getDefault(); + try { + Locale.setDefault(new Locale("tr", "TR")); + String upperStr = "TITLE"; + String lowerStr = "title"; + // Confirming TR locale. + assertNotEquals(lowerStr, upperStr.toLowerCase()); + assertNotEquals(upperStr, lowerStr.toUpperCase()); + // This should be true regardless of locale. 
+ assertEquals(lowerStr, StringUtils.toLowerCase(upperStr)); + assertEquals(upperStr, StringUtils.toUpperCase(lowerStr)); + assertTrue(StringUtils.equalsIgnoreCase(upperStr, lowerStr)); + } finally { + Locale.setDefault(defaultLocale); + } + } + // Benchmark for StringUtils split public static void main(String []args) { final String TO_SPLIT = "foo,bar,baz,blah,blah"; http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java index 2d4e442..8ac6e40 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java @@ -382,8 +382,10 @@ public class TestWinUtils { private void assertOwners(File file, String expectedUser, String expectedGroup) throws IOException { String [] args = lsF(file).trim().split("[\\|]"); - assertEquals(expectedUser.toLowerCase(), args[2].toLowerCase()); - assertEquals(expectedGroup.toLowerCase(), args[3].toLowerCase()); + assertEquals(StringUtils.toLowerCase(expectedUser), + StringUtils.toLowerCase(args[2])); + assertEquals(StringUtils.toLowerCase(expectedGroup), + StringUtils.toLowerCase(args[3])); } @Test (timeout = 30000) http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java index b617ae5..8b6b46a 100644 --- 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java @@ -32,6 +32,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3Constant; import org.apache.hadoop.util.LightWeightCache; import org.apache.hadoop.util.LightWeightGSet; import org.apache.hadoop.util.LightWeightGSet.LinkedElement; +import org.apache.hadoop.util.StringUtils; import com.google.common.base.Preconditions; @@ -359,10 +360,10 @@ public class NfsExports { AccessPrivilege privilege = AccessPrivilege.READ_ONLY; switch (parts.length) { case 1: - host = parts[0].toLowerCase().trim(); + host = StringUtils.toLowerCase(parts[0]).trim(); break; case 2: - host = parts[0].toLowerCase().trim(); + host = StringUtils.toLowerCase(parts[0]).trim(); String option = parts[1].trim(); if ("rw".equalsIgnoreCase(option)) { privilege = AccessPrivilege.READ_WRITE; http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java index 836b4ce..81b0b7a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java @@ -21,6 +21,7 @@ package org.apache.hadoop.fs.http.server; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.http.client.HttpFSFileSystem; +import org.apache.hadoop.util.StringUtils; import javax.servlet.Filter; import 
javax.servlet.FilterChain; @@ -82,7 +83,8 @@ public class CheckUploadContentTypeFilter implements Filter { String method = httpReq.getMethod(); if (method.equals("PUT") || method.equals("POST")) { String op = httpReq.getParameter(HttpFSFileSystem.OP_PARAM); - if (op != null && UPLOAD_OPERATIONS.contains(op.toUpperCase())) { + if (op != null && UPLOAD_OPERATIONS.contains( + StringUtils.toUpperCase(op))) { if ("true".equalsIgnoreCase(httpReq.getParameter(HttpFSParametersProvider.DataParam.NAME))) { String contentType = httpReq.getContentType(); contentTypeOK = http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java index 633589c..11cdb4d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java @@ -34,6 +34,7 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.protocol.AclException; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.lib.service.FileSystemAccess; +import org.apache.hadoop.util.StringUtils; import org.json.simple.JSONArray; import org.json.simple.JSONObject; @@ -439,7 +440,8 @@ public class FSOperations { @Override public JSONObject execute(FileSystem fs) throws IOException { boolean result = fs.truncate(path, newLength); - return toJSON(HttpFSFileSystem.TRUNCATE_JSON.toLowerCase(), result); + return toJSON( + StringUtils.toLowerCase(HttpFSFileSystem.TRUNCATE_JSON), result); } } @@ -568,7 +570,8 @@ public class FSOperations { @Override public 
JSONObject execute(FileSystem fs) throws IOException { boolean deleted = fs.delete(path, recursive); - return toJSON(HttpFSFileSystem.DELETE_JSON.toLowerCase(), deleted); + return toJSON( + StringUtils.toLowerCase(HttpFSFileSystem.DELETE_JSON), deleted); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java index 271f3d9..5c4204a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java @@ -30,6 +30,7 @@ import org.apache.hadoop.lib.wsrs.Param; import org.apache.hadoop.lib.wsrs.ParametersProvider; import org.apache.hadoop.lib.wsrs.ShortParam; import org.apache.hadoop.lib.wsrs.StringParam; +import org.apache.hadoop.util.StringUtils; import javax.ws.rs.ext.Provider; import java.util.HashMap; @@ -168,7 +169,8 @@ public class HttpFSParametersProvider extends ParametersProvider { */ public OperationParam(String operation) { super(NAME, HttpFSFileSystem.Operation.class, - HttpFSFileSystem.Operation.valueOf(operation.toUpperCase())); + HttpFSFileSystem.Operation.valueOf( + StringUtils.toUpperCase(operation))); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java index 5c1bb4f..1a0f9ff 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java @@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.lib.util.Check; import org.apache.hadoop.lib.util.ConfigurationUtils; +import org.apache.hadoop.util.StringUtils; import org.apache.log4j.LogManager; import org.apache.log4j.PropertyConfigurator; import org.slf4j.Logger; @@ -202,7 +203,7 @@ public class Server { * @param config server configuration. */ public Server(String name, String homeDir, String configDir, String logDir, String tempDir, Configuration config) { - this.name = Check.notEmpty(name, "name").trim().toLowerCase(); + this.name = StringUtils.toLowerCase(Check.notEmpty(name, "name").trim()); this.homeDir = Check.notEmpty(homeDir, "homeDir"); this.configDir = Check.notEmpty(configDir, "configDir"); this.logDir = Check.notEmpty(logDir, "logDir"); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java index ccb15a3..88780cb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java @@ -33,6 +33,7 @@ import 
org.apache.hadoop.lib.service.Scheduler; import org.apache.hadoop.lib.util.Check; import org.apache.hadoop.lib.util.ConfigurationUtils; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.VersionInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -254,7 +255,7 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc private Set<String> toLowerCase(Collection<String> collection) { Set<String> set = new HashSet<String>(); for (String value : collection) { - set.add(value.toLowerCase()); + set.add(StringUtils.toLowerCase(value)); } return set; } @@ -300,7 +301,8 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc protected void validateNamenode(String namenode) throws FileSystemAccessException { if (nameNodeWhitelist.size() > 0 && !nameNodeWhitelist.contains("*")) { - if (!nameNodeWhitelist.contains(namenode.toLowerCase())) { + if (!nameNodeWhitelist.contains( + StringUtils.toLowerCase(namenode))) { throw new FileSystemAccessException(FileSystemAccessException.ERROR.H05, namenode, "not in whitelist"); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java index 8baef67..f95a6e6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java @@ -34,7 +34,7 @@ public abstract class EnumParam<E extends Enum<E>> extends Param<E> { @Override protected E parse(String str) throws Exception { - return 
Enum.valueOf(klass, str.toUpperCase()); + return Enum.valueOf(klass, StringUtils.toUpperCase(str)); } @Override http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java index 8d79b71..ba6e5aa 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java @@ -22,6 +22,7 @@ import java.util.EnumSet; import java.util.Iterator; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.util.StringUtils; @InterfaceAudience.Private public abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>> { @@ -37,7 +38,7 @@ public abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>> final EnumSet<E> set = EnumSet.noneOf(klass); if (!str.isEmpty()) { for (String sub : str.split(",")) { - set.add(Enum.valueOf(klass, sub.trim().toUpperCase())); + set.add(Enum.valueOf(klass, StringUtils.toUpperCase(sub.trim()))); } } return set; http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java index 4703a90..c93f8f2 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java @@ -26,6 +26,7 @@ import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable; import com.sun.jersey.spi.inject.Injectable; import com.sun.jersey.spi.inject.InjectableProvider; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.util.StringUtils; import javax.ws.rs.core.Context; import javax.ws.rs.core.MultivaluedMap; @@ -70,7 +71,7 @@ public class ParametersProvider } Enum op; try { - op = Enum.valueOf(enumClass, str.toUpperCase()); + op = Enum.valueOf(enumClass, StringUtils.toUpperCase(str)); } catch (IllegalArgumentException ex) { throw new IllegalArgumentException( MessageFormat.format("Invalid Operation [{0}]", str)); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java index 04364ccf..5cafb3c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java @@ -24,6 +24,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.XAttr; import org.apache.hadoop.fs.XAttr.NameSpace; +import org.apache.hadoop.util.StringUtils; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; @@ -57,16 +58,20 @@ public class XAttrHelper { } NameSpace ns; - final String prefix = name.substring(0, prefixIndex).toLowerCase(); - if 
(prefix.equals(NameSpace.USER.toString().toLowerCase())) { + final String prefix = name.substring(0, prefixIndex); + if (StringUtils.equalsIgnoreCase(prefix, NameSpace.USER.toString())) { ns = NameSpace.USER; - } else if (prefix.equals(NameSpace.TRUSTED.toString().toLowerCase())) { + } else if ( + StringUtils.equalsIgnoreCase(prefix, NameSpace.TRUSTED.toString())) { ns = NameSpace.TRUSTED; - } else if (prefix.equals(NameSpace.SYSTEM.toString().toLowerCase())) { + } else if ( + StringUtils.equalsIgnoreCase(prefix, NameSpace.SYSTEM.toString())) { ns = NameSpace.SYSTEM; - } else if (prefix.equals(NameSpace.SECURITY.toString().toLowerCase())) { + } else if ( + StringUtils.equalsIgnoreCase(prefix, NameSpace.SECURITY.toString())) { ns = NameSpace.SECURITY; - } else if (prefix.equals(NameSpace.RAW.toString().toLowerCase())) { + } else if ( + StringUtils.equalsIgnoreCase(prefix, NameSpace.RAW.toString())) { ns = NameSpace.RAW; } else { throw new HadoopIllegalArgumentException("An XAttr name must be " + @@ -145,7 +150,7 @@ public class XAttrHelper { } String namespace = xAttr.getNameSpace().toString(); - return namespace.toLowerCase() + "." + xAttr.getName(); + return StringUtils.toLowerCase(namespace) + "." 
+ xAttr.getName(); } /** http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java index 54da8eb..7cf8a47 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.server.datanode.DataNodeLayoutVersion; import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion; import org.apache.hadoop.hdfs.server.namenode.FSDirectory; +import org.apache.hadoop.util.StringUtils; /************************************ * Some handy constants @@ -98,7 +99,7 @@ public class HdfsConstants { /** Covert the given String to a RollingUpgradeAction. 
*/ public static RollingUpgradeAction fromString(String s) { - return MAP.get(s.toUpperCase()); + return MAP.get(StringUtils.toUpperCase(s)); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java index 0c03a42..020cb5f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java @@ -26,6 +26,7 @@ import org.apache.hadoop.fs.XAttr; import org.apache.hadoop.hdfs.XAttrHelper; import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy; import org.apache.hadoop.hdfs.protocol.HdfsConstants; +import org.apache.hadoop.util.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -131,7 +132,8 @@ public class BlockStoragePolicySuite { } public static String buildXAttrName() { - return XAttrNS.toString().toLowerCase() + "." + STORAGE_POLICY_XATTR_NAME; + return StringUtils.toLowerCase(XAttrNS.toString()) + + "." 
+ STORAGE_POLICY_XATTR_NAME; } public static XAttr buildXAttr(byte policyId) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java index ff64524..2d267ce 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java @@ -27,6 +27,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hdfs.server.namenode.MetaRecoveryContext; import com.google.common.base.Preconditions; +import org.apache.hadoop.util.StringUtils; /************************************ * Some handy internal HDFS constants @@ -53,7 +54,7 @@ public final class HdfsServerConstants { public String getOptionString() { return StartupOption.ROLLINGUPGRADE.getName() + " " - + name().toLowerCase(); + + StringUtils.toLowerCase(name()); } public boolean matches(StartupOption option) { @@ -84,7 +85,7 @@ public final class HdfsServerConstants { public static String getAllOptionString() { final StringBuilder b = new StringBuilder("<"); for(RollingUpgradeStartupOption opt : VALUES) { - b.append(opt.name().toLowerCase()).append("|"); + b.append(StringUtils.toLowerCase(opt.name())).append("|"); } b.setCharAt(b.length() - 1, '>'); return b.toString(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java ---------------------------------------------------------------------- diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java index 7cda670..126086f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java @@ -28,6 +28,7 @@ import java.util.regex.Matcher; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.server.common.Util; +import org.apache.hadoop.util.StringUtils; /** * Encapsulates the URI and storage medium that together describe a @@ -88,7 +89,8 @@ public class StorageLocation { String classString = matcher.group(1); location = matcher.group(2); if (!classString.isEmpty()) { - storageType = StorageType.valueOf(classString.toUpperCase()); + storageType = + StorageType.valueOf(StringUtils.toUpperCase(classString)); } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java index c41a46a..c768690 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java @@ -123,6 +123,7 @@ import org.apache.hadoop.ipc.ClientId; import org.apache.hadoop.ipc.RpcConstants; import org.apache.hadoop.security.token.delegation.DelegationKey; import org.apache.hadoop.util.DataChecksum; +import 
org.apache.hadoop.util.StringUtils; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; import org.xml.sax.helpers.AttributesImpl; @@ -4348,7 +4349,7 @@ public abstract class FSEditLogOp { public RollingUpgradeOp(FSEditLogOpCodes code, String name) { super(code); - this.name = name.toUpperCase(); + this.name = StringUtils.toUpperCase(name); } static RollingUpgradeOp getStartInstance(OpInstanceCache cache) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java ---------------------------------------------------------------------- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java index 711d0f8..39ce2dc 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.namenode; import com.google.common.base.Objects; import org.apache.hadoop.fs.StorageType; +import org.apache.hadoop.util.StringUtils; public class QuotaByStorageTypeEntry { private StorageType type; @@ -53,7 +54,7 @@ public class QuotaByStorageTypeEntry { public String toString() { StringBuilder sb = new StringBuilder(); assert (type != null); - sb.append(type.toString().toLowerCase()); + sb.append(StringUtils.toLowerCase(type.toString())); sb.append(':'); sb.append(quota); return sb.toString(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1c6accb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java ---------------------------------------------------------------------- diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java index 83e6426..ec7e0c9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java @@ -587,7 +587,7 @@ public class SecondaryNameNode implements Runnable, return 0; } - String cmd = opts.getCommand().toString().toLowerCase(); + String cmd = StringUtils.toLowerCase(opts.getCommand().toString()); int exitCode = 0; try {