From d1c6accb6f87b08975175580e15f1ff1fe29ab04 Mon Sep 17 00:00:00 2001
From: Tsuyoshi Ozawa
Date: Tue, 3 Mar 2015 14:12:34 +0900
Subject: [PATCH] HADOOP-11602. Fix toUpperCase/toLowerCase to use
 Locale.ENGLISH. (ozawa)

---
 .../tools/StabilityOptions.java | 5 ++-
 .../AltKerberosAuthenticationHandler.java | 6 ++-
 .../authentication/util/TestKerberosUtil.java | 14 ++++---
 .../hadoop-common/CHANGES.txt | 2 +
 .../org/apache/hadoop/conf/Configuration.java | 6 +--
 .../org/apache/hadoop/crypto/CipherSuite.java | 3 +-
 .../crypto/key/JavaKeyStoreProvider.java | 3 +-
 .../java/org/apache/hadoop/fs/FileSystem.java | 7 +++-
 .../org/apache/hadoop/fs/StorageType.java | 3 +-
 .../apache/hadoop/fs/permission/AclEntry.java | 5 ++-
 .../apache/hadoop/fs/shell/XAttrCommands.java | 2 +-
 .../org/apache/hadoop/fs/shell/find/Name.java | 5 ++-
 .../io/compress/CompressionCodecFactory.java | 7 ++--
 .../hadoop/metrics2/impl/MetricsConfig.java | 7 ++--
 .../metrics2/impl/MetricsSystemImpl.java | 5 ++-
 .../security/SaslPropertiesResolver.java | 3 +-
 .../apache/hadoop/security/SecurityUtil.java | 12 +++---
 .../security/WhitelistBasedResolver.java | 3 +-
 .../ssl/FileBasedKeyStoresFactory.java | 4 +-
 .../hadoop/security/ssl/SSLFactory.java | 5 ++-
 .../security/ssl/SSLHostnameVerifier.java | 10 +++--
 .../DelegationTokenAuthenticationHandler.java | 3 +-
 .../web/DelegationTokenAuthenticator.java | 3 +-
 .../apache/hadoop/util/ComparableVersion.java | 3 +-
 .../org/apache/hadoop/util/StringUtils.java | 40 ++++++++++++++++++-
 .../hadoop/fs/FileSystemContractBaseTest.java | 4 +-
 .../java/org/apache/hadoop/ipc/TestIPC.java | 2 +-
 .../org/apache/hadoop/ipc/TestSaslRPC.java | 2 +-
 .../hadoop/security/TestSecurityUtil.java | 10 +++--
 .../security/TestUserGroupInformation.java | 5 ++-
 .../hadoop/test/TimedOutTestsListener.java | 6 ++-
 .../apache/hadoop/util/TestStringUtils.java | 21 ++++++++++
 .../org/apache/hadoop/util/TestWinUtils.java | 6 ++-
 .../org/apache/hadoop/nfs/NfsExports.java | 5 ++-
 .../server/CheckUploadContentTypeFilter.java | 4 +-
 .../hadoop/fs/http/server/FSOperations.java | 7 +++-
 .../http/server/HttpFSParametersProvider.java | 4 +-
 .../org/apache/hadoop/lib/server/Server.java | 3 +-
 .../hadoop/FileSystemAccessService.java | 6 ++-
 .../org/apache/hadoop/lib/wsrs/EnumParam.java | 2 +-
 .../apache/hadoop/lib/wsrs/EnumSetParam.java | 3 +-
 .../hadoop/lib/wsrs/ParametersProvider.java | 3 +-
 .../org/apache/hadoop/hdfs/XAttrHelper.java | 19 +++++----
 .../hadoop/hdfs/protocol/HdfsConstants.java | 3 +-
 .../BlockStoragePolicySuite.java | 4 +-
 .../server/common/HdfsServerConstants.java | 5 ++-
 .../hdfs/server/datanode/StorageLocation.java | 4 +-
 .../hdfs/server/namenode/FSEditLogOp.java | 3 +-
 .../namenode/QuotaByStorageTypeEntry.java | 3 +-
 .../server/namenode/SecondaryNameNode.java | 2 +-
 .../org/apache/hadoop/hdfs/tools/GetConf.java | 17 ++++----
 .../OfflineEditsVisitorFactory.java | 7 ++--
 .../offlineImageViewer/FSImageHandler.java | 4 +-
 .../apache/hadoop/hdfs/web/AuthFilter.java | 3 +-
 .../apache/hadoop/hdfs/web/ParamFilter.java | 3 +-
 .../hadoop/hdfs/web/WebHdfsFileSystem.java | 5 ++-
 .../hadoop/hdfs/web/resources/EnumParam.java | 3 +-
 .../hdfs/web/resources/EnumSetParam.java | 3 +-
 .../snapshot/TestSnapshotManager.java | 6 +--
 .../jobhistory/JobHistoryEventHandler.java | 3 +-
 .../v2/app/webapp/AppController.java | 6 +--
 .../hadoop/mapreduce/TypeConverter.java | 3 +-
 .../hadoop/mapreduce/v2/util/MRApps.java | 4 +-
 .../hadoop/mapreduce/TestTypeConverter.java | 6 ++-
 .../java/org/apache/hadoop/mapred/Task.java | 2 +-
 .../counters/FileSystemCounterGroup.java | 4 +-
 .../mapreduce/filecache/DistributedCache.java | 4 +-
 .../mapreduce/lib/db/DBInputFormat.java | 5 ++-
 .../apache/hadoop/mapreduce/tools/CLI.java | 9 +++--
 .../java/org/apache/hadoop/fs/TestDFSIO.java | 18 ++++-----
 .../org/apache/hadoop/fs/TestFileSystem.java | 4 +-
 .../org/apache/hadoop/fs/slive/Constants.java | 6 ++-
 .../apache/hadoop/fs/slive/OperationData.java | 3 +-
 .../hadoop/fs/slive/OperationOutput.java | 4 +-
 .../org/apache/hadoop/fs/slive/SliveTest.java | 3 +-
 .../java/org/apache/hadoop/io/FileBench.java | 17 ++++----
 .../org/apache/hadoop/mapred/TestMapRed.java | 3 +-
 .../hadoop/examples/DBCountPageView.java | 2 +-
 .../plugin/versioninfo/VersionInfoMojo.java | 4 +-
 .../fs/azure/AzureNativeFileSystemStore.java | 4 +-
 .../apache/hadoop/tools/util/DistCpUtils.java | 12 ++++--
 .../org/apache/hadoop/tools/DistCpV1.java | 4 +-
 .../gridmix/GridmixJobSubmissionPolicy.java | 3 +-
 .../TestSwiftFileSystemExtendedContract.java | 4 +-
 .../tools/rumen/HadoopLogsAnalyzer.java | 33 +++++++--------
 .../apache/hadoop/tools/rumen/JobBuilder.java | 2 +-
 .../apache/hadoop/tools/rumen/LoggedTask.java | 3 +-
 .../hadoop/tools/rumen/LoggedTaskAttempt.java | 3 +-
 .../apache/hadoop/streaming/Environment.java | 3 +-
 .../yarn/client/cli/ApplicationCLI.java | 7 ++--
 .../hadoop/yarn/client/cli/NodeCLI.java | 3 +-
 .../impl/pb/GetApplicationsRequestPBImpl.java | 6 ++-
 .../ApplicationSubmissionContextPBImpl.java | 3 +-
 .../apache/hadoop/yarn/util/FSDownload.java | 6 +--
 .../hadoop/yarn/webapp/hamlet/HamletGen.java | 6 +--
 .../client/binding/RegistryUtils.java | 3 +-
 .../webapp/AHSWebServices.java | 4 +-
 .../timeline/webapp/TimelineWebServices.java | 3 +-
 .../yarn/server/webapp/WebServices.java | 18 +++++----
 .../resourcemanager/ClientRMService.java | 3 +-
 .../resource/ResourceWeights.java | 3 +-
 .../CapacitySchedulerConfiguration.java | 4 +-
 .../fair/FairSchedulerConfiguration.java | 3 +-
 .../scheduler/fair/SchedulingPolicy.java | 3 +-
 .../resourcemanager/webapp/NodesPage.java | 2 +-
 .../resourcemanager/webapp/RMWebServices.java | 20 ++++++----
 106 files changed, 407 insertions(+), 224 deletions(-)

diff --git a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
index dbce31e1eb..657dbce812 100644
--- a/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
+++ b/hadoop-common-project/hadoop-annotations/src/main/java/org/apache/hadoop/classification/tools/StabilityOptions.java
@@ -21,6 +21,7 @@
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 class StabilityOptions {
   public static final String STABLE_OPTION = "-stable";
@@ -28,7 +29,7 @@ class StabilityOptions {
   public static final String UNSTABLE_OPTION = "-unstable";
 
   public static Integer optionLength(String option) {
-    String opt = option.toLowerCase();
+    String opt = option.toLowerCase(Locale.ENGLISH);
     if (opt.equals(UNSTABLE_OPTION)) return 1;
     if (opt.equals(EVOLVING_OPTION)) return 1;
     if (opt.equals(STABLE_OPTION)) return 1;
@@ -38,7 +39,7 @@ public static Integer optionLength(String option) {
   public static void validOptions(String[][] options,
       DocErrorReporter reporter) {
     for (int i = 0; i < options.length; i++) {
-      String opt = options[i][0].toLowerCase();
+      String opt = options[i][0].toLowerCase(Locale.ENGLISH);
       if (opt.equals(UNSTABLE_OPTION)) {
         RootDocProcessor.stability = UNSTABLE_OPTION;
       } else if (opt.equals(EVOLVING_OPTION)) {
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
index 987330fa0e..dae3b50ad1 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AltKerberosAuthenticationHandler.java
@@ -14,6 +14,7 @@
 package org.apache.hadoop.security.authentication.server;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Properties;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
@@ -68,7 +69,8 @@ public void init(Properties config) throws ServletException {
         NON_BROWSER_USER_AGENTS, NON_BROWSER_USER_AGENTS_DEFAULT)
         .split("\\W*,\\W*");
     for (int i = 0; i < nonBrowserUserAgents.length; i++) {
-      nonBrowserUserAgents[i] = nonBrowserUserAgents[i].toLowerCase();
+      nonBrowserUserAgents[i] =
+          nonBrowserUserAgents[i].toLowerCase(Locale.ENGLISH);
     }
   }
 
@@ -120,7 +122,7 @@ protected boolean isBrowser(String userAgent) {
     if (userAgent == null) {
       return false;
     }
-    userAgent = userAgent.toLowerCase();
+    userAgent = userAgent.toLowerCase(Locale.ENGLISH);
     boolean isBrowser = true;
     for (String nonBrowserUserAgent : nonBrowserUserAgents) {
       if (userAgent.contains(nonBrowserUserAgent)) {
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
index b0e8f04a8f..89e07d1a5f 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestKerberosUtil.java
@@ -21,6 +21,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 import org.apache.directory.server.kerberos.shared.keytab.Keytab;
@@ -58,24 +59,25 @@ public void testGetServerPrincipal() throws IOException {
 
     // send null hostname
     Assert.assertEquals("When no hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, null));
     // send empty hostname
     Assert.assertEquals("When empty hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, ""));
     // send 0.0.0.0 hostname
     Assert.assertEquals("When 0.0.0.0 hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, "0.0.0.0"));
     // send uppercase hostname
     Assert.assertEquals("When uppercase hostname is sent",
-        service + "/" + testHost.toLowerCase(),
+        service + "/" + testHost.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, testHost));
     // send lowercase hostname
     Assert.assertEquals("When lowercase hostname is sent",
-        service + "/" + testHost.toLowerCase(),
-        KerberosUtil.getServicePrincipal(service, testHost.toLowerCase()));
+        service + "/" + testHost.toLowerCase(Locale.ENGLISH),
+        KerberosUtil.getServicePrincipal(
+            service, testHost.toLowerCase(Locale.ENGLISH)));
   }
 
   @Test
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index ebe23c7fa0..11785f2431 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -409,6 +409,8 @@ Trunk (Unreleased)
     HADOOP-10774. Update KerberosTestUtils for hadoop-auth tests when using
     IBM Java (sangamesh via aw)
 
+    HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)
+
   OPTIMIZATIONS
 
   HADOOP-7761. Improve the performance of raw comparisons. (todd)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 02654b77b5..753f51530d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -1451,11 +1451,9 @@ public boolean getBoolean(String name, boolean defaultValue) {
       return defaultValue;
     }
 
-    valueString = valueString.toLowerCase();
-
-    if ("true".equals(valueString))
+    if (StringUtils.equalsIgnoreCase("true", valueString))
       return true;
-    else if ("false".equals(valueString))
+    else if (StringUtils.equalsIgnoreCase("false", valueString))
       return false;
     else return defaultValue;
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
index c9355d7cbf..a811aa7271 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.crypto;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Defines properties of a CipherSuite. Modeled after the ciphers in
@@ -97,7 +98,7 @@ public String getConfigSuffix() {
     String[] parts = name.split("/");
     StringBuilder suffix = new StringBuilder();
     for (String part : parts) {
-      suffix.append(".").append(part.toLowerCase());
+      suffix.append(".").append(StringUtils.toLowerCase(part));
     }
 
     return suffix.toString();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
index bfec1ef1a9..c0d510d51f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/JavaKeyStoreProvider.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.ProviderUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -422,7 +423,7 @@ public Metadata getMetadata(String name) throws IOException {
   @Override
   public KeyVersion createKey(String name, byte[] material,
       Options options) throws IOException {
-    Preconditions.checkArgument(name.equals(name.toLowerCase()),
+    Preconditions.checkArgument(name.equals(StringUtils.toLowerCase(name)),
         "Uppercase key names are unsupported: %s", name);
     writeLock.lock();
     try {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index cfa519861a..42434f1945 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -65,6 +65,7 @@
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ShutdownHookManager;
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -2795,8 +2796,10 @@ static class Key {
     }
 
     Key(URI uri, Configuration conf, long unique) throws IOException {
-      scheme = uri.getScheme()==null?"":uri.getScheme().toLowerCase();
-      authority = uri.getAuthority()==null?"":uri.getAuthority().toLowerCase();
+      scheme = uri.getScheme()==null ?
+          "" : StringUtils.toLowerCase(uri.getScheme());
+      authority = uri.getAuthority()==null ?
+          "" : StringUtils.toLowerCase(uri.getAuthority());
       this.unique = unique;
 
       this.ugi = UserGroupInformation.getCurrentUser();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java
index e306502121..68069d7256 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageType.java
@@ -24,6 +24,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Defines the types of supported storage media. The default storage
@@ -78,7 +79,7 @@ public static StorageType parseStorageType(int i) {
   }
 
   public static StorageType parseStorageType(String s) {
-    return StorageType.valueOf(s.toUpperCase());
+    return StorageType.valueOf(StringUtils.toUpperCase(s));
   }
 
   private static List<StorageType> getNonTransientTypes() {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
index b9def6447a..45402f8a2f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
@@ -106,7 +106,7 @@ public String toString() {
       sb.append("default:");
     }
     if (type != null) {
-      sb.append(type.toString().toLowerCase());
+      sb.append(StringUtils.toLowerCase(type.toString()));
     }
     sb.append(':');
     if (name != null) {
@@ -263,7 +263,8 @@ public static AclEntry parseAclEntry(String aclStr,
 
     AclEntryType aclType = null;
     try {
-      aclType = Enum.valueOf(AclEntryType.class, split[index].toUpperCase());
+      aclType = Enum.valueOf(
+          AclEntryType.class, StringUtils.toUpperCase(split[index]));
       builder.setType(aclType);
       index++;
     } catch (IllegalArgumentException iae) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
index 4efda87444..d55c80b1a7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
@@ -79,7 +79,7 @@ protected void processOptions(LinkedList<String> args) throws IOException {
       String en = StringUtils.popOptionWithArgument("-e", args);
       if (en != null) {
         try {
-          encoding = enValueOfFunc.apply(en.toUpperCase(Locale.ENGLISH));
+          encoding = enValueOfFunc.apply(StringUtils.toUpperCase(en));
         } catch (IllegalArgumentException e) {
           throw new IllegalArgumentException(
               "Invalid/unsupported encoding option specified: " + en);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
index 88314c6474..c89daa98e9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Name.java
@@ -22,6 +22,7 @@
 
 import org.apache.hadoop.fs.GlobPattern;
 import org.apache.hadoop.fs.shell.PathData;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Implements the -name expression for the
@@ -73,7 +74,7 @@ public void addArguments(Deque<String> args) {
   public void prepare() throws IOException {
     String argPattern = getArgument(1);
     if (!caseSensitive) {
-      argPattern = argPattern.toLowerCase();
+      argPattern = StringUtils.toLowerCase(argPattern);
     }
     globPattern = new GlobPattern(argPattern);
   }
@@ -82,7 +83,7 @@ public Result apply(PathData item, int depth) throws IOException {
     String name = getPath(item).getName();
     if (!caseSensitive) {
-      name = name.toLowerCase();
+      name = StringUtils.toLowerCase(name);
     }
     if (globPattern.matches(name)) {
       return Result.PASS;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
index 7476a15aaa..8fff75d01d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
@@ -27,6 +27,7 @@
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * A factory that will find the correct codec for a given filename.
@@ -66,10 +67,10 @@ private void addCodec(CompressionCodec codec) {
     codecsByClassName.put(codec.getClass().getCanonicalName(), codec);
 
     String codecName = codec.getClass().getSimpleName();
-    codecsByName.put(codecName.toLowerCase(), codec);
+    codecsByName.put(StringUtils.toLowerCase(codecName), codec);
     if (codecName.endsWith("Codec")) {
       codecName = codecName.substring(0, codecName.length() - "Codec".length());
-      codecsByName.put(codecName.toLowerCase(), codec);
+      codecsByName.put(StringUtils.toLowerCase(codecName), codec);
     }
   }
 
@@ -246,7 +247,7 @@ public CompressionCodec getCodecByName(String codecName) {
       if (codec == null) {
         // trying to get the codec by name in case the name was specified
         // instead a class
-        codec = codecsByName.get(codecName.toLowerCase());
+        codec = codecsByName.get(StringUtils.toLowerCase(codecName));
       }
       return codec;
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
index 167205e93e..cbe60b5197 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
@@ -44,6 +44,7 @@
 import org.apache.hadoop.metrics2.MetricsFilter;
 import org.apache.hadoop.metrics2.MetricsPlugin;
 import org.apache.hadoop.metrics2.filter.GlobFilter;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Metrics configuration for MetricsSystemImpl
@@ -85,12 +86,12 @@ class MetricsConfig extends SubsetConfiguration {
   private ClassLoader pluginLoader;
 
   MetricsConfig(Configuration c, String prefix) {
-    super(c, prefix.toLowerCase(Locale.US), ".");
+    super(c, StringUtils.toLowerCase(prefix), ".");
   }
 
   static MetricsConfig create(String prefix) {
-    return loadFirst(prefix, "hadoop-metrics2-"+ prefix.toLowerCase(Locale.US)
-        +".properties", DEFAULT_FILE_NAME);
+    return loadFirst(prefix, "hadoop-metrics2-" +
+        StringUtils.toLowerCase(prefix) + ".properties", DEFAULT_FILE_NAME);
   }
 
   static MetricsConfig create(String prefix, String... fileNames) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
index 32b00f3fdf..a94d814431 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsSystemImpl.java
@@ -61,6 +61,7 @@
 import org.apache.hadoop.metrics2.lib.MetricsSourceBuilder;
 import org.apache.hadoop.metrics2.lib.MutableStat;
 import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 
 /**
@@ -616,7 +617,7 @@ private InitMode initMode() {
     LOG.debug("from environment variable: "+ System.getenv(MS_INIT_MODE_KEY));
     String m = System.getProperty(MS_INIT_MODE_KEY);
     String m2 = m == null ? System.getenv(MS_INIT_MODE_KEY) : m;
-    return InitMode.valueOf((m2 == null ? InitMode.NORMAL.name() : m2)
-        .toUpperCase(Locale.US));
+    return InitMode.valueOf(
+        StringUtils.toUpperCase((m2 == null ? InitMode.NORMAL.name() : m2)));
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java
index 0b49cfbc44..305443cea8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPropertiesResolver.java
@@ -66,7 +66,8 @@ public void setConf(Configuration conf) {
         CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION,
         QualityOfProtection.AUTHENTICATION.toString());
     for (int i=0; i < qop.length; i++) {
-      qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase(Locale.ENGLISH)).getSaslQop();
+      qop[i] = QualityOfProtection.valueOf(
+          StringUtils.toUpperCase(qop[i])).getSaslQop();
     }
     properties.put(Sasl.QOP, StringUtils.join(",", qop));
     properties.put(Sasl.SERVER_AUTH, "true");
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index 7cbee26f18..eddf98d07f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -27,7 +27,6 @@
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.List;
-import java.util.Locale;
 import java.util.ServiceLoader;
 
 import javax.security.auth.kerberos.KerberosPrincipal;
@@ -44,6 +43,7 @@
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
+import org.apache.hadoop.util.StringUtils;
 
 
 //this will need to be replaced someday when there is a suitable replacement
@@ -182,7 +182,8 @@ private static String replacePattern(String[] components, String hostname)
     if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) {
       fqdn = getLocalHostName();
     }
-    return components[0] + "/" + fqdn.toLowerCase(Locale.US) + "@" + components[2];
+    return components[0] + "/" +
+        StringUtils.toLowerCase(fqdn) + "@" + components[2];
   }
 
   static String getLocalHostName() throws UnknownHostException {
@@ -379,7 +380,7 @@ public static Text buildTokenService(InetSocketAddress addr) {
       }
       host = addr.getAddress().getHostAddress();
     } else {
-      host = addr.getHostName().toLowerCase();
+      host = StringUtils.toLowerCase(addr.getHostName());
     }
     return new Text(host + ":" + addr.getPort());
   }
@@ -606,7 +607,8 @@ void setSearchDomains(String ... domains) {
   public static AuthenticationMethod getAuthenticationMethod(Configuration conf) {
     String value = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple");
     try {
-      return Enum.valueOf(AuthenticationMethod.class, value.toUpperCase(Locale.ENGLISH));
+      return Enum.valueOf(AuthenticationMethod.class,
+          StringUtils.toUpperCase(value));
     } catch (IllegalArgumentException iae) {
       throw new IllegalArgumentException("Invalid attribute value for " +
           HADOOP_SECURITY_AUTHENTICATION + " of " + value);
@@ -619,7 +621,7 @@ public static void setAuthenticationMethod(
       authenticationMethod = AuthenticationMethod.SIMPLE;
     }
     conf.set(HADOOP_SECURITY_AUTHENTICATION,
-        authenticationMethod.toString().toLowerCase(Locale.ENGLISH));
+        StringUtils.toLowerCase(authenticationMethod.toString()));
   }
 
   /*
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
index dc0815ed76..8d4df64299 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/WhitelistBasedResolver.java
@@ -138,7 +138,8 @@ static Map<String, String> getSaslProperties(Configuration conf) {
         QualityOfProtection.PRIVACY.toString());
 
     for (int i=0; i < qop.length; i++) {
-      qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase()).getSaslQop();
+      qop[i] = QualityOfProtection.valueOf(
+          StringUtils.toUpperCase(qop[i])).getSaslQop();
     }
 
     saslProps.put(Sasl.QOP, StringUtils.join(",", qop));
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
index 4b81e17095..609c71f5f5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
@@ -23,6 +23,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.net.ssl.KeyManager;
 import javax.net.ssl.KeyManagerFactory;
@@ -94,7 +95,8 @@ public class FileBasedKeyStoresFactory implements KeyStoresFactory {
   @VisibleForTesting
   public static String resolvePropertyName(SSLFactory.Mode mode,
       String template) {
-    return MessageFormat.format(template, mode.toString().toLowerCase());
+    return MessageFormat.format(
+        template, StringUtils.toLowerCase(mode.toString()));
   }
 
   /**
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
index bbea33b7a0..edec347d0d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
@@ -22,6 +22,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import javax.net.ssl.HostnameVerifier;
@@ -137,8 +138,8 @@ public void init() throws GeneralSecurityException, IOException {
 
   private HostnameVerifier getHostnameVerifier(Configuration conf)
       throws GeneralSecurityException, IOException {
-    return getHostnameVerifier(conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").
-        trim().toUpperCase());
+    return getHostnameVerifier(StringUtils.toUpperCase(
+        conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").trim()));
   }
 
   public static HostnameVerifier getHostnameVerifier(String verifier)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
index dd5e67b48b..b5ef2b24ee 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
@@ -52,6 +52,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  ************************************************************************
@@ -365,7 +366,7 @@ public void check(final String[] hosts, final String[] cns,
             buf.append('<');
             for (int i = 0; i < hosts.length; i++) {
                 String h = hosts[i];
-                h = h != null ? h.trim().toLowerCase() : "";
+                h = h != null ? StringUtils.toLowerCase(h.trim()) : "";
                 hosts[i] = h;
                 if (i > 0) {
                     buf.append('/');
@@ -406,7 +407,7 @@ public void check(final String[] hosts, final String[] cns,
             out:
            for (Iterator<String> it = names.iterator(); it.hasNext();) {
                 // Don't trim the CN, though!
-                final String cn = it.next().toLowerCase();
+                final String cn = StringUtils.toLowerCase(it.next());
                 // Store CN in StringBuffer in case we need to report an error.
                 buf.append(" <");
                 buf.append(cn);
@@ -424,7 +425,8 @@ public void check(final String[] hosts, final String[] cns,
                     acceptableCountryWildcard(cn);
 
                 for (int i = 0; i < hosts.length; i++) {
-                    final String hostName = hosts[i].trim().toLowerCase();
+                    final String hostName =
+                        StringUtils.toLowerCase(hosts[i].trim());
                     if (doWildcard) {
                         match = hostName.endsWith(cn.substring(1));
                         if (match && strictWithSubDomains) {
@@ -479,7 +481,7 @@ public static boolean acceptableCountryWildcard(final String cn) {
     }
 
     public static boolean isLocalhost(String host) {
-        host = host != null ? host.trim().toLowerCase() : "";
+        host = host != null ? StringUtils.toLowerCase(host.trim()) : "";
         if (host.startsWith("::1")) {
             int x = host.lastIndexOf('%');
             if (x >= 0) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
index c18b5d32cf..c498f70e52 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationHandler.java
@@ -47,6 +47,7 @@
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.map.ObjectMapper;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -169,7 +170,7 @@ public boolean managementOperation(AuthenticationToken token,
     boolean requestContinues = true;
     String op = ServletUtils.getParameter(request,
         KerberosDelegationTokenAuthenticator.OP_PARAM);
-    op = (op != null) ? op.toUpperCase() : null;
+    op = (op != null) ? StringUtils.toUpperCase(op) : null;
     if (DELEGATION_TOKEN_OPS.contains(op) &&
         !request.getMethod().equals("OPTIONS")) {
       KerberosDelegationTokenAuthenticator.DelegationTokenOperation dtOp =
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
index d93f7acf6c..8a3a57f5f1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
@@ -27,6 +27,7 @@
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -286,7 +287,7 @@ private Map doDelegationTokenOperation(URL url,
     HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
     if (hasResponse) {
       String contentType = conn.getHeaderField(CONTENT_TYPE);
-      contentType = (contentType != null) ? contentType.toLowerCase()
+      contentType = (contentType != null) ? StringUtils.toLowerCase(contentType)
           : null;
       if (contentType != null &&
           contentType.contains(APPLICATION_JSON_MIME)) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
index 65d85f79f8..9d34518ee7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ComparableVersion.java
@@ -37,7 +37,6 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.ListIterator;
-import java.util.Locale;
 import java.util.Properties;
 import java.util.Stack;
 
@@ -363,7 +362,7 @@ public final void parseVersion( String version )
 
         items = new ListItem();
 
-        version = version.toLowerCase( Locale.ENGLISH );
+        version = StringUtils.toLowerCase(version);
 
         ListItem list = items;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
index ff8edc389d..fc4b0ab6bf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.util;
 
+import com.google.common.base.Preconditions;
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.net.URI;
@@ -901,7 +902,7 @@ public static String join(CharSequence separator, String[] strings) {
    */
  public static String camelize(String s) {
     StringBuilder sb = new StringBuilder();
-    String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR, '_');
+    String[] words = split(StringUtils.toLowerCase(s), ESCAPE_CHAR, '_');
 
     for (String word : words)
       sb.append(org.apache.commons.lang.StringUtils.capitalize(word));
@@ -1032,4 +1033,41 @@ public static String popFirstNonOption(List<String> args) {
     }
     return null;
   }
+
+  /**
+   * Converts all of the characters in this String to lower case with
+   * Locale.ENGLISH.
+   *
+   * @param str string to be converted
+   * @return the str, converted to lowercase.
+   */
+  public static String toLowerCase(String str) {
+    return str.toLowerCase(Locale.ENGLISH);
+  }
+
+  /**
+   * Converts all of the characters in this String to upper case with
+   * Locale.ENGLISH.
+   *
+   * @param str string to be converted
+   * @return the str, converted to uppercase.
+   */
+  public static String toUpperCase(String str) {
+    return str.toUpperCase(Locale.ENGLISH);
+  }
+
+  /**
+   * Compare strings locale-freely by using String#equalsIgnoreCase.
+   *
+   * @param s1 Non-null string to be compared
+   * @param s2 string to be compared
+   * @return true if the two strings are equal, ignoring case.
+   */
+  public static boolean equalsIgnoreCase(String s1, String s2) {
+    Preconditions.checkNotNull(s1);
+    // don't check non-null against s2 to make the semantics same as
+    // s1.equals(s2)
+    return s1.equalsIgnoreCase(s2);
+  }
+
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
index e2005be546..2ca81e9e90 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
@@ -20,7 +20,6 @@
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.util.Locale;
 
 import junit.framework.TestCase;
 
@@ -28,6 +27,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  *
@@ -527,7 +527,7 @@ public void testFilesystemIsCaseSensitive() throws Exception {
     }
     String mixedCaseFilename = "/test/UPPER.TXT";
     Path upper = path(mixedCaseFilename);
-    Path lower = path(mixedCaseFilename.toLowerCase(Locale.ENGLISH));
+    Path lower = path(StringUtils.toLowerCase(mixedCaseFilename));
     assertFalse("File exists" + upper, fs.exists(upper));
     assertFalse("File exists" + lower, fs.exists(lower));
     FSDataOutputStream out = fs.create(upper);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
index eb19f48e63..b44301127d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestIPC.java
@@ -1296,7 +1296,7 @@ private static byte[] hexDumpToBytes(String hexdump) {
 
     StringBuilder hexString = new StringBuilder();
 
-    for (String line : hexdump.toUpperCase().split("\n")) {
+    for (String line : StringUtils.toUpperCase(hexdump).split("\n")) {
       hexString.append(line.substring(0, LAST_HEX_COL).replace(" ", ""));
     }
     return StringUtils.hexStringToByte(hexString.toString());
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
index 903990b78a..f6ab38043c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
@@ -181,7 +181,7 @@ static String getQOPNames (QualityOfProtection[] qops){
     StringBuilder sb = new StringBuilder();
     int i = 0;
     for (QualityOfProtection qop:qops){
-      sb.append(qop.name().toLowerCase());
+      sb.append(org.apache.hadoop.util.StringUtils.toLowerCase(qop.name()));
       if (++i < qops.length){
         sb.append(",");
       }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
index 4616c9071d..e523e1864e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
@@ -18,13 +18,13 @@
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
 import static org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.*;
+
 import static org.junit.Assert.*;
 
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
-import java.util.Locale;
 
 import javax.security.auth.kerberos.KerberosPrincipal;
 
@@ -33,6 +33,7 @@
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -103,13 +104,14 @@ public void testPrincipalsWithLowerCaseHosts() throws IOException {
     String realm = "@REALM";
     String principalInConf = service + SecurityUtil.HOSTNAME_PATTERN + realm;
     String hostname = "FooHost";
-    String principal = service + hostname.toLowerCase() + realm;
+    String principal =
+        service + StringUtils.toLowerCase(hostname) + realm;
     verify(principalInConf, hostname, principal);
   }
 
   @Test
   public void testLocalHostNameForNullOrWild() throws Exception {
-    String local = SecurityUtil.getLocalHostName().toLowerCase(Locale.US);
+    String local = StringUtils.toLowerCase(SecurityUtil.getLocalHostName());
     assertEquals("hdfs/" + local + "@REALM",
         SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", (String)null));
     assertEquals("hdfs/" + local + "@REALM",
@@ -260,7 +262,7 @@ void runBadPortPermutes(String arg, boolean validIfPosPort) {
     //LOG.info("address:"+addr+" host:"+host+" ip:"+ip+" port:"+port);
 
     SecurityUtil.setTokenServiceUseIp(useIp);
-    String serviceHost = useIp ? ip : host.toLowerCase();
+    String serviceHost = useIp ? ip : StringUtils.toLowerCase(host);
 
     Token<TokenIdentifier> token = new Token<TokenIdentifier>();
     Text service = new Text(serviceHost+":"+port);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
index 48b9b99928..5b8eac60f9 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.*;
 
 import javax.security.auth.Subject;
@@ -213,7 +214,7 @@ public void testGetServerSideGroups() throws IOException,
       userName = userName.substring(sp + 1);
     }
     // user names are case insensitive on Windows. Make consistent
-    userName = userName.toLowerCase();
+    userName = StringUtils.toLowerCase(userName);
   }
   // get the groups
   pp = Runtime.getRuntime().exec(Shell.WINDOWS ?
@@ -233,7 +234,7 @@ public void testGetServerSideGroups() throws IOException,
     String loginUserName = login.getShortUserName();
     if(Shell.WINDOWS) {
       // user names are case insensitive on Windows. Make consistent
-      loginUserName = loginUserName.toLowerCase();
+      loginUserName = StringUtils.toLowerCase(loginUserName);
     }
     assertEquals(userName, loginUserName);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
index 220ab1daea..1bdeddb57a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/TimedOutTestsListener.java
@@ -29,6 +29,7 @@
 import java.util.Date;
 import java.util.Map;
 
+import org.apache.hadoop.util.StringUtils;
 import org.junit.runner.notification.Failure;
 import org.junit.runner.notification.RunListener;
 
@@ -93,8 +94,9 @@ static String buildThreadDump() {
           thread.getPriority(),
           thread.getId(),
           Thread.State.WAITING.equals(thread.getState()) ?
-              "in Object.wait()" : thread.getState().name().toLowerCase(),
-          Thread.State.WAITING.equals(thread.getState()) ?
+              "in Object.wait()" :
+              StringUtils.toLowerCase(thread.getState().name()),
+          Thread.State.WAITING.equals(thread.getState()) ?
"WAITING (on object monitor)" : thread.getState())); for (StackTraceElement stackTraceElement : e.getValue()) { dump.append("\n at "); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java index 0c930d4392..515c3e0c0e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java @@ -18,10 +18,12 @@ package org.apache.hadoop.util; +import java.util.Locale; import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.long2String; import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.string2long; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -412,6 +414,25 @@ public void testGetUniqueNonEmptyTrimmedStrings (){ assertTrue(col.containsAll(Arrays.asList(new String[]{"foo","bar","baz","blah"}))); } + @Test + public void testLowerAndUpperStrings() { + Locale defaultLocale = Locale.getDefault(); + try { + Locale.setDefault(new Locale("tr", "TR")); + String upperStr = "TITLE"; + String lowerStr = "title"; + // Confirming TR locale. + assertNotEquals(lowerStr, upperStr.toLowerCase()); + assertNotEquals(upperStr, lowerStr.toUpperCase()); + // This should be true regardless of locale. + assertEquals(lowerStr, StringUtils.toLowerCase(upperStr)); + assertEquals(upperStr, StringUtils.toUpperCase(lowerStr)); + assertTrue(StringUtils.equalsIgnoreCase(upperStr, lowerStr)); + } finally { + Locale.setDefault(defaultLocale); + } + } + // Benchmark for StringUtils split public static void main(String []args) { final String TO_SPLIT = "foo,bar,baz,blah,blah"; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java index 2d4e442046..8ac6e40a04 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java @@ -382,8 +382,10 @@ private void chown(String userGroup, File file) throws IOException { private void assertOwners(File file, String expectedUser, String expectedGroup) throws IOException { String [] args = lsF(file).trim().split("[\\|]"); - assertEquals(expectedUser.toLowerCase(), args[2].toLowerCase()); - assertEquals(expectedGroup.toLowerCase(), args[3].toLowerCase()); + assertEquals(StringUtils.toLowerCase(expectedUser), + StringUtils.toLowerCase(args[2])); + assertEquals(StringUtils.toLowerCase(expectedGroup), + StringUtils.toLowerCase(args[3])); } @Test (timeout = 30000) diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java index b617ae5088..8b6b46a723 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/NfsExports.java @@ -32,6 +32,7 @@ import org.apache.hadoop.util.LightWeightCache; import org.apache.hadoop.util.LightWeightGSet; import org.apache.hadoop.util.LightWeightGSet.LinkedElement; 
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.base.Preconditions;
 
@@ -359,10 +360,10 @@ private static Match getMatch(String line) {
     AccessPrivilege privilege = AccessPrivilege.READ_ONLY;
     switch (parts.length) {
     case 1:
-      host = parts[0].toLowerCase().trim();
+      host = StringUtils.toLowerCase(parts[0]).trim();
       break;
     case 2:
-      host = parts[0].toLowerCase().trim();
+      host = StringUtils.toLowerCase(parts[0]).trim();
       String option = parts[1].trim();
       if ("rw".equalsIgnoreCase(option)) {
         privilege = AccessPrivilege.READ_WRITE;
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
index 836b4ce9ff..81b0b7a2b7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
@@ -21,6 +21,7 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -82,7 +83,8 @@ public void doFilter(ServletRequest request, ServletResponse response,
     String method = httpReq.getMethod();
     if (method.equals("PUT") || method.equals("POST")) {
       String op = httpReq.getParameter(HttpFSFileSystem.OP_PARAM);
-      if (op != null && UPLOAD_OPERATIONS.contains(op.toUpperCase())) {
+      if (op != null && UPLOAD_OPERATIONS.contains(
+          StringUtils.toUpperCase(op))) {
         if ("true".equalsIgnoreCase(httpReq.getParameter(HttpFSParametersProvider.DataParam.NAME))) {
           String contentType = httpReq.getContentType();
           contentTypeOK =
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
index 633589cd46..11cdb4d02c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
@@ -34,6 +34,7 @@
 import org.apache.hadoop.hdfs.protocol.AclException;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.lib.service.FileSystemAccess;
+import org.apache.hadoop.util.StringUtils;
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 
@@ -439,7 +440,8 @@ public FSTruncate(String path, long newLength) {
     @Override
     public JSONObject execute(FileSystem fs) throws IOException {
       boolean result = fs.truncate(path, newLength);
-      return toJSON(HttpFSFileSystem.TRUNCATE_JSON.toLowerCase(), result);
+      return toJSON(
+          StringUtils.toLowerCase(HttpFSFileSystem.TRUNCATE_JSON), result);
     }
   }
 
@@ -568,7 +570,8 @@ public FSDelete(String path, boolean recursive) {
     @Override
     public JSONObject execute(FileSystem fs) throws IOException {
       boolean deleted = fs.delete(path, recursive);
-      return toJSON(HttpFSFileSystem.DELETE_JSON.toLowerCase(), deleted);
+      return toJSON(
+          StringUtils.toLowerCase(HttpFSFileSystem.DELETE_JSON), deleted);
     }
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
index 271f3d9f00..5c4204a237 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
@@ -30,6 +30,7 @@
 import org.apache.hadoop.lib.wsrs.ParametersProvider;
 import org.apache.hadoop.lib.wsrs.ShortParam;
 import org.apache.hadoop.lib.wsrs.StringParam;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.ws.rs.ext.Provider;
 import java.util.HashMap;
@@ -168,7 +169,8 @@ public static class OperationParam extends EnumParam<HttpFSFileSystem.Operation
      */
     public OperationParam(String operation) {
       super(NAME, HttpFSFileSystem.Operation.class,
-          HttpFSFileSystem.Operation.valueOf(operation.toUpperCase()));
+          HttpFSFileSystem.Operation.valueOf(
+              StringUtils.toUpperCase(operation)));
     }
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
index 5c1bb4f727..1a0f9ff44e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/server/Server.java
@@ -22,6 +22,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.util.Check;
 import org.apache.hadoop.lib.util.ConfigurationUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.PropertyConfigurator;
 import org.slf4j.Logger;
@@ -202,7 +203,7 @@ public Server(String name, String homeDir, Configuration config) {
    * @param config server configuration.
*/ public Server(String name, String homeDir, String configDir, String logDir, String tempDir, Configuration config) { - this.name = Check.notEmpty(name, "name").trim().toLowerCase(); + this.name = StringUtils.toLowerCase(Check.notEmpty(name, "name").trim()); this.homeDir = Check.notEmpty(homeDir, "homeDir"); this.configDir = Check.notEmpty(configDir, "configDir"); this.logDir = Check.notEmpty(logDir, "logDir"); diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java index ccb15a30ba..88780cbeba 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/hadoop/FileSystemAccessService.java @@ -33,6 +33,7 @@ import org.apache.hadoop.lib.util.Check; import org.apache.hadoop.lib.util.ConfigurationUtils; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.VersionInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -254,7 +255,7 @@ public void run() { private Set toLowerCase(Collection collection) { Set set = new HashSet(); for (String value : collection) { - set.add(value.toLowerCase()); + set.add(StringUtils.toLowerCase(value)); } return set; } @@ -300,7 +301,8 @@ protected void closeFileSystem(FileSystem fs) throws IOException { protected void validateNamenode(String namenode) throws FileSystemAccessException { if (nameNodeWhitelist.size() > 0 && !nameNodeWhitelist.contains("*")) { - if (!nameNodeWhitelist.contains(namenode.toLowerCase())) { + if (!nameNodeWhitelist.contains( + StringUtils.toLowerCase(namenode))) { throw new FileSystemAccessException(FileSystemAccessException.ERROR.H05, namenode, "not in whitelist"); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java index 8baef67e8c..f95a6e6fc8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java @@ -34,7 +34,7 @@ public EnumParam(String name, Class e, E defaultValue) { @Override protected E parse(String str) throws Exception { - return Enum.valueOf(klass, str.toUpperCase()); + return Enum.valueOf(klass, StringUtils.toUpperCase(str)); } @Override diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java index 8d79b71886..ba6e5aa7a1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumSetParam.java @@ -22,6 +22,7 @@ import java.util.Iterator; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.util.StringUtils; @InterfaceAudience.Private public abstract class EnumSetParam> extends Param> { @@ -37,7 +38,7 @@ protected EnumSet parse(String str) throws Exception { final EnumSet set = EnumSet.noneOf(klass); if (!str.isEmpty()) { for (String sub : str.split(",")) { - 
set.add(Enum.valueOf(klass, sub.trim().toUpperCase())); + set.add(Enum.valueOf(klass, StringUtils.toUpperCase(sub.trim()))); } } return set; diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java index 4703a904a1..c93f8f2b17 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java @@ -26,6 +26,7 @@ import com.sun.jersey.spi.inject.Injectable; import com.sun.jersey.spi.inject.InjectableProvider; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.util.StringUtils; import javax.ws.rs.core.Context; import javax.ws.rs.core.MultivaluedMap; @@ -70,7 +71,7 @@ public Parameters getValue(HttpContext httpContext) { } Enum op; try { - op = Enum.valueOf(enumClass, str.toUpperCase()); + op = Enum.valueOf(enumClass, StringUtils.toUpperCase(str)); } catch (IllegalArgumentException ex) { throw new IllegalArgumentException( MessageFormat.format("Invalid Operation [{0}]", str)); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java index 04364ccf7e..5cafb3cc63 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/XAttrHelper.java @@ -24,6 +24,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.XAttr; import org.apache.hadoop.fs.XAttr.NameSpace; +import org.apache.hadoop.util.StringUtils; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; @@ -57,16 +58,20 @@ public static XAttr buildXAttr(String name, byte[] value) { } NameSpace ns; - final String prefix = name.substring(0, prefixIndex).toLowerCase(); - if (prefix.equals(NameSpace.USER.toString().toLowerCase())) { + final String prefix = name.substring(0, prefixIndex); + if (StringUtils.equalsIgnoreCase(prefix, NameSpace.USER.toString())) { ns = NameSpace.USER; - } else if (prefix.equals(NameSpace.TRUSTED.toString().toLowerCase())) { + } else if ( + StringUtils.equalsIgnoreCase(prefix, NameSpace.TRUSTED.toString())) { ns = NameSpace.TRUSTED; - } else if (prefix.equals(NameSpace.SYSTEM.toString().toLowerCase())) { + } else if ( + StringUtils.equalsIgnoreCase(prefix, NameSpace.SYSTEM.toString())) { ns = NameSpace.SYSTEM; - } else if (prefix.equals(NameSpace.SECURITY.toString().toLowerCase())) { + } else if ( + StringUtils.equalsIgnoreCase(prefix, NameSpace.SECURITY.toString())) { ns = NameSpace.SECURITY; - } else if (prefix.equals(NameSpace.RAW.toString().toLowerCase())) { + } else if ( + StringUtils.equalsIgnoreCase(prefix, NameSpace.RAW.toString())) { ns = NameSpace.RAW; } else { throw new HadoopIllegalArgumentException("An XAttr name must be " + @@ -145,7 +150,7 @@ public static String getPrefixName(XAttr xAttr) { } String namespace = xAttr.getNameSpace().toString(); - return namespace.toLowerCase() + "." + xAttr.getName(); + return StringUtils.toLowerCase(namespace) + "." 
+ xAttr.getName(); } /** diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java index 54da8ebe8d..7cf8a4721d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hdfs.server.datanode.DataNodeLayoutVersion; import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion; import org.apache.hadoop.hdfs.server.namenode.FSDirectory; +import org.apache.hadoop.util.StringUtils; /************************************ * Some handy constants @@ -98,7 +99,7 @@ public static enum RollingUpgradeAction { /** Convert the given String to a RollingUpgradeAction. */ public static RollingUpgradeAction fromString(String s) { - return MAP.get(s.toUpperCase()); + return MAP.get(StringUtils.toUpperCase(s)); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java index 0c03a42712..020cb5f065 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockStoragePolicySuite.java @@ -26,6 +26,7 @@ import org.apache.hadoop.hdfs.XAttrHelper; import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy; import org.apache.hadoop.hdfs.protocol.HdfsConstants; +import org.apache.hadoop.util.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -131,7 +132,8 @@ public BlockStoragePolicy[] getAllPolicies() { } public static String buildXAttrName() { - return XAttrNS.toString().toLowerCase() + "." + STORAGE_POLICY_XATTR_NAME; + return StringUtils.toLowerCase(XAttrNS.toString()) + + "."
+ STORAGE_POLICY_XATTR_NAME; } public static XAttr buildXAttr(byte policyId) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java index ff64524864..2d267cee04 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java @@ -27,6 +27,7 @@ import org.apache.hadoop.hdfs.server.namenode.MetaRecoveryContext; import com.google.common.base.Preconditions; +import org.apache.hadoop.util.StringUtils; /************************************ * Some handy internal HDFS constants @@ -53,7 +54,7 @@ public static enum RollingUpgradeStartupOption{ public String getOptionString() { return StartupOption.ROLLINGUPGRADE.getName() + " " - + name().toLowerCase(); + + StringUtils.toLowerCase(name()); } public boolean matches(StartupOption option) { @@ -84,7 +85,7 @@ static RollingUpgradeStartupOption fromString(String s) { public static String getAllOptionString() { final StringBuilder b = new StringBuilder("<"); for(RollingUpgradeStartupOption opt : VALUES) { - b.append(opt.name().toLowerCase()).append("|"); + b.append(StringUtils.toLowerCase(opt.name())).append("|"); } b.setCharAt(b.length() - 1, '>'); return b.toString(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java index 7cda670f22..126086f397 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/StorageLocation.java @@ -28,6 +28,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.server.common.Util; +import org.apache.hadoop.util.StringUtils; /** * Encapsulates the URI and storage medium that together describe a @@ -88,7 +89,8 @@ public static StorageLocation parse(String rawLocation) String classString = matcher.group(1); location = matcher.group(2); if (!classString.isEmpty()) { - storageType = StorageType.valueOf(classString.toUpperCase()); + storageType = + StorageType.valueOf(StringUtils.toUpperCase(classString)); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java index c41a46a880..c768690bc6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java @@ -123,6 +123,7 @@ import org.apache.hadoop.ipc.RpcConstants; import org.apache.hadoop.security.token.delegation.DelegationKey; import org.apache.hadoop.util.DataChecksum; +import org.apache.hadoop.util.StringUtils; import org.xml.sax.ContentHandler; import org.xml.sax.SAXException; import org.xml.sax.helpers.AttributesImpl; @@ -4348,7 +4349,7 @@ static class RollingUpgradeOp extends FSEditLogOp { // @Idempotent public RollingUpgradeOp(FSEditLogOpCodes code, String name) { super(code); - this.name = name.toUpperCase(); 
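The one-line swap at this hunk is the recurring fix of the whole patch: the removed call just above uses the JVM's default locale, while the added replacement that follows pins the conversion through the StringUtils helper. The failure mode being guarded against is locale-sensitive case mapping, most famously the Turkish dotted/dotless i. A minimal, self-contained demonstration (hypothetical demo class, not part of this patch):

    import java.util.Locale;

    public class CaseMappingDemo {
      public static void main(String[] args) {
        // Under a Turkish locale, 'i' upper-cases to dotted capital I
        // (U+0130), so the result no longer matches an ASCII enum or
        // opcode name.
        System.out.println("rolling".toUpperCase(new Locale("tr", "TR")));
        // Pinning Locale.ENGLISH always yields the ASCII mapping i -> I.
        System.out.println("rolling".toUpperCase(Locale.ENGLISH));
      }
    }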
+ this.name = StringUtils.toUpperCase(name); } static RollingUpgradeOp getStartInstance(OpInstanceCache cache) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java index 711d0f8c6f..39ce2dcc81 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java @@ -19,6 +19,7 @@ import com.google.common.base.Objects; import org.apache.hadoop.fs.StorageType; +import org.apache.hadoop.util.StringUtils; public class QuotaByStorageTypeEntry { private StorageType type; @@ -53,7 +54,7 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); assert (type != null); - sb.append(type.toString().toLowerCase()); + sb.append(StringUtils.toLowerCase(type.toString())); sb.append(':'); sb.append(quota); return sb.toString(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java index 83e6426a30..ec7e0c9470 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java @@ -587,7 +587,7 @@ private int processStartupCommand(CommandLineOpts opts) throws Exception { return 0; } - String cmd = opts.getCommand().toString().toLowerCase(); + String cmd = StringUtils.toLowerCase(opts.getCommand().toString()); int exitCode = 0; try { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java index 92a16cd282..e6cf16c89d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; @@ -79,19 +80,19 @@ enum Command { private static final Map map; static { map = new HashMap(); - map.put(NAMENODE.getName().toLowerCase(), + map.put(StringUtils.toLowerCase(NAMENODE.getName()), new NameNodesCommandHandler()); - map.put(SECONDARY.getName().toLowerCase(), + map.put(StringUtils.toLowerCase(SECONDARY.getName()), new SecondaryNameNodesCommandHandler()); - map.put(BACKUP.getName().toLowerCase(), + map.put(StringUtils.toLowerCase(BACKUP.getName()), new BackupNodesCommandHandler()); - map.put(INCLUDE_FILE.getName().toLowerCase(), + map.put(StringUtils.toLowerCase(INCLUDE_FILE.getName()), new CommandHandler(DFSConfigKeys.DFS_HOSTS)); - map.put(EXCLUDE_FILE.getName().toLowerCase(), + map.put(StringUtils.toLowerCase(EXCLUDE_FILE.getName()), new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE)); - map.put(NNRPCADDRESSES.getName().toLowerCase(), + map.put(StringUtils.toLowerCase(NNRPCADDRESSES.getName()), new 
NNRpcAddressesCommandHandler()); - map.put(CONFKEY.getName().toLowerCase(), + map.put(StringUtils.toLowerCase(CONFKEY.getName()), new PrintConfKeyCommandHandler()); } @@ -116,7 +117,7 @@ public String getDescription() { } public static CommandHandler getHandler(String cmd) { - return map.get(cmd.toLowerCase()); + return map.get(StringUtils.toLowerCase(cmd)); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java index c4b8424571..de3acebd92 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsVisitorFactory.java @@ -24,6 +24,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.util.StringUtils; /** * EditsVisitorFactory for different implementations of EditsVisitor @@ -43,7 +44,7 @@ public class OfflineEditsVisitorFactory { */ static public OfflineEditsVisitor getEditsVisitor(String filename, String processor, boolean printToScreen) throws IOException { - if(processor.toLowerCase().equals("binary")) { + if(StringUtils.equalsIgnoreCase("binary", processor)) { return new BinaryEditsVisitor(filename); } OfflineEditsVisitor vis; @@ -59,9 +60,9 @@ static public OfflineEditsVisitor getEditsVisitor(String filename, outs[1] = System.out; out = new TeeOutputStream(outs); } - if(processor.toLowerCase().equals("xml")) { + if(StringUtils.equalsIgnoreCase("xml", processor)) { vis = new XmlEditsVisitor(out); - } else if(processor.toLowerCase().equals("stats")) { + } else if(StringUtils.equalsIgnoreCase("stats", processor)) { vis = new StatisticsEditsVisitor(out); } else { throw new IOException("Unknown processor " + processor + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java index 43fcd69a83..429b6fc487 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java @@ -33,6 +33,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hdfs.web.JsonUtil; +import org.apache.hadoop.util.StringUtils; import java.io.FileNotFoundException; import java.io.IOException; @@ -51,6 +52,7 @@ import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.APPLICATION_JSON_UTF8; import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.WEBHDFS_PREFIX; import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.WEBHDFS_PREFIX_LENGTH; + /** * Implement the read-only WebHDFS API for fsimage. */ @@ -141,7 +143,7 @@ public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) private static String getOp(QueryStringDecoder decoder) { Map> parameters = decoder.parameters(); return parameters.containsKey("op") - ? parameters.get("op").get(0).toUpperCase() : null; + ?
StringUtils.toUpperCase(parameters.get("op").get(0)) : null; } private static String getPath(QueryStringDecoder decoder) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java index b6ff4b656d..5ad1f2464e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java @@ -39,6 +39,7 @@ import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler; import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler; +import org.apache.hadoop.util.StringUtils; /** * Subclass of {@link AuthenticationFilter} that @@ -96,7 +97,7 @@ private static HttpServletRequest toLowerCase(final HttpServletRequest request) final Map> m = new HashMap>(); for(Map.Entry entry : original.entrySet()) { - final String key = entry.getKey().toLowerCase(); + final String key = StringUtils.toLowerCase(entry.getKey()); List strings = m.get(key); if (strings == null) { strings = new ArrayList(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java index 2ae3445691..febe1253a8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java @@ -28,6 +28,7 @@ import com.sun.jersey.spi.container.ContainerRequestFilter; import com.sun.jersey.spi.container.ContainerResponseFilter; import com.sun.jersey.spi.container.ResourceFilter; +import org.apache.hadoop.util.StringUtils; /** * A filter to change parameter names to lower cases @@ -75,7 +76,7 @@ private static URI rebuildQuery(final URI uri, final MultivaluedMap parameters) { UriBuilder b = UriBuilder.fromUri(uri).replaceQuery(""); for(Map.Entry> e : parameters.entrySet()) { - final String key = e.getKey().toLowerCase(); + final String key = StringUtils.toLowerCase(e.getKey()); for(String v : e.getValue()) { b = b.queryParam(key, v); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java index 938f7c77f6..a907404bed 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java @@ -80,6 +80,7 @@ import org.apache.hadoop.security.token.TokenSelector; import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector; import org.apache.hadoop.util.Progressable; +import org.apache.hadoop.util.StringUtils; import org.mortbay.util.ajax.JSON; import com.google.common.annotations.VisibleForTesting; @@ -1242,7 +1243,7 @@ static URL removeOffsetParam(final URL url) throws MalformedURLException { if (query == null) { return url; } - final String lower = query.toLowerCase(); + final String lower = StringUtils.toLowerCase(query); if (!lower.startsWith(OFFSET_PARAM_PREFIX) && !lower.contains("&" + OFFSET_PARAM_PREFIX)) { return url; @@ -1253,7 +1254,7 @@ static URL removeOffsetParam(final URL url) throws 
MalformedURLException { for(final StringTokenizer st = new StringTokenizer(query, "&"); st.hasMoreTokens();) { final String token = st.nextToken(); - if (!token.toLowerCase().startsWith(OFFSET_PARAM_PREFIX)) { + if (!StringUtils.toLowerCase(token).startsWith(OFFSET_PARAM_PREFIX)) { if (b == null) { b = new StringBuilder("?").append(token); } else { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java index 1703e3b25d..60d201bb4c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumParam.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hdfs.web.resources; import java.util.Arrays; +import org.apache.hadoop.util.StringUtils; abstract class EnumParam> extends Param> { EnumParam(final Domain domain, final E value) { @@ -40,7 +41,7 @@ public final String getDomain() { @Override final E parse(final String str) { - return Enum.valueOf(enumClass, str.toUpperCase()); + return Enum.valueOf(enumClass, StringUtils.toUpperCase(str)); } } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java index 5adb5a6fc2..c2dfadf5db 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/EnumSetParam.java @@ -20,6 +20,7 @@ import java.util.Arrays; import java.util.EnumSet; import java.util.Iterator; +import org.apache.hadoop.util.StringUtils; abstract class EnumSetParam> extends Param, EnumSetParam.Domain> { /** Convert an EnumSet to a string of comma separated values. */ @@ -82,7 +83,7 @@ final EnumSet parse(final String str) { i = j > 0 ? j + 1 : 0; j = str.indexOf(',', i); final String sub = j >= 0? 
str.substring(i, j): str.substring(i); - set.add(Enum.valueOf(enumClass, sub.trim().toUpperCase())); + set.add(Enum.valueOf(enumClass, StringUtils.toUpperCase(sub.trim()))); } } return set; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java index ac6acf90b6..b439a28a02 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshotManager.java @@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.server.namenode.snapshot; import static org.mockito.Matchers.anyObject; -import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -31,6 +30,7 @@ import org.apache.hadoop.hdfs.server.namenode.INode; import org.apache.hadoop.hdfs.server.namenode.INodeDirectory; import org.apache.hadoop.hdfs.server.namenode.INodesInPath; +import org.apache.hadoop.util.StringUtils; import org.junit.Assert; import org.junit.Test; @@ -70,7 +70,7 @@ public void testSnapshotLimits() throws Exception { Assert.fail("Expected SnapshotException not thrown"); } catch (SnapshotException se) { Assert.assertTrue( - se.getMessage().toLowerCase().contains("rollover")); + StringUtils.toLowerCase(se.getMessage()).contains("rollover")); } // Delete a snapshot to free up a slot. @@ -86,7 +86,7 @@ public void testSnapshotLimits() throws Exception { Assert.fail("Expected SnapshotException not thrown"); } catch (SnapshotException se) { Assert.assertTrue( - se.getMessage().toLowerCase().contains("rollover")); + StringUtils.toLowerCase(se.getMessage()).contains("rollover")); } } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java index aad63d3158..a0e7041904 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java @@ -59,6 +59,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.service.AbstractService; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity; import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent; import org.apache.hadoop.yarn.client.api.TimelineClient; @@ -711,7 +712,7 @@ public void processEventForJobSummary(HistoryEvent event, JobSummary summary, private void processEventForTimelineServer(HistoryEvent event, JobId jobId, long timestamp) { TimelineEvent tEvent = new TimelineEvent(); - tEvent.setEventType(event.getEventType().name().toUpperCase()); + tEvent.setEventType(StringUtils.toUpperCase(event.getEventType().name())); tEvent.setTimestamp(timestamp); TimelineEntity tEntity = new TimelineEntity(); diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java index 53f21db12d..0f528e4063 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java @@ -22,7 +22,6 @@ import java.io.IOException; import java.net.URLDecoder; -import java.util.Locale; import javax.servlet.http.HttpServletResponse; @@ -226,8 +225,9 @@ public void tasks() { if (app.getJob() != null) { try { String tt = $(TASK_TYPE); - tt = tt.isEmpty() ? "All" : StringUtils.capitalize(MRApps.taskType(tt). - toString().toLowerCase(Locale.US)); + tt = tt.isEmpty() ? "All" : StringUtils.capitalize( + org.apache.hadoop.util.StringUtils.toLowerCase( + MRApps.taskType(tt).toString())); setTitle(join(tt, " Tasks for ", $(JOB_ID))); } catch (Exception e) { LOG.error("Failed to render tasks page with task type : " diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java index 553ba707a0..5b8d3a79fe 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java @@ -41,6 +41,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskState; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.mapreduce.v2.util.MRApps; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport; @@ -314,7 +315,7 @@ public static org.apache.hadoop.mapreduce.QueueState fromYarn( QueueState state) { org.apache.hadoop.mapreduce.QueueState qState = org.apache.hadoop.mapreduce.QueueState.getState( - state.toString().toLowerCase()); + StringUtils.toLowerCase(state.toString())); return qState; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java index 08b44f8617..1520fc8139 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java @@ -303,7 +303,7 @@ private static void addToClasspathIfNotJar(Path[] paths, remoteFS.getWorkingDirectory())); String name = (null == u.getFragment()) ? 
p.getName() : u.getFragment(); - if (!name.toLowerCase().endsWith(".jar")) { + if (!StringUtils.toLowerCase(name).endsWith(".jar")) { linkLookup.put(p, name); } } @@ -317,7 +317,7 @@ private static void addToClasspathIfNotJar(Path[] paths, if (name == null) { name = p.getName(); } - if(!name.toLowerCase().endsWith(".jar")) { + if(!StringUtils.toLowerCase(name).endsWith(".jar")) { MRApps.addToEnvironment( environment, classpathEnvVar, diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java index cc42b9c220..e36efec5c8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.mapreduce; +import org.apache.hadoop.util.StringUtils; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -151,9 +152,10 @@ public void testFromYarnQueueInfo() { .newRecord(org.apache.hadoop.yarn.api.records.QueueInfo.class); queueInfo.setQueueState(org.apache.hadoop.yarn.api.records.QueueState.STOPPED); org.apache.hadoop.mapreduce.QueueInfo returned = - TypeConverter.fromYarn(queueInfo, new Configuration()); + TypeConverter.fromYarn(queueInfo, new Configuration()); Assert.assertEquals("queueInfo translation didn't work.", - returned.getState().toString(), queueInfo.getQueueState().toString().toLowerCase()); + returned.getState().toString(), + StringUtils.toLowerCase(queueInfo.getQueueState().toString())); } /** diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java index 5274438782..7fa5d02682 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java @@ -116,7 +116,7 @@ public static enum Counter { * BYTES_READ counter and second one is of the BYTES_WRITTEN counter. 
*/ protected static String[] getFileSystemCounterNames(String uriScheme) { - String scheme = uriScheme.toUpperCase(); + String scheme = StringUtils.toUpperCase(uriScheme); return new String[]{scheme+"_BYTES_READ", scheme+"_BYTES_WRITTEN"}; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java index a53b76a3c4..e0e5b79d87 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java @@ -25,7 +25,6 @@ import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentSkipListMap; import java.util.Iterator; -import java.util.Locale; import java.util.Map; import com.google.common.base.Joiner; @@ -42,6 +41,7 @@ import org.apache.hadoop.mapreduce.Counter; import org.apache.hadoop.mapreduce.FileSystemCounter; import org.apache.hadoop.mapreduce.util.ResourceBundles; +import org.apache.hadoop.util.StringUtils; /** * An abstract class to provide common implementation of the filesystem @@ -227,7 +227,7 @@ else if (counters[ord] == null) { } private String checkScheme(String scheme) { - String fixed = scheme.toUpperCase(Locale.US); + String fixed = StringUtils.toUpperCase(scheme); String interned = schemes.putIfAbsent(fixed, fixed); if (schemes.size() > MAX_NUM_SCHEMES) { // mistakes or abuses diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java index eaa5af8672..06737c9939 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/filecache/DistributedCache.java @@ -473,7 +473,7 @@ public static boolean checkURIs(URI[] uriFiles, URI[] uriArchives) { if (fragment == null) { return false; } - String lowerCaseFragment = fragment.toLowerCase(); + String lowerCaseFragment = StringUtils.toLowerCase(fragment); if (fragments.contains(lowerCaseFragment)) { return false; } @@ -488,7 +488,7 @@ public static boolean checkURIs(URI[] uriFiles, URI[] uriArchives) { if (fragment == null) { return false; } - String lowerCaseFragment = fragment.toLowerCase(); + String lowerCaseFragment = StringUtils.toLowerCase(fragment); if (fragments.contains(lowerCaseFragment)) { return false; } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java index 00fbeda09a..a6953b7598 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java @@ -45,6 +45,8 @@ import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; +import org.apache.hadoop.util.StringUtils; + /** * An InputFormat that reads input data from an SQL table. *

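Every call site in these hunks is rewritten against org.apache.hadoop.util.StringUtils, whose helpers this patch adds alongside the call-site changes. A plausible shape for those helpers, assuming they simply delegate to the Locale.ENGLISH overloads (a sketch for orientation, not the committed code):

    import java.util.Locale;

    public final class StringUtils {
      private StringUtils() {}

      // Locale-insensitive lower-casing: identical output on every JVM,
      // regardless of the default locale.
      public static String toLowerCase(String str) {
        return str.toLowerCase(Locale.ENGLISH);
      }

      // Locale-insensitive upper-casing.
      public static String toUpperCase(String str) {
        return str.toUpperCase(Locale.ENGLISH);
      }

      // Case-insensitive comparison; String.equalsIgnoreCase compares
      // per character and is already locale-independent, unlike the
      // toLowerCase().equals(...) idiom it replaces.
      public static boolean equalsIgnoreCase(String s1, String s2) {
        return s1.equalsIgnoreCase(s2);
      }
    }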
@@ -162,7 +164,8 @@ public void setConf(Configuration conf) { this.connection = createConnection(); DatabaseMetaData dbMeta = connection.getMetaData(); - this.dbProductName = dbMeta.getDatabaseProductName().toUpperCase(); + this.dbProductName = + StringUtils.toUpperCase(dbMeta.getDatabaseProductName()); } catch (Exception ex) { throw new RuntimeException(ex); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java index 37ba5b7119..3630c642b6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java @@ -222,12 +222,14 @@ public int run(String[] argv) throws Exception { taskType = argv[2]; taskState = argv[3]; displayTasks = true; - if (!taskTypes.contains(taskType.toUpperCase())) { + if (!taskTypes.contains( + org.apache.hadoop.util.StringUtils.toUpperCase(taskType))) { System.out.println("Error: Invalid task-type: " + taskType); displayUsage(cmd); return exitCode; } - if (!taskStates.contains(taskState.toLowerCase())) { + if (!taskStates.contains( + org.apache.hadoop.util.StringUtils.toLowerCase(taskState))) { System.out.println("Error: Invalid task-state: " + taskState); displayUsage(cmd); return exitCode; @@ -593,7 +595,8 @@ protected void displayTasks(Job job, String type, String state) throws IOException, InterruptedException { TaskReport[] reports=null; - reports = job.getTaskReports(TaskType.valueOf(type.toUpperCase())); + reports = job.getTaskReports(TaskType.valueOf( + org.apache.hadoop.util.StringUtils.toUpperCase(type))); for (TaskReport report : reports) { TIPStatus status = report.getCurrentStatus(); if ((state.equalsIgnoreCase("pending") && status ==TIPStatus.PENDING) || diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java index d9cd07ba05..aff117e4c0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java @@ -154,16 +154,16 @@ long value() { static ByteMultiple parseString(String sMultiple) { if(sMultiple == null || sMultiple.isEmpty()) // MB by default return MB; - String sMU = sMultiple.toUpperCase(); - if(B.name().toUpperCase().endsWith(sMU)) + String sMU = StringUtils.toUpperCase(sMultiple); + if(StringUtils.toUpperCase(B.name()).endsWith(sMU)) return B; - if(KB.name().toUpperCase().endsWith(sMU)) + if(StringUtils.toUpperCase(KB.name()).endsWith(sMU)) return KB; - if(MB.name().toUpperCase().endsWith(sMU)) + if(StringUtils.toUpperCase(MB.name()).endsWith(sMU)) return MB; - if(GB.name().toUpperCase().endsWith(sMU)) + if(StringUtils.toUpperCase(GB.name()).endsWith(sMU)) return GB; - if(TB.name().toUpperCase().endsWith(sMU)) + if(StringUtils.toUpperCase(TB.name()).endsWith(sMU)) return TB; throw new IllegalArgumentException("Unsupported ByteMultiple "+sMultiple); } @@ -736,7 
+736,7 @@ public int run(String[] args) throws IOException { } for (int i = 0; i < args.length; i++) { // parse command line - if (args[i].toLowerCase().startsWith("-read")) { + if (StringUtils.toLowerCase(args[i]).startsWith("-read")) { testType = TestType.TEST_TYPE_READ; } else if (args[i].equalsIgnoreCase("-write")) { testType = TestType.TEST_TYPE_WRITE; @@ -755,9 +755,9 @@ public int run(String[] args) throws IOException { testType = TestType.TEST_TYPE_TRUNCATE; } else if (args[i].equalsIgnoreCase("-clean")) { testType = TestType.TEST_TYPE_CLEANUP; - } else if (args[i].toLowerCase().startsWith("-seq")) { + } else if (StringUtils.toLowerCase(args[i]).startsWith("-seq")) { isSequential = true; - } else if (args[i].toLowerCase().startsWith("-compression")) { + } else if (StringUtils.toLowerCase(args[i]).startsWith("-compression")) { compressionClass = args[++i]; } else if (args[i].equalsIgnoreCase("-nrfiles")) { nrFiles = Integer.parseInt(args[++i]); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java index 13e27cd9e1..92441ab456 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestFileSystem.java @@ -49,6 +49,7 @@ import org.apache.hadoop.mapred.*; import org.apache.hadoop.mapred.lib.LongSumReducer; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.StringUtils; public class TestFileSystem extends TestCase { private static final Log LOG = FileSystem.LOG; @@ -556,7 +557,8 @@ static void runTestCache(int port) throws Exception { static void checkPath(MiniDFSCluster cluster, FileSystem fileSys) throws IOException { InetSocketAddress add = cluster.getNameNode().getNameNodeAddress(); // Test upper/lower case - fileSys.checkPath(new Path("hdfs://" + add.getHostName().toUpperCase() + ":" + add.getPort())); + fileSys.checkPath(new Path("hdfs://" + + StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort())); } public void testFsClose() throws Exception { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java index 0642052e0f..57a7163a1e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/Constants.java @@ -18,6 +18,8 @@ package org.apache.hadoop.fs.slive; +import org.apache.hadoop.util.StringUtils; + /** * Constants used in various places in slive */ @@ -35,7 +37,7 @@ private Constants() { enum Distribution { BEG, END, UNIFORM, MID; String lowerName() { - return this.name().toLowerCase(); + return StringUtils.toLowerCase(this.name()); } } @@ -45,7 +47,7 @@ String lowerName() { enum OperationType { READ, APPEND, RENAME, LS, MKDIR, DELETE, CREATE, TRUNCATE; String lowerName() { - return this.name().toLowerCase(); + return 
StringUtils.toLowerCase(this.name()); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java index b4c98f7f17..02eca37ce5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationData.java @@ -19,6 +19,7 @@ package org.apache.hadoop.fs.slive; import org.apache.hadoop.fs.slive.Constants.Distribution; +import org.apache.hadoop.util.StringUtils; /** * This class holds the data representing what an operations distribution and @@ -52,7 +53,7 @@ class OperationData { percent = (Double.parseDouble(pieces[0]) / 100.0d); } else if (pieces.length >= 2) { percent = (Double.parseDouble(pieces[0]) / 100.0d); - distribution = Distribution.valueOf(pieces[1].toUpperCase()); + distribution = Distribution.valueOf(StringUtils.toUpperCase(pieces[1])); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java index 57ef0176a5..bca5a1c777 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/OperationOutput.java @@ -19,6 +19,7 @@ package org.apache.hadoop.fs.slive; import org.apache.hadoop.io.Text; +import org.apache.hadoop.util.StringUtils; /** * An operation output has the following object format whereby simple types are @@ -67,7 +68,8 @@ static enum OutputType { "Invalid key format - no type separator - " + TYPE_SEP); } try { - dataType = OutputType.valueOf(key.substring(0, place).toUpperCase()); + dataType = OutputType.valueOf( + StringUtils.toUpperCase(key.substring(0, place))); } catch (Exception e) { throw new IllegalArgumentException( "Invalid key format - invalid output type", e); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java index ce1837f3af..97360d6693 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/SliveTest.java @@ -42,6 +42,7 @@ import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.TextOutputFormat; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; @@ -157,7 +158,7 @@ private boolean getBool(String val) { if (val == null) { return false; } - String cleanupOpt = val.toLowerCase().trim(); + String cleanupOpt =
StringUtils.toLowerCase(val).trim(); if (cleanupOpt.equals("true") || cleanupOpt.equals("1")) { return true; } else { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java index f155daef14..0a9d0e9391 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java @@ -35,6 +35,7 @@ import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.*; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; @@ -214,23 +215,25 @@ public int run(String[] argv) throws IOException { if (!(fmt == Format.txt || cod == CCodec.pln)) { for (CType typ : ct) { String fn = - fmt.name().toUpperCase() + "_" + - cod.name().toUpperCase() + "_" + - typ.name().toUpperCase(); + StringUtils.toUpperCase(fmt.name()) + "_" + + StringUtils.toUpperCase(cod.name()) + "_" + + StringUtils.toUpperCase(typ.name()); typ.configure(job); - System.out.print(rwop.name().toUpperCase() + " " + fn + ": "); + System.out.print( + StringUtils.toUpperCase(rwop.name()) + " " + fn + ": "); System.out.println(rwop.exec(fn, job) / 1000 + " seconds"); } } else { String fn = - fmt.name().toUpperCase() + "_" + - cod.name().toUpperCase(); + StringUtils.toUpperCase(fmt.name()) + "_" + + StringUtils.toUpperCase(cod.name()); Path p = new Path(root, fn); if (rwop == RW.r && !fs.exists(p)) { fn += cod.getExt(); } - System.out.print(rwop.name().toUpperCase() + " " + fn + ": "); + System.out.print( + StringUtils.toUpperCase(rwop.name()) + " " + fn + ": "); System.out.println(rwop.exec(fn, job) / 1000 + " seconds"); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java index 02a083b4f0..d60905ed05 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java @@ -45,6 +45,7 @@ import org.apache.hadoop.mapred.lib.IdentityMapper; import org.apache.hadoop.mapred.lib.IdentityReducer; import org.apache.hadoop.mapreduce.MRConfig; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.junit.After; @@ -280,7 +281,7 @@ public void configure(JobConf conf) { public void map(WritableComparable key, Text value, OutputCollector output, Reporter reporter) throws IOException { - String str = value.toString().toLowerCase(); + String str = StringUtils.toLowerCase(value.toString()); output.collect(new Text(str), value); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java index 
270ddc9a55..8dec39d866 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/DBCountPageView.java @@ -102,7 +102,7 @@ private void startHsqldbServer() { private void createConnection(String driverClassName , String url) throws Exception { - if(driverClassName.toLowerCase().contains("oracle")) { + if(StringUtils.toLowerCase(driverClassName).contains("oracle")) { isOracle = true; } Class.forName(driverClassName); diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java index f342463370..b6a45ecfb8 100644 --- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java +++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/versioninfo/VersionInfoMojo.java @@ -13,6 +13,7 @@ */ package org.apache.hadoop.maven.plugin.versioninfo; +import java.util.Locale; import org.apache.hadoop.maven.plugin.util.Exec; import org.apache.hadoop.maven.plugin.util.FileSetUtils; import org.apache.maven.model.FileSet; @@ -329,7 +330,8 @@ public int compare(File lhs, File rhs) { } private String normalizePath(File file) { - return file.getPath().toUpperCase().replaceAll("\\\\", "/"); + return file.getPath().toUpperCase(Locale.ENGLISH) + .replaceAll("\\\\", "/"); } }); byte[] md5 = computeMD5(files); diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java index 6bed8bb830..c0c03b3fce 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java @@ -984,8 +984,8 @@ private static String trim(String s, String toTrim) { private String verifyAndConvertToStandardFormat(String rawDir) throws URISyntaxException { URI asUri = new URI(rawDir); if (asUri.getAuthority() == null - || asUri.getAuthority().toLowerCase(Locale.US).equalsIgnoreCase( - sessionUri.getAuthority().toLowerCase(Locale.US))) { + || asUri.getAuthority().toLowerCase(Locale.ENGLISH).equalsIgnoreCase( + sessionUri.getAuthority().toLowerCase(Locale.ENGLISH))) { // Applies to me. return trim(asUri.getPath(), "/"); } else { diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java index 71e84a11cc..ca7566b0f1 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java @@ -51,6 +51,7 @@ import org.apache.hadoop.tools.mapred.UniformSizeInputFormat; import com.google.common.collect.Maps; +import org.apache.hadoop.util.StringUtils; /** * Utility functions used in DistCp. @@ -121,8 +122,9 @@ public static long getLong(Configuration configuration, String label) { */ public static Class getStrategy(Configuration conf, DistCpOptions options) { - String confLabel = "distcp." + - options.getCopyStrategy().toLowerCase(Locale.getDefault()) + ".strategy.impl"; + String confLabel = "distcp." 
+ + StringUtils.toLowerCase(options.getCopyStrategy()) + + ".strategy" + ".impl"; return conf.getClass(confLabel, UniformSizeInputFormat.class, InputFormat.class); } @@ -221,7 +223,8 @@ public static void preserve(FileSystem targetFS, Path path, final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR); if (preserveXAttrs || preserveRawXattrs) { - final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase(); + final String rawNS = + StringUtils.toLowerCase(XAttr.NameSpace.RAW.name()); Map srcXAttrs = srcFileStatus.getXAttrs(); Map targetXAttrs = getXAttrs(targetFS, path); if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) { @@ -321,7 +324,8 @@ public static CopyListingFileStatus toCopyListingFileStatus( copyListingFileStatus.setXAttrs(srcXAttrs); } else { Map trgXAttrs = Maps.newHashMap(); - final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase(); + final String rawNS = + StringUtils.toLowerCase(XAttr.NameSpace.RAW.name()); for (Map.Entry ent : srcXAttrs.entrySet()) { final String xattrName = ent.getKey(); if (xattrName.startsWith(rawNS)) { diff --git a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java index f46c421607..8a6819b018 100644 --- a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java +++ b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java @@ -169,7 +169,9 @@ static enum FileAttribute { final char symbol; - private FileAttribute() {symbol = toString().toLowerCase().charAt(0);} + private FileAttribute() { + symbol = StringUtils.toLowerCase(toString()).charAt(0); + } static EnumSet parse(String s) { if (s == null || s.length() == 0) { diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java index 83eb947c89..b8035386d1 100644 --- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java +++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixJobSubmissionPolicy.java @@ -25,6 +25,7 @@ import java.util.concurrent.CountDownLatch; import java.io.IOException; +import org.apache.hadoop.util.StringUtils; enum GridmixJobSubmissionPolicy { @@ -84,6 +85,6 @@ public int getPollingInterval() { public static GridmixJobSubmissionPolicy getPolicy( Configuration conf, GridmixJobSubmissionPolicy defaultPolicy) { String policy = conf.get(JOB_SUBMISSION_POLICY, defaultPolicy.name()); - return valueOf(policy.toUpperCase()); + return valueOf(StringUtils.toUpperCase(policy)); } } diff --git a/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java b/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java index 7a35b46b3e..967929b42c 100644 --- a/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java +++ b/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemExtendedContract.java @@ -27,12 +27,12 @@ import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem; import org.apache.hadoop.fs.swift.util.SwiftTestUtils; import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.util.StringUtils; import org.junit.Test; import java.io.FileNotFoundException; 
 import java.io.IOException;
 import java.net.URI;
-import java.util.Locale;
 
 public class TestSwiftFileSystemExtendedContract extends SwiftFileSystemBaseTest {
 
@@ -115,7 +115,7 @@ public void testGetSchemeImplemented() throws Throwable {
   public void testFilesystemIsCaseSensitive() throws Exception {
     String mixedCaseFilename = "/test/UPPER.TXT";
     Path upper = path(mixedCaseFilename);
-    Path lower = path(mixedCaseFilename.toLowerCase(Locale.ENGLISH));
+    Path lower = path(StringUtils.toLowerCase(mixedCaseFilename));
     assertFalse("File exists" + upper, fs.exists(upper));
     assertFalse("File exists" + lower, fs.exists(lower));
     FSDataOutputStream out = fs.create(upper);
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
index 47fdb1ad55..c53a7c2ddd 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/HadoopLogsAnalyzer.java
@@ -38,6 +38,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.util.LineReader;
@@ -319,42 +320,42 @@ private int initializeHadoopLogsAnalyzer(String[] args)
     }
 
     for (int i = 0; i < args.length - (inputFilename == null ? 0 : 1); ++i) {
-      if ("-h".equals(args[i].toLowerCase())
-          || "-help".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-h", args[i])
+          || StringUtils.equalsIgnoreCase("-help", args[i])) {
         usage();
         return 0;
       }
 
-      if ("-c".equals(args[i].toLowerCase())
-          || "-collect-prefixes".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-c", args[i])
+          || StringUtils.equalsIgnoreCase("-collect-prefixes", args[i])) {
         collecting = true;
         continue;
       }
 
       // these control the job digest
-      if ("-write-job-trace".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-write-job-trace", args[i])) {
        ++i;
        jobTraceFilename = new Path(args[i]);
        continue;
      }
 
-      if ("-single-line-job-traces".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-single-line-job-traces", args[i])) {
        prettyprintTrace = false;
        continue;
      }
 
-      if ("-omit-task-details".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-omit-task-details", args[i])) {
        omitTaskDetails = true;
        continue;
      }
 
-      if ("-write-topology".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-write-topology", args[i])) {
        ++i;
        topologyFilename = new Path(args[i]);
        continue;
      }
 
-      if ("-job-digest-spectra".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-job-digest-spectra", args[i])) {
         ArrayList<Integer> values = new ArrayList<Integer>();
 
         ++i;
@@ -384,13 +385,13 @@ private int initializeHadoopLogsAnalyzer(String[] args)
         continue;
       }
 
-      if ("-d".equals(args[i].toLowerCase())
-          || "-debug".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-d", args[i])
+          || StringUtils.equalsIgnoreCase("-debug", args[i])) {
         debug = true;
         continue;
       }
 
-      if ("-spreads".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-spreads", args[i])) {
         int min = Integer.parseInt(args[i + 1]);
         int max = Integer.parseInt(args[i + 2]);
@@ -404,22 +405,22 @@ private int initializeHadoopLogsAnalyzer(String[] args)
       }
 
       // These control log-wide CDF outputs
-      if ("-delays".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-delays", args[i])) {
         delays = true;
         continue;
       }
 
-      if ("-runtimes".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-runtimes", args[i])) {
         runtimes = true;
         continue;
       }
 
-      if ("-tasktimes".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-tasktimes", args[i])) {
         collectTaskTimes = true;
         continue;
       }
 
-      if ("-v1".equals(args[i].toLowerCase())) {
+      if (StringUtils.equalsIgnoreCase("-v1", args[i])) {
         version = 1;
         continue;
       }
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
index eaa9547a96..c5ae2fc36d 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
@@ -433,7 +433,7 @@ private static Values getPre21Value(String name) {
       return Values.SUCCESS;
     }
 
-    return Values.valueOf(name.toUpperCase());
+    return Values.valueOf(StringUtils.toUpperCase(name));
   }
 
   private void processTaskUpdatedEvent(TaskUpdatedEvent event) {
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
index 903d5fbcfb..4a23fa6fcc 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.mapreduce.jobhistory.JhCounterGroup;
 import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
 
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.annotate.JsonAnySetter;
 
 /**
@@ -243,7 +244,7 @@ public void incorporateCounters(JhCounters counters) {
   }
 
   private static String canonicalizeCounterName(String nonCanonicalName) {
-    String result = nonCanonicalName.toLowerCase();
+    String result = StringUtils.toLowerCase(nonCanonicalName);
 
     result = result.replace(' ', '|');
     result = result.replace('-', '|');
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
index d1b365eb33..c21eb39663 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
@@ -23,6 +23,7 @@ import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.annotate.JsonAnySetter;
 
 // HACK ALERT!!!  This "should" have two subclasses, which might be called
@@ -611,7 +612,7 @@ void setResourceUsageMetrics(ResourceUsageMetrics metrics) {
   }
 
   private static String canonicalizeCounterName(String nonCanonicalName) {
-    String result = nonCanonicalName.toLowerCase();
+    String result = StringUtils.toLowerCase(nonCanonicalName);
 
     result = result.replace(' ', '|');
     result = result.replace('-', '|');
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
index 98d8aa0306..bc92b7149a 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * This is a class used to get the current environment
@@ -43,7 +44,7 @@ public Environment() throws IOException {
     // http://lopica.sourceforge.net/os.html
     String command = null;
     String OS = System.getProperty("os.name");
-    String lowerOs = OS.toLowerCase();
+    String lowerOs = StringUtils.toLowerCase(OS);
     if (OS.indexOf("Windows") > -1) {
       command = "cmd /C set";
     } else if (lowerOs.indexOf("ix") > -1 || lowerOs.indexOf("linux") > -1
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
index de8f740253..108ad0b722 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
@@ -36,6 +36,7 @@ import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -173,7 +174,7 @@ public int run(String[] args) throws Exception {
       if (types != null) {
         for (String type : types) {
           if (!type.trim().isEmpty()) {
-            appTypes.add(type.toUpperCase().trim());
+            appTypes.add(StringUtils.toUpperCase(type).trim());
           }
         }
       }
@@ -191,8 +192,8 @@ public int run(String[] args) throws Exception {
           break;
         }
         try {
-          appStates.add(YarnApplicationState.valueOf(state
-              .toUpperCase().trim()));
+          appStates.add(YarnApplicationState.valueOf(
+              StringUtils.toUpperCase(state).trim()));
         } catch (IllegalArgumentException ex) {
           sysout.println("The application state " + state
               + " is invalid.");
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
index d6036262b0..4f0ddfebf3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
@@ -111,7 +111,8 @@ public int run(String[] args) throws Exception {
       if (types != null) {
         for (String type : types) {
           if (!type.trim().isEmpty()) {
-            nodeStates.add(NodeState.valueOf(type.trim().toUpperCase()));
+            nodeStates.add(NodeState.valueOf(
+                org.apache.hadoop.util.StringUtils.toUpperCase(type.trim())));
           }
         }
       }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
index a8996f0298..ad009d6f42 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
@@ -26,6 +26,7 @@ import org.apache.commons.lang.math.LongRange;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
@@ -213,7 +214,7 @@ public void setApplicationTags(Set<String> tags) {
     // Convert applicationTags to lower case and add
     this.applicationTags = new HashSet<String>();
     for (String tag : tags) {
-      this.applicationTags.add(tag.toLowerCase());
+      this.applicationTags.add(StringUtils.toLowerCase(tag));
     }
   }
 
@@ -258,7 +259,8 @@ public void setApplicationStates(EnumSet<YarnApplicationState> applicationStates
   public void setApplicationStates(Set<String> applicationStates) {
     EnumSet<YarnApplicationState> appStates = null;
     for (YarnApplicationState state : YarnApplicationState.values()) {
-      if (applicationStates.contains(state.name().toLowerCase())) {
+      if (applicationStates.contains(
+          StringUtils.toLowerCase(state.name()))) {
         if (appStates == null) {
           appStates = EnumSet.of(state);
         } else {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
index 303b437160..67e3a84ce6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationSubmissionContextPBImpl.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
@@ -291,7 +292,7 @@ public void setApplicationTags(Set<String> tags) {
     // Convert applicationTags to lower case and add
     this.applicationTags = new HashSet<String>();
     for (String tag : tags) {
-      this.applicationTags.add(tag.toLowerCase());
+      this.applicationTags.add(StringUtils.toLowerCase(tag));
     }
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
index 870aa95212..bd9c907418 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/FSDownload.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.net.URISyntaxException;
 import java.security.PrivilegedExceptionAction;
-import java.util.Locale;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
@@ -47,6 +46,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.RunJar;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.LocalResource;
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
@@ -272,7 +272,7 @@ private Path copy(Path sCopy, Path dstdir) throws IOException {
   private long unpack(File localrsrc, File dst) throws IOException {
     switch (resource.getType()) {
     case ARCHIVE: {
-      String lowerDst = dst.getName().toLowerCase(Locale.ENGLISH);
+      String lowerDst = StringUtils.toLowerCase(dst.getName());
       if (lowerDst.endsWith(".jar")) {
         RunJar.unJar(localrsrc, dst);
       } else if (lowerDst.endsWith(".zip")) {
@@ -291,7 +291,7 @@ private long unpack(File localrsrc, File dst) throws IOException {
       }
       break;
     case PATTERN: {
-      String lowerDst = dst.getName().toLowerCase(Locale.ENGLISH);
+      String lowerDst = StringUtils.toLowerCase(dst.getName());
       if (lowerDst.endsWith(".jar")) {
         String p = resource.getPattern();
         RunJar.unJar(localrsrc, dst,
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
index c848828845..5acb3f3ee0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet/HamletGen.java
@@ -26,7 +26,6 @@ import java.lang.reflect.Method;
 import java.lang.reflect.ParameterizedType;
 import java.lang.reflect.Type;
-import java.util.Locale;
 import java.util.Set;
 import java.util.regex.Pattern;
 
@@ -35,6 +34,7 @@ import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.webapp.WebAppException;
 
 import org.slf4j.Logger;
@@ -241,7 +241,7 @@ void genFactoryMethod(String retName, String methodName, int indent) {
     puts(indent, "\n",
          "private ", retName, " ", methodName, "_(T e, boolean inline) {\n",
-         "  return new ", retName, "(\"", retName.toLowerCase(Locale.US),
+         "  return new ", retName, "(\"", StringUtils.toLowerCase(retName),
          "\", e, opt(", !endTagOptional.contains(retName), ", inline, ",
          retName.equals("PRE"), ")); }");
   }
@@ -258,7 +258,7 @@ void genNewElementMethod(String className, Method method, int indent) {
       puts(0, ") {");
       puts(indent, topMode ? "" : "  closeAttrs();\n",
-           "  return ", retName.toLowerCase(Locale.US), "_(this, ",
+           "  return ", StringUtils.toLowerCase(retName), "_" + "(this, ",
            isInline(className, retName), ");\n", "}");
     } else if (params.length == 1) {
       puts(0, "String selector) {");
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
index 68dc84e7bf..06a56d85d2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/java/org/apache/hadoop/registry/client/binding/RegistryUtils.java
@@ -88,7 +88,8 @@ public static String homePathForUser(String username) {
    * @return the converted username
    */
   public static String convertUsername(String username) {
-    String converted= username.toLowerCase(Locale.ENGLISH);
+    String converted =
+        org.apache.hadoop.util.StringUtils.toLowerCase(username);
     int atSymbol = converted.indexOf('@');
     if (atSymbol > 0) {
       converted = converted.substring(0, atSymbol);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
index 2040f57514..2af4027cc3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
@@ -31,6 +31,7 @@ import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.server.api.ApplicationContext;
 import org.apache.hadoop.yarn.server.webapp.WebServices;
@@ -147,7 +148,8 @@ public ContainerInfo getContainer(@Context HttpServletRequest req,
     }
     Set<String> appStates = parseQueries(statesQuery, true);
     for (String appState : appStates) {
-      switch (YarnApplicationState.valueOf(appState.toUpperCase())) {
+      switch (YarnApplicationState.valueOf(
+          StringUtils.toUpperCase(appState))) {
       case FINISHED:
       case FAILED:
       case KILLED:
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
index 0907f2c154..915e3f2ed6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/TimelineWebServices.java
@@ -52,6 +52,7 @@
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineDomains;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
@@ -417,7 +418,7 @@ private static EnumSet<Field> parseFieldsStr(String str, String delimiter) {
     String[] strs = str.split(delimiter);
     List<Field> fieldList = new ArrayList<Field>();
     for (String s : strs) {
-      s = s.trim().toUpperCase();
+      s = StringUtils.toUpperCase(s.trim());
       if (s.equals("EVENTS")) {
         fieldList.add(Field.EVENTS);
       } else if (s.equals("LASTEVENTONLY")) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
index 385d10ae8b..6d94737f90 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/WebServices.java
@@ -31,6 +31,7 @@
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -162,9 +163,9 @@ public Collection<ApplicationReport> run() throws Exception {
           break;
         }
 
-        if (checkAppStates
-            && !appStates.contains(appReport.getYarnApplicationState().toString()
-              .toLowerCase())) {
+        if (checkAppStates &&
+            !appStates.contains(StringUtils.toLowerCase(
+                appReport.getYarnApplicationState().toString()))) {
           continue;
         }
         if (finalStatusQuery != null && !finalStatusQuery.isEmpty()) {
@@ -184,9 +185,9 @@ public Collection<ApplicationReport> run() throws Exception {
            continue;
          }
        }
-        if (checkAppTypes
-            && !appTypes.contains(appReport.getApplicationType().trim()
-              .toLowerCase())) {
+        if (checkAppTypes &&
+            !appTypes.contains(
+                StringUtils.toLowerCase(appReport.getApplicationType().trim()))) {
          continue;
        }
 
@@ -368,7 +369,8 @@ protected void init(HttpServletResponse response) {
       if (isState) {
         try {
           // enum string is in uppercase
-          YarnApplicationState.valueOf(paramStr.trim().toUpperCase());
+          YarnApplicationState.valueOf(
+              StringUtils.toUpperCase(paramStr.trim()));
         } catch (RuntimeException e) {
           YarnApplicationState[] stateArray =
               YarnApplicationState.values();
@@ -378,7 +380,7 @@
               + allAppStates);
         }
       }
-      params.add(paramStr.trim().toLowerCase());
+      params.add(StringUtils.toLowerCase(paramStr.trim()));
     }
   }
 }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
index 46b45f852a..21d70b4768 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
@@ -46,6 +46,7 @@
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
 import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
@@ -756,7 +757,7 @@ public void remove() {
       if (applicationTypes != null && !applicationTypes.isEmpty()) {
         String appTypeToMatch = caseSensitive
             ? application.getApplicationType()
-            : application.getApplicationType().toLowerCase();
+            : StringUtils.toLowerCase(application.getApplicationType());
         if (!applicationTypes.contains(appTypeToMatch)) {
           continue;
         }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java
index 230f9a9a58..d6e9e45b83 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/resource/ResourceWeights.java
@@ -20,6 +20,7 @@
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.util.StringUtils;
 
 @Private
 @Evolving
@@ -61,7 +62,7 @@ public String toString() {
         sb.append(", ");
       }
       ResourceType resourceType = ResourceType.values()[i];
-      sb.append(resourceType.name().toLowerCase());
+      sb.append(StringUtils.toLowerCase(resourceType.name()));
       sb.append(String.format(" weight=%.1f", getWeight(resourceType)));
     }
     sb.append(">");
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
index 3528c2d617..102e5539f1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacitySchedulerConfiguration.java
@@ -394,7 +394,7 @@ public void setUserLimitFactor(String queue, float userLimitFactor) {
   public QueueState getState(String queue) {
     String state = get(getQueuePrefix(queue) + STATE);
     return (state != null) ?
-        QueueState.valueOf(state.toUpperCase()) : QueueState.RUNNING;
+        QueueState.valueOf(StringUtils.toUpperCase(state)) : QueueState.RUNNING;
   }
 
   public void setAccessibleNodeLabels(String queue, Set<String> labels) {
@@ -490,7 +490,7 @@ public boolean getReservationContinueLook() {
   }
 
   private static String getAclKey(QueueACL acl) {
-    return "acl_" + acl.toString().toLowerCase();
+    return "acl_" + StringUtils.toLowerCase(acl.toString());
   }
 
   public AccessControlList getAcl(String queue, QueueACL acl) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java
index 32ef906a57..e477e6e4e2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FairSchedulerConfiguration.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
@@ -241,7 +242,7 @@ public boolean getUsePortForNodeName() {
   public static Resource parseResourceConfigValue(String val)
       throws AllocationConfigurationException {
     try {
-      val = val.toLowerCase();
+      val = StringUtils.toLowerCase(val);
       int memory = findResource(val, "mb");
       int vcores = findResource(val, "vcores");
       return BuilderUtils.newResource(memory, vcores);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
index cc28afc1e4..bf2a25b61a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/SchedulingPolicy.java
@@ -20,6 +20,7 @@
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.policies.DominantResourceFairnessPolicy;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.policies.FairSharePolicy;
@@ -72,7 +73,7 @@ public static SchedulingPolicy parse(String policy)
       throws AllocationConfigurationException {
     @SuppressWarnings("rawtypes")
     Class clazz;
-    String text = policy.toLowerCase();
+    String text = StringUtils.toLowerCase(policy);
     if (text.equalsIgnoreCase(FairSharePolicy.NAME)) {
       clazz = FairSharePolicy.class;
     } else if (text.equalsIgnoreCase(FifoPolicy.NAME)) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
index f28a9a88bc..13e0835206 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
@@ -77,7 +77,7 @@ protected void render(Block html) {
         .th(".nodeManagerVersion", "Version")._()._().tbody();
     NodeState stateFilter = null;
     if (type != null && !type.isEmpty()) {
-      stateFilter = NodeState.valueOf(type.toUpperCase());
+      stateFilter = NodeState.valueOf(StringUtils.toUpperCase(type));
     }
     Collection<RMNode> rmNodes = this.rm.getRMContext().getRMNodes().values();
     boolean isInactive = false;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
index f8836d5e79..059ea09fb1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
@@ -66,6 +66,7 @@
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationHandler;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
@@ -257,7 +258,8 @@ public NodesInfo getNodes(@QueryParam("states") String states) {
     } else {
       acceptedStates = EnumSet.noneOf(NodeState.class);
       for (String stateStr : states.split(",")) {
-        acceptedStates.add(NodeState.valueOf(stateStr.toUpperCase()));
+        acceptedStates.add(
+            NodeState.valueOf(StringUtils.toUpperCase(stateStr)));
       }
     }
 
@@ -506,7 +508,7 @@ public ApplicationStatisticsInfo getAppStatistics(
     // if no states, returns the counts of all RMAppStates
     if (states.size() == 0) {
       for (YarnApplicationState state : YarnApplicationState.values()) {
-        states.add(state.toString().toLowerCase());
+        states.add(StringUtils.toLowerCase(state.toString()));
       }
     }
     // in case we extend to multiple applicationTypes in the future
@@ -518,8 +520,9 @@ public ApplicationStatisticsInfo getAppStatistics(
     ConcurrentMap<ApplicationId, RMApp> apps = rm.getRMContext().getRMApps();
     for (RMApp rmapp : apps.values()) {
       YarnApplicationState state = rmapp.createApplicationState();
-      String type = rmapp.getApplicationType().trim().toLowerCase();
-      if (states.contains(state.toString().toLowerCase())) {
+      String type = StringUtils.toLowerCase(rmapp.getApplicationType().trim());
+      if (states.contains(
+          StringUtils.toLowerCase(state.toString()))) {
         if (types.contains(ANY)) {
           countApp(scoreboard, state, ANY);
         } else if (types.contains(type)) {
@@ -554,7 +557,8 @@ private static Set<String> parseQueries(
       if (isState) {
         try {
           // enum string is in uppercase
-          YarnApplicationState.valueOf(paramStr.trim().toUpperCase());
+          YarnApplicationState.valueOf(
+              StringUtils.toUpperCase(paramStr.trim()));
         } catch (RuntimeException e) {
           YarnApplicationState[] stateArray =
               YarnApplicationState.values();
@@ -564,7 +568,8 @@
               + " specified. It should be one of " + allAppStates);
         }
       }
-      params.add(paramStr.trim().toLowerCase());
+      params.add(
+          StringUtils.toLowerCase(paramStr.trim()));
     }
   }
 
@@ -582,7 +587,8 @@ private static Map<YarnApplicationState, Map<String, Long>> buildScoreboard(
     for (String state : states) {
       Map<String, Long> partScoreboard = new HashMap<String, Long>();
       scoreboard.put(
-          YarnApplicationState.valueOf(state.toUpperCase()), partScoreboard);
+          YarnApplicationState.valueOf(StringUtils.toUpperCase(state)),
+          partScoreboard);
       // types is verified not to be empty
       for (String type : types) {
         partScoreboard.put(type, 0L);
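
The change pattern is identical in every hunk above: each locale-sensitive String.toLowerCase()/String.toUpperCase() call is routed through the Locale.ENGLISH-pinned helpers this patch adds to org.apache.hadoop.util.StringUtils. The sketch below illustrates the failure the patch guards against; the CaseDemo class, its method bodies, and the printed values are illustrative assumptions for a JVM running under a Turkish default locale, not part of the patch itself.

import java.util.Locale;

// Minimal sketch (assumed, not the exact Hadoop source) of the helpers
// that the patched call sites now use.
class CaseDemo {
  static String toLowerCase(String str) {
    return str.toLowerCase(Locale.ENGLISH); // locale pinned, not JVM default
  }

  static String toUpperCase(String str) {
    return str.toUpperCase(Locale.ENGLISH); // locale pinned, not JVM default
  }

  public static void main(String[] args) {
    // Simulate a JVM started under a Turkish default locale.
    Locale.setDefault(new Locale("tr", "TR"));

    // Default-locale conversion: 'I' lowercases to dotless 'ı' (U+0131)
    // and 'i' uppercases to dotted 'İ' (U+0130), so enum round-trips break.
    System.out.println("FINISHED".toLowerCase()); // prints "fınıshed"
    System.out.println("finished".toUpperCase()); // prints "FİNİSHED"

    // Pinned conversion is stable regardless of the default locale.
    System.out.println(toLowerCase("FINISHED"));  // prints "finished"
    System.out.println(toUpperCase("finished"));  // prints "FINISHED"
  }
}

Pinning the locale matters because lookups such as YarnApplicationState.valueOf(StringUtils.toUpperCase(state)) must behave the same on every node, whatever user.language the JVM happens to start with.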