diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 048b68747d..fed6a6cb92 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -103,6 +103,9 @@ Trunk (Unreleased)
 
     HADOOP-8736. Add Builder for building RPC server. (Brandon Li via Suresh)
 
+    HADOOP-8814. Replace string equals "" by String#isEmpty().
+    (Brandon Li via suresh)
+
   BUG FIXES
 
     HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 21454154e0..bd42f683c0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -1073,7 +1073,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    */
   public boolean getBoolean(String name, boolean defaultValue) {
     String valueString = getTrimmed(name);
-    if (null == valueString || "".equals(valueString)) {
+    if (null == valueString || valueString.isEmpty()) {
       return defaultValue;
     }
 
@@ -1140,7 +1140,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    */
   public Pattern getPattern(String name, Pattern defaultValue) {
     String valString = get(name);
-    if (null == valString || "".equals(valString)) {
+    if (null == valString || valString.isEmpty()) {
       return defaultValue;
     }
     try {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
index 452d29f7b7..1a24e655c5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
@@ -153,9 +153,9 @@ public class ReconfigurationServlet extends HttpServlet {
         StringEscapeUtils.unescapeHtml(req.getParameter(rawParam));
       if (value != null) {
         if (value.equals(newConf.getRaw(param)) || value.equals("default") ||
-            value.equals("null") || value.equals("")) {
+            value.equals("null") || value.isEmpty()) {
           if ((value.equals("default") || value.equals("null") ||
-              value.equals("")) &&
+              value.isEmpty()) &&
               oldConf.getRaw(param) != null) {
             out.println("<p>Changed \"" +
                         StringEscapeUtils.escapeHtml(param) + "\" from \"" +
@@ -163,7 +163,7 @@ public class ReconfigurationServlet extends HttpServlet {
                         "\" to default</p>");
             reconf.reconfigureProperty(param, null);
           } else if (!value.equals("default") && !value.equals("null") &&
-                     !value.equals("") &&
+                     !value.isEmpty() &&
                      (oldConf.getRaw(param) == null ||
                       !oldConf.getRaw(param).equals(value))) {
             // change from default or value to different value
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index 5cfce9b019..dc15b052cc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -2003,7 +2003,7 @@ public final class FileContext {
       String filename = inPathPattern.toUri().getPath();
 
       // path has only zero component
-      if ("".equals(filename) || Path.SEPARATOR.equals(filename)) {
+      if (filename.isEmpty() || Path.SEPARATOR.equals(filename)) {
         Path p = inPathPattern.makeQualified(uri, null);
         return getFileStatus(new Path[]{p});
       }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index ff9f2db1ff..e780812983 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -1597,7 +1597,7 @@ public abstract class FileSystem extends Configured implements Closeable {
     String filename = pathPattern.toUri().getPath();
 
     // path has only zero component
-    if ("".equals(filename) || Path.SEPARATOR.equals(filename)) {
+    if (filename.isEmpty() || Path.SEPARATOR.equals(filename)) {
       return getFileStatus(new Path[]{pathPattern});
     }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
index c0ebebfe67..0a2dfe7d39 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
@@ -68,7 +68,7 @@ public class Path implements Comparable {
     // Add a slash to parent's path so resolution is compatible with URI's
     URI parentUri = parent.uri;
     String parentPath = parentUri.getPath();
-    if (!(parentPath.equals("/") || parentPath.equals(""))) {
+    if (!(parentPath.equals("/") || parentPath.isEmpty())) {
       try {
         parentUri = new URI(parentUri.getScheme(), parentUri.getAuthority(),
                             parentUri.getPath()+"/", null, parentUri.getFragment());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
index b33b1a778f..267510d364 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
@@ -492,7 +492,7 @@ public class RawLocalFileSystem extends FileSystem {
      * onwer.equals("").
      */
     private boolean isPermissionLoaded() {
-      return !super.getOwner().equals("");
+      return !super.getOwner().isEmpty();
     }
 
     RawLocalFileStatus(File f, long defaultBlockSize, FileSystem fs) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java
index ab3e2e18ad..b9a9277ade 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java
@@ -128,7 +128,7 @@ public class RawLocalFs extends DelegateToFileSystem {
     try {
       FileStatus fs = getFileStatus(f);
       // If f refers to a regular file or directory
-      if ("".equals(target)) {
+      if (target.isEmpty()) {
         return fs;
       }
       // Otherwise f refers to a symlink
@@ -150,7 +150,7 @@ public class RawLocalFs extends DelegateToFileSystem {
        * the readBasicFileAttributes method in java.nio.file.attributes
        * when available.
        */
-      if (!"".equals(target)) {
+      if (!target.isEmpty()) {
         return new FileStatus(0, false, 0, 0, 0, 0, FsPermission.getDefault(),
             "", "", new Path(target), f);
       }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
index af1c7a9885..efc415f89e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/s3native/NativeS3FileSystem.java
@@ -300,7 +300,7 @@ public class NativeS3FileSystem extends FileSystem {
   }
 
   private static String pathToKey(Path path) {
-    if (path.toUri().getScheme() != null && "".equals(path.toUri().getPath())) {
+    if (path.toUri().getScheme() != null && path.toUri().getPath().isEmpty()) {
       // allow uris without trailing slash after bucket to refer to root,
       // like s3n://mybucket
       return "";
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
index 5287581073..7a2a5f1eba 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
@@ -276,7 +276,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
 
     String pathParts[] = znodeWorkingDir.split("/");
     Preconditions.checkArgument(pathParts.length >= 1 &&
-        "".equals(pathParts[0]),
+        pathParts[0].isEmpty(),
         "Invalid path: %s", znodeWorkingDir);
 
     StringBuilder sb = new StringBuilder();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
index 343693e95c..0f5465194b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/SshFenceByTcpPort.java
@@ -241,7 +241,7 @@ public class SshFenceByTcpPort extends Configured
       sshPort = DEFAULT_SSH_PORT;
 
       // Parse optional user and ssh port
-      if (arg != null && !"".equals(arg)) {
+      if (arg != null && !arg.isEmpty()) {
         Matcher m = USER_PORT_RE.matcher(arg);
         if (!m.matches()) {
           throw new BadFencingConfigurationException(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
index 2b8e259464..d32d58b600 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
@@ -192,7 +192,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
       String[] parts = itemStr.split(SEPARATOR);
 
       for (String part : parts) {
-        if (!part.equals(""))
+        if (!part.isEmpty())
           list.add(stringifier.fromString(part));
       }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
index 9a57581c90..c11678dd5d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
@@ -2105,7 +2105,7 @@ public class TFile {
     }
 
     public boolean isSorted() {
-      return !strComparator.equals("");
+      return !strComparator.isEmpty();
     }
 
     public String getComparatorString() {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java
index 29e1396b05..ea663a45b3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsDynamicMBeanBase.java
@@ -129,7 +129,7 @@ public abstract class MetricsDynamicMBeanBase implements DynamicMBean {
   @Override
   public Object getAttribute(String attributeName) throws
       AttributeNotFoundException, MBeanException, ReflectionException {
-    if (attributeName == null || attributeName.equals(""))
+    if (attributeName == null || attributeName.isEmpty())
       throw new IllegalArgumentException();
 
     updateMbeanInfoIfMetricsListChanged();
@@ -197,7 +197,7 @@ public abstract class MetricsDynamicMBeanBase implements DynamicMBean {
 
   public Object invoke(String actionName, Object[] parms, String[] signature)
       throws MBeanException, ReflectionException {
-    if (actionName == null || actionName.equals(""))
+    if (actionName == null || actionName.isEmpty())
       throw new IllegalArgumentException();
 
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java
index e9fa0c35b8..a3d12b692e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordInput.java
@@ -144,7 +144,7 @@ public class CsvRecordInput implements RecordInput {
 
   @Override
   public void startRecord(String tag) throws IOException {
-    if (tag != null && !"".equals(tag)) {
+    if (tag != null && !tag.isEmpty()) {
       char c1 = (char) stream.read();
       char c2 = (char) stream.read();
       if (c1 != 's' || c2 != '{') {
@@ -156,7 +156,7 @@ public class CsvRecordInput implements RecordInput {
   @Override
   public void endRecord(String tag) throws IOException {
     char c = (char) stream.read();
-    if (tag == null || "".equals(tag)) {
+    if (tag == null || tag.isEmpty()) {
       if (c != '\n' && c != '\r') {
         throw new IOException("Error deserializing record.");
       } else {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java
index d770f47cf5..18cf23e78a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/CsvRecordOutput.java
@@ -115,7 +115,7 @@ public class CsvRecordOutput implements RecordOutput {
 
   @Override
   public void startRecord(Record r, String tag) throws IOException {
-    if (tag != null && !"".equals(tag)) {
+    if (tag != null && ! tag.isEmpty()) {
       printCommaUnlessFirst();
       stream.print("s{");
       isFirst = true;
@@ -124,7 +124,7 @@ public class CsvRecordOutput implements RecordOutput {
 
   @Override
   public void endRecord(Record r, String tag) throws IOException {
-    if (tag == null || "".equals(tag)) {
+    if (tag == null || tag.isEmpty()) {
       stream.print("\n");
       isFirst = true;
     } else {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index 25bae83b1e..a98de8b2ee 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -212,7 +212,7 @@ public class SecurityUtil {
   private static String replacePattern(String[] components, String hostname)
       throws IOException {
     String fqdn = hostname;
-    if (fqdn == null || fqdn.equals("") || fqdn.equals("0.0.0.0")) {
+    if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) {
      fqdn = getLocalHostName();
     }
     return components[0] + "/" + fqdn.toLowerCase() + "@" + components[2];
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
index 64ca98cf28..4097453c76 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
@@ -992,7 +992,7 @@ public class UserGroupInformation {
   @InterfaceAudience.Public
   @InterfaceStability.Evolving
   public static UserGroupInformation createRemoteUser(String user) {
-    if (user == null || "".equals(user)) {
+    if (user == null || user.isEmpty()) {
       throw new IllegalArgumentException("Null user");
     }
     Subject subject = new Subject();
@@ -1027,7 +1027,7 @@ public class UserGroupInformation {
   @InterfaceStability.Evolving
   public static UserGroupInformation createProxyUser(String user,
       UserGroupInformation realUser) {
-    if (user == null || "".equals(user)) {
+    if (user == null || user.isEmpty()) {
       throw new IllegalArgumentException("Null user");
     }
     if (realUser == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
index d17d065bf8..8523f38dec 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
@@ -88,7 +88,7 @@ public class ServiceAuthorizationManager {
     String clientPrincipal = null;
     if (krbInfo != null) {
       String clientKey = krbInfo.clientPrincipal();
-      if (clientKey != null && !clientKey.equals("")) {
+      if (clientKey != null && !clientKey.isEmpty()) {
         try {
           clientPrincipal = SecurityUtil.getServerPrincipal(
               conf.get(clientKey), addr);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
index 6ec3b7e606..089cd99fd5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenIdentifier.java
@@ -87,12 +87,12 @@ extends TokenIdentifier {
    */
  @Override
  public UserGroupInformation getUser() {
-    if ( (owner == null) || ("".equals(owner.toString()))) {
+    if ( (owner == null) || (owner.toString().isEmpty())) {
      return null;
    }
    final UserGroupInformation realUgi;
    final UserGroupInformation ugi;
-    if ((realUser == null) || ("".equals(realUser.toString()))
+    if ((realUser == null) || (realUser.toString().isEmpty())
        || realUser.equals(owner)) {
      ugi = realUgi = UserGroupInformation.createRemoteUser(owner.toString());
    } else {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
index 29367a38ab..aa4fa28ad1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java
@@ -265,7 +265,7 @@ extends AbstractDelegationTokenIdentifier>
       throw new InvalidToken("User " + renewer +
                              " tried to renew an expired token");
     }
-    if ((id.getRenewer() == null) || ("".equals(id.getRenewer().toString()))) {
+    if ((id.getRenewer() == null) || (id.getRenewer().toString().isEmpty())) {
       throw new AccessControlException("User " + renewer +
                                        " tried to renew a token without " +
                                        "a renewer");
@@ -321,7 +321,7 @@ extends AbstractDelegationTokenIdentifier>
     HadoopKerberosName cancelerKrbName = new HadoopKerberosName(canceller);
     String cancelerShortName = cancelerKrbName.getShortName();
     if (!canceller.equals(owner)
-        && (renewer == null || "".equals(renewer.toString()) || !cancelerShortName
+        && (renewer == null || renewer.toString().isEmpty() || !cancelerShortName
            .equals(renewer.toString()))) {
       throw new AccessControlException(canceller
           + " is not authorized to cancel the token");
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
index 818bffe2f1..a6f4555bbe 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java
@@ -63,7 +63,7 @@ public class HostsFileReader {
             // Everything from now on is a comment
             break;
           }
-          if (!nodes[i].equals("")) {
+          if (!nodes[i].isEmpty()) {
             LOG.info("Adding " + nodes[i] + " to the list of hosts from " + filename);
             set.add(nodes[i]);  // might need to add canonical name
           }
@@ -80,13 +80,13 @@ public class HostsFileReader {
 
   public synchronized void refresh() throws IOException {
     LOG.info("Refreshing hosts (include/exclude) list");
-    if (!includesFile.equals("")) {
+    if (!includesFile.isEmpty()) {
       Set<String> newIncludes = new HashSet<String>();
       readFileToSet(includesFile, newIncludes);
       // switch the new hosts that are to be included
       includes = newIncludes;
     }
-    if (!excludesFile.equals("")) {
+    if (!excludesFile.isEmpty()) {
       Set<String> newExcludes = new HashSet<String>();
       readFileToSet(excludesFile, newExcludes);
       // switch the excluded hosts
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
index 67a8f82d93..ba32269f5f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
@@ -348,7 +348,7 @@ public class StringUtils {
    * @return an array of String values
    */
   public static String[] getTrimmedStrings(String str){
-    if (null == str || "".equals(str.trim())) {
+    if (null == str || str.trim().isEmpty()) {
       return emptyStringArray;
     }
 
@@ -408,7 +408,7 @@ public class StringUtils {
       String str, char separator) {
     // String.split returns a single empty result for splitting the empty
     // string.
-    if ("".equals(str)) {
+    if (str.isEmpty()) {
       return new String[]{""};
     }
     ArrayList<String> strList = new ArrayList<String>();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
index acd728b0ec..6a911ed804 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
@@ -67,7 +67,7 @@ public class TestNativeIO {
 
     assertEquals(System.getProperty("user.name"), stat.getOwner());
     assertNotNull(stat.getGroup());
-    assertTrue(!"".equals(stat.getGroup()));
+    assertTrue(!stat.getGroup().isEmpty());
     assertEquals("Stat mode field should indicate a regular file",
       NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
   }
@@ -96,7 +96,7 @@ public class TestNativeIO {
           NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
           assertEquals(System.getProperty("user.name"), stat.getOwner());
           assertNotNull(stat.getGroup());
-          assertTrue(!"".equals(stat.getGroup()));
+          assertTrue(!stat.getGroup().isEmpty());
           assertEquals("Stat mode field should indicate a regular file",
             NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
         } catch (Throwable t) {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
index db4a146542..fed7051f97 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
@@ -112,7 +112,7 @@ public class TestSaslRPC {
     }
     @Override
     public UserGroupInformation getUser() {
-      if ("".equals(realUser.toString())) {
+      if (realUser.toString().isEmpty()) {
        return UserGroupInformation.createRemoteUser(tokenid.toString());
      } else {
        UserGroupInformation realUgi = UserGroupInformation
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
index fc90984608..0ab86497d3 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
@@ -231,7 +231,8 @@ public class TestStringUtils extends UnitTestcaseTimeLimit {
     assertArrayEquals(expectedArray, StringUtils.getTrimmedStrings(pathologicalDirList2));
 
     assertArrayEquals(emptyArray, StringUtils.getTrimmedStrings(emptyList1));
-    assertArrayEquals(emptyArray, StringUtils.getTrimmedStrings(emptyList2));
+    String[] estring = StringUtils.getTrimmedStrings(emptyList2);
+    assertArrayEquals(emptyArray, estring);
   }
 
   @Test