From fb8932a727f757b2e9c1c61a18145878d0eb77bd Mon Sep 17 00:00:00 2001
From: Giovanni Matteo Fumarola
Date: Fri, 11 Jan 2019 10:54:49 -0800
Subject: [PATCH] HADOOP-16029. Consecutive StringBuilder.append can be
 reused. Contributed by Ayush Saxena.

---
 .../org/apache/hadoop/crypto/CipherSuite.java |   4 +-
 .../org/apache/hadoop/fs/BlockLocation.java   |   6 +-
 .../apache/hadoop/fs/FSDataOutputStream.java  |   4 +-
 .../apache/hadoop/fs/FileEncryptionInfo.java  |  32 +-
 .../java/org/apache/hadoop/fs/FileStatus.java |  34 +-
 .../java/org/apache/hadoop/fs/FileUtil.java   |  30 +-
 .../main/java/org/apache/hadoop/fs/Path.java  |  12 +-
 .../apache/hadoop/fs/permission/AclEntry.java |   4 +-
 .../org/apache/hadoop/fs/shell/Count.java     |   4 +-
 .../java/org/apache/hadoop/fs/shell/Ls.java   |   8 +-
 .../org/apache/hadoop/fs/shell/PathData.java  |   6 +-
 .../hadoop/fs/shell/find/BaseExpression.java  |   4 +-
 .../org/apache/hadoop/fs/shell/find/Find.java |   4 +-
 .../java/org/apache/hadoop/io/MD5Hash.java    |   4 +-
 .../org/apache/hadoop/io/SequenceFile.java    |   4 +-
 .../io/compress/CompressionCodecFactory.java  |  18 +-
 .../hadoop/io/erasurecode/ECSchema.java       |   8 +-
 .../apache/hadoop/ipc/WritableRpcEngine.java  |  12 +-
 .../hadoop/metrics2/sink/GraphiteSink.java    |   8 +-
 .../hadoop/metrics2/sink/StatsDSink.java      |   6 +-
 .../net/AbstractDNSToSwitchMapping.java       |   4 +-
 .../java/org/apache/hadoop/net/NetUtils.java  |   4 +-
 .../apache/hadoop/net/NetworkTopology.java    |  16 +-
 .../apache/hadoop/security/ProviderUtils.java |   4 +-
 .../security/alias/CredentialProvider.java    |   6 +-
 .../security/alias/CredentialShell.java       |  12 +-
 .../security/authorize/AccessControlList.java |   6 +-
 .../security/ssl/SSLHostnameVerifier.java     |   6 +-
 .../apache/hadoop/security/token/Token.java   |  10 +-
 .../service/launcher/InterruptEscalator.java  |   6 +-
 .../apache/hadoop/tools/GetGroupsBase.java    |   4 +-
 .../BlockingThreadPoolExecutorService.java    |   6 +-
 .../apache/hadoop/util/CpuTimeTracker.java    |  12 +-
 .../util/SemaphoredDelegatingExecutor.java    |   8 +-
 .../java/org/apache/hadoop/util/Shell.java    |  14 +-
 .../org/apache/hadoop/util/SignalLogger.java  |   4 +-
 .../hadoop/util/bloom/DynamicBloomFilter.java |   4 +-
 .../apache/hadoop/hdfs/DFSInputStream.java    |  12 +-
 .../org/apache/hadoop/hdfs/DFSUtilClient.java |   8 +-
 .../hadoop/hdfs/protocol/DatanodeInfo.java    |  83 +-
 .../hadoop/hdfs/protocol/HdfsPathHandle.java  |   4 +-
 .../hdfs/protocol/ReencryptionStatus.java     |  12 +-
 .../hadoop/hdfs/util/StripedBlockUtil.java    |  10 +-
 .../federation/resolver/PathLocation.java     |   6 +-
 .../federation/router/ConnectionContext.java  |  10 +-
 .../federation/router/RouterQuotaUsage.java   |   4 +-
 .../server/blockmanagement/BlockManager.java  |   3 +-
 .../blockmanagement/DatanodeAdminManager.java |   3 +-
 .../hadoop/hdfs/server/datanode/DataNode.java |   3 +-
 .../hdfs/server/datanode/VolumeScanner.java   |  18 +-
 .../diskbalancer/command/PlanCommand.java     |   3 +-
 .../namenode/EncryptionZoneManager.java       |   7 +-
 .../hdfs/server/namenode/FSEditLog.java       |  20 +-
 .../hdfs/server/namenode/FSEditLogLoader.java |   4 +-
 .../hdfs/server/namenode/FSEditLogOp.java     | 734 +++++++++---------
 .../hdfs/server/namenode/FSNamesystem.java    |  28 +-
 .../hdfs/server/namenode/JournalSet.java      |   4 +-
 .../hdfs/server/namenode/NamenodeFsck.java    |  10 +-
 .../namenode/QuotaByStorageTypeEntry.java     |   6 +-
 .../namenode/RedundantEditLogInputStream.java |   4 +-
 .../server/namenode/StoragePolicySummary.java |  13 +-
 .../hdfs/server/protocol/ServerCommand.java   |   6 +-
 .../hdfs/tools/DFSZKFailoverController.java   |   4 +-
 .../hadoop/tools/CopyListingFileStatus.java   |  14 +-
 64 files changed, 683 insertions(+), 688 deletions(-)
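The change applied across all 64 files is the same mechanical pattern:
StringBuilder.append() (and StringBuffer.append()) returns the builder
itself, so a run of consecutive append() statements on one builder can be
collapsed into a single chained expression. The two forms produce identical
output; the chained form simply drops the repeated reads of the local
variable. A minimal sketch of the pattern, using placeholder locals ("name",
"size") rather than code taken from the hunks below:

    // Before: every statement re-references the builder variable.
    String name = "example"; // placeholder values, not from the patch
    int size = 42;
    StringBuilder sb = new StringBuilder("{");
    sb.append("name: ");
    sb.append(name);
    sb.append(", size: ");
    sb.append(size);
    sb.append("}");
    String before = sb.toString();

    // After: append() returns 'this', so the calls chain into one expression.
    String after = new StringBuilder("{")
        .append("name: ").append(name)
        .append(", size: ").append(size)
        .append("}")
        .toString();

Note that string concatenation inside an append() argument, e.g.
append("name: " + name), still performs a separate concatenation; the hunks
below leave such arguments untouched and only chain the top-level calls.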
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
index a811aa7271..8221ba2bd7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CipherSuite.java
@@ -65,8 +65,8 @@ public int getAlgorithmBlockSize() {
   @Override
   public String toString() {
     StringBuilder builder = new StringBuilder("{");
-    builder.append("name: " + name);
-    builder.append(", algorithmBlockSize: " + algoBlockSize);
+    builder.append("name: " + name)
+        .append(", algorithmBlockSize: " + algoBlockSize);
     if (unknownValue != null) {
       builder.append(", unknownValue: " + unknownValue);
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
index ae134c4818..37f03092bf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
@@ -330,9 +330,9 @@ public void setStorageTypes(StorageType[] storageTypes) {
   @Override
   public String toString() {
     StringBuilder result = new StringBuilder();
-    result.append(offset);
-    result.append(',');
-    result.append(length);
+    result.append(offset)
+        .append(',')
+        .append(length);
     if (corrupt) {
       result.append("(corrupt)");
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStream.java
index 5970373a9f..5b604e58e2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStream.java
@@ -105,8 +105,8 @@ public void close() throws IOException {
   public String toString() {
     final StringBuilder sb = new StringBuilder(
         "FSDataOutputStream{");
-    sb.append("wrappedStream=").append(wrappedStream);
-    sb.append('}');
+    sb.append("wrappedStream=").append(wrappedStream)
+        .append('}');
     return sb.toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
index ce5ed564c5..4cfce2eed6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
@@ -115,14 +115,14 @@ public byte[] getIV() {
   @Override
   public String toString() {
-    StringBuilder builder = new StringBuilder("{");
-    builder.append("cipherSuite: " + cipherSuite);
-    builder.append(", cryptoProtocolVersion: " + version);
-    builder.append(", edek: " + Hex.encodeHexString(edek));
-    builder.append(", iv: " + Hex.encodeHexString(iv));
-    builder.append(", keyName: " + keyName);
-    builder.append(", ezKeyVersionName: " + ezKeyVersionName);
-    builder.append("}");
+    StringBuilder builder = new StringBuilder("{")
+        .append("cipherSuite: " + cipherSuite)
+        .append(", cryptoProtocolVersion: " + version)
+        .append(", edek: " + Hex.encodeHexString(edek))
+        .append(", iv: " + Hex.encodeHexString(iv))
+ .append(", keyName: " + keyName) + .append(", ezKeyVersionName: " + ezKeyVersionName) + .append("}"); return builder.toString(); } @@ -136,14 +136,14 @@ public String toString() { * Currently this method is used by CLI for backward compatibility. */ public String toStringStable() { - StringBuilder builder = new StringBuilder("{"); - builder.append("cipherSuite: " + cipherSuite); - builder.append(", cryptoProtocolVersion: " + version); - builder.append(", edek: " + Hex.encodeHexString(edek)); - builder.append(", iv: " + Hex.encodeHexString(iv)); - builder.append(", keyName: " + keyName); - builder.append(", ezKeyVersionName: " + ezKeyVersionName); - builder.append("}"); + StringBuilder builder = new StringBuilder("{") + .append("cipherSuite: " + cipherSuite) + .append(", cryptoProtocolVersion: " + version) + .append(", edek: " + Hex.encodeHexString(edek)) + .append(", iv: " + Hex.encodeHexString(iv)) + .append(", keyName: " + keyName) + .append(", ezKeyVersionName: " + ezKeyVersionName) + .append("}"); return builder.toString(); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java index 83910c46c1..d7ca8f172f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java @@ -442,21 +442,21 @@ public int hashCode() { @Override public String toString() { StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()); - sb.append("{"); - sb.append("path=" + path); - sb.append("; isDirectory=" + isdir); + sb.append(getClass().getSimpleName()) + .append("{") + .append("path=" + path) + .append("; isDirectory=" + isdir); if(!isDirectory()){ - sb.append("; length=" + length); - sb.append("; replication=" + block_replication); - sb.append("; blocksize=" + blocksize); + sb.append("; length=" + length) + .append("; replication=" + block_replication) + .append("; blocksize=" + blocksize); } - sb.append("; modification_time=" + modification_time); - sb.append("; access_time=" + access_time); - sb.append("; owner=" + owner); - sb.append("; group=" + group); - sb.append("; permission=" + permission); - sb.append("; isSymlink=" + isSymlink()); + sb.append("; modification_time=" + modification_time) + .append("; access_time=" + access_time) + .append("; owner=" + owner) + .append("; group=" + group) + .append("; permission=" + permission) + .append("; isSymlink=" + isSymlink()); if(isSymlink()) { try { sb.append("; symlink=" + getSymlink()); @@ -464,10 +464,10 @@ public String toString() { throw new RuntimeException("Unexpected exception", e); } } - sb.append("; hasAcl=" + hasAcl()); - sb.append("; isEncrypted=" + isEncrypted()); - sb.append("; isErasureCoded=" + isErasureCoded()); - sb.append("}"); + sb.append("; hasAcl=" + hasAcl()) + .append("; isEncrypted=" + isEncrypted()) + .append("; isErasureCoded=" + isErasureCoded()) + .append("}"); return sb.toString(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java index 51dd0e0e43..c849055a3a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java @@ -367,8 +367,8 @@ public static boolean copy(FileSystem 
srcFS, Path[] srcs, returnVal = false; } catch (IOException e) { gotException = true; - exceptions.append(e.getMessage()); - exceptions.append("\n"); + exceptions.append(e.getMessage()) + .append("\n"); } } if (gotException) { @@ -873,10 +873,10 @@ private static void unTarUsingTar(InputStream inputStream, File untarDir, if (gzipped) { untarCommand.append("gzip -dc | ("); } - untarCommand.append("cd '"); - untarCommand.append(FileUtil.makeSecureShellPath(untarDir)); - untarCommand.append("' && "); - untarCommand.append("tar -x "); + untarCommand.append("cd '") + .append(FileUtil.makeSecureShellPath(untarDir)) + .append("' && ") + .append("tar -x "); if (gzipped) { untarCommand.append(")"); @@ -888,14 +888,14 @@ private static void unTarUsingTar(File inFile, File untarDir, boolean gzipped) throws IOException { StringBuffer untarCommand = new StringBuffer(); if (gzipped) { - untarCommand.append(" gzip -dc '"); - untarCommand.append(FileUtil.makeSecureShellPath(inFile)); - untarCommand.append("' | ("); + untarCommand.append(" gzip -dc '") + .append(FileUtil.makeSecureShellPath(inFile)) + .append("' | ("); } - untarCommand.append("cd '"); - untarCommand.append(FileUtil.makeSecureShellPath(untarDir)); - untarCommand.append("' && "); - untarCommand.append("tar -xf "); + untarCommand.append("cd '") + .append(FileUtil.makeSecureShellPath(untarDir)) + .append("' && ") + .append("tar -xf "); if (gzipped) { untarCommand.append(" -)"); @@ -1504,8 +1504,8 @@ public static String[] createJarWithClassPath(String inputClassPath, Path pwd, classPathEntryList.add(jar.toUri().toURL().toExternalForm()); } } else { - unexpandedWildcardClasspath.append(File.pathSeparator); - unexpandedWildcardClasspath.append(classPathEntry); + unexpandedWildcardClasspath.append(File.pathSeparator) + .append(classPathEntry); } } else { // Append just this entry diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java index b6244d6a36..7672c993f3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java @@ -452,12 +452,12 @@ public String toString() { // illegal characters unescaped in the string, for glob processing, etc. 
     StringBuilder buffer = new StringBuilder();
     if (uri.getScheme() != null) {
-      buffer.append(uri.getScheme());
-      buffer.append(":");
+      buffer.append(uri.getScheme())
+          .append(":");
     }
     if (uri.getAuthority() != null) {
-      buffer.append("//");
-      buffer.append(uri.getAuthority());
+      buffer.append("//")
+          .append(uri.getAuthority());
     }
     if (uri.getPath() != null) {
       String path = uri.getPath();
@@ -469,8 +469,8 @@ public String toString() {
       buffer.append(path);
     }
     if (uri.getFragment() != null) {
-      buffer.append("#");
-      buffer.append(uri.getFragment());
+      buffer.append("#")
+          .append(uri.getFragment());
     }
     return buffer.toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
index b42c36525a..a902488377 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclEntry.java
@@ -330,8 +330,8 @@ public static AclEntry parseAclEntry(String aclStr,
   public static String aclSpecToString(List<AclEntry> aclSpec) {
     StringBuilder buf = new StringBuilder();
     for ( AclEntry e : aclSpec ) {
-      buf.append(e.toString());
-      buf.append(",");
+      buf.append(e.toString())
+          .append(",");
     }
     return buf.substring(0, buf.length()-1);  // remove last ,
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java
index 011e489df2..22d8be53e9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Count.java
@@ -202,8 +202,8 @@ protected void processPath(PathData src) throws IOException {
       if(!summary.getErasureCodingPolicy().equals("Replicated")){
         outputString.append("EC:");
       }
-      outputString.append(summary.getErasureCodingPolicy());
-      outputString.append(" ");
+      outputString.append(summary.getErasureCodingPolicy())
+          .append(" ");
     }
     outputString.append(src);
     out.println(outputString.toString());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
index 32084fc8fe..efc541ccf8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
@@ -334,10 +334,10 @@ private void adjustColumnWidths(PathData items[]) throws IOException {
     }
     StringBuilder fmt = new StringBuilder();
-    fmt.append("%s%s"); // permission string
-    fmt.append("%" + maxRepl + "s ");
-    fmt.append((maxOwner > 0) ? "%-" + maxOwner + "s " : "%s");
-    fmt.append((maxGroup > 0) ? "%-" + maxGroup + "s " : "%s");
+    fmt.append("%s%s") // permission string
+        .append("%" + maxRepl + "s ")
+        .append((maxOwner > 0) ? "%-" + maxOwner + "s " : "%s")
+        .append((maxGroup > 0) ? "%-" + maxGroup + "s " : "%s");
     // Do not use '%-0s' as a formatting conversion, since it will throw a
     // a MissingFormatWidthException if it is used in String.format().
     // http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html#intFlags
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
index adf17df2db..dad54ea07b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
@@ -484,9 +484,9 @@ private static String uriToString(URI uri, boolean inferredSchemeFromPath) {
       return decodedRemainder;
     } else {
       StringBuilder buffer = new StringBuilder();
-      buffer.append(scheme);
-      buffer.append(":");
-      buffer.append(decodedRemainder);
+      buffer.append(scheme)
+          .append(":")
+          .append(decodedRemainder);
       return buffer.toString();
     }
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
index db7d62ff46..5069d2d34e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
@@ -110,8 +110,8 @@ public Configuration getConf() {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append(getClass().getSimpleName());
-    sb.append("(");
+    sb.append(getClass().getSimpleName())
+        .append("(");
     boolean firstArg = true;
     for (String arg : getArguments()) {
       if (!firstArg) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java
index 70a8c79a41..199038a751 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Find.java
@@ -134,8 +134,8 @@ public int compare(Expression arg0, Expression arg1) {
     for (String line : HELP) {
       sb.append(line).append("\n");
     }
-    sb.append("\n");
-    sb.append("The following primary expressions are recognised:\n");
+    sb.append("\n")
+        .append("The following primary expressions are recognised:\n");
     for (Expression expr : primaries) {
       for (String line : expr.getUsage()) {
         sb.append(" ").append(line).append("\n");
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
index aaf3ea1d9f..99c17acdd4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
@@ -220,8 +220,8 @@ public String toString() {
     StringBuilder buf = new StringBuilder(MD5_LEN*2);
     for (int i = 0; i < MD5_LEN; i++) {
       int b = digest[i];
-      buf.append(HEX_DIGITS[(b >> 4) & 0xf]);
-      buf.append(HEX_DIGITS[b & 0xf]);
+      buf.append(HEX_DIGITS[(b >> 4) & 0xf])
+          .append(HEX_DIGITS[b & 0xf]);
     }
     return buf.toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
index 9afa621892..fec0a4ac81 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
@@ -826,8 +826,8 @@ public String toString() {
         this.theMetadata.entrySet().iterator();
     while (iter.hasNext()) {
       Map.Entry<Text, Text> en = iter.next();
-      sb.append("\t").append(en.getKey().toString()).append("\t").append(en.getValue().toString());
-      sb.append("\n");
+      sb.append("\t").append(en.getKey().toString()).append("\t")
+          .append(en.getValue().toString()).append("\n");
     }
     return sb.toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
index e24812058e..1fa7fd4b52 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
@@ -85,15 +85,15 @@ public String toString() {
     buf.append("{ ");
     if (itr.hasNext()) {
       Map.Entry<String, CompressionCodec> entry = itr.next();
-      buf.append(entry.getKey());
-      buf.append(": ");
-      buf.append(entry.getValue().getClass().getName());
+      buf.append(entry.getKey())
+          .append(": ")
+          .append(entry.getValue().getClass().getName());
       while (itr.hasNext()) {
         entry = itr.next();
-        buf.append(", ");
-        buf.append(entry.getKey());
-        buf.append(": ");
-        buf.append(entry.getValue().getClass().getName());
+        buf.append(", ")
+            .append(entry.getKey())
+            .append(": ")
+            .append(entry.getValue().getClass().getName());
       }
     }
     buf.append(" }");
@@ -161,8 +161,8 @@ public static void setCodecClasses(Configuration conf,
       Class cls = itr.next();
       buf.append(cls.getName());
       while(itr.hasNext()) {
-        buf.append(',');
-        buf.append(itr.next().getName());
+        buf.append(',')
+            .append(itr.next().getName());
       }
     }
     conf.set(CommonConfigurationKeys.IO_COMPRESSION_CODECS_KEY, buf.toString());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java
index 0f95058afc..f512203346 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ECSchema.java
@@ -187,10 +187,10 @@ public int getNumParityUnits() {
   public String toString() {
     StringBuilder sb = new StringBuilder("ECSchema=[");
-    sb.append("Codec=" + codecName + ", ");
-    sb.append(NUM_DATA_UNITS_KEY + "=" + numDataUnits + ", ");
-    sb.append(NUM_PARITY_UNITS_KEY + "=" + numParityUnits);
-    sb.append((extraOptions.isEmpty() ? "" : ", "));
+    sb.append("Codec=" + codecName + ", ")
+        .append(NUM_DATA_UNITS_KEY + "=" + numDataUnits + ", ")
+        .append(NUM_PARITY_UNITS_KEY + "=" + numParityUnits)
+        .append((extraOptions.isEmpty() ? "" : ", "));
     int i = 0;
     for (Map.Entry<String, String> entry : extraOptions.entrySet()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
index 2e3b5594e5..c590dbdaf2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
@@ -181,17 +181,17 @@ public void write(DataOutput out) throws IOException {
     @Override
     public String toString() {
       StringBuilder buffer = new StringBuilder();
-      buffer.append(methodName);
-      buffer.append("(");
+      buffer.append(methodName)
+          .append("(");
       for (int i = 0; i < parameters.length; i++) {
         if (i != 0)
           buffer.append(", ");
         buffer.append(parameters[i]);
       }
-      buffer.append(")");
-      buffer.append(", rpc version="+rpcVersion);
-      buffer.append(", client version="+clientVersion);
-      buffer.append(", methodsFingerPrint="+clientMethodsHash);
+      buffer.append(")")
+          .append(", rpc version="+rpcVersion)
+          .append(", client version="+clientVersion)
+          .append(", methodsFingerPrint="+clientMethodsHash);
       return buffer.toString();
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
index de4c14d7af..ea1bde3a75 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
@@ -76,10 +76,10 @@ public void putMetrics(MetricsRecord record) {
         for (MetricsTag tag : record.tags()) {
             if (tag.value() != null) {
-                metricsPathPrefix.append(".");
-                metricsPathPrefix.append(tag.name());
-                metricsPathPrefix.append("=");
-                metricsPathPrefix.append(tag.value());
+                metricsPathPrefix.append(".")
+                    .append(tag.name())
+                    .append("=")
+                    .append(tag.value());
             }
         }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/StatsDSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/StatsDSink.java
index c1dbf7ec82..d1ec47fdec 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/StatsDSink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/StatsDSink.java
@@ -122,9 +122,9 @@ public void putMetrics(MetricsRecord record) {
         buf.append(hn.substring(0, idx)).append(PERIOD);
       }
     }
-    buf.append(sn).append(PERIOD);
-    buf.append(ctx).append(PERIOD);
-    buf.append(record.name().replaceAll("\\.", "-")).append(PERIOD);
+    buf.append(sn).append(PERIOD)
+        .append(ctx).append(PERIOD)
+        .append(record.name().replaceAll("\\.", "-")).append(PERIOD);
     // Collect datapoints.
     for (AbstractMetric metric : record.metrics()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java
index 97723c4a37..f050219398 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/AbstractDNSToSwitchMapping.java
@@ -124,8 +124,8 @@ public String dumpTopology() {
             .append("\n");
         switches.add(entry.getValue());
       }
-      builder.append("Nodes: ").append(rack.size()).append("\n");
-      builder.append("Switches: ").append(switches.size()).append("\n");
+      builder.append("Nodes: ").append(rack.size()).append("\n")
+          .append("Switches: ").append(switches.size()).append("\n");
     } else {
       builder.append("No topology information");
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
index ceb8ec2f7d..bd2ff7b43a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
@@ -850,8 +850,8 @@ private static String getHostDetailsAsString(final String destHost,
     StringBuilder hostDetails = new StringBuilder(27);
     hostDetails.append("local host is: ")
         .append(quoteHost(localHost))
-        .append("; ");
-    hostDetails.append("destination host is: ").append(quoteHost(destHost))
+        .append("; ")
+        .append("destination host is: ").append(quoteHost(destHost))
         .append(":")
         .append(destPort).append("; ");
     return hostDetails.toString();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
index 5729f5f3e5..5ee19d62ee 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
@@ -710,18 +710,18 @@ public int countNumOfAvailableNodes(String scope,
   public String toString() {
     // print the number of racks
     StringBuilder tree = new StringBuilder();
-    tree.append("Number of racks: ");
-    tree.append(numOfRacks);
-    tree.append("\n");
+    tree.append("Number of racks: ")
+        .append(numOfRacks)
+        .append("\n");
    // print the number of leaves
    int numOfLeaves = getNumOfLeaves();
-    tree.append("Expected number of leaves:");
-    tree.append(numOfLeaves);
-    tree.append("\n");
+    tree.append("Expected number of leaves:")
+        .append(numOfLeaves)
+        .append("\n");
    // print nodes
    for(int i=0; i
');
+      buf.append(" <")
+          .append(cn)
+          .append('>');
       if (it.hasNext()) {
         buf.append(" OR");
       }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
index 22392beda2..95e00b5b72 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
@@ -456,11 +456,11 @@ private void identifierToString(StringBuilder buffer) {
   @Override
   public String toString() {
     StringBuilder buffer = new StringBuilder();
-    buffer.append("Kind: ");
-    buffer.append(kind.toString());
-    buffer.append(", Service: ");
-    buffer.append(service.toString());
-    buffer.append(", Ident: ");
+    buffer.append("Kind: ")
+        .append(kind.toString())
+        .append(", Service: ")
+        .append(service.toString())
+        .append(", Ident: ");
     identifierToString(buffer);
     return buffer.toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/InterruptEscalator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/InterruptEscalator.java
index a7e1edd007..594fc5bfe8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/InterruptEscalator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/service/launcher/InterruptEscalator.java
@@ -94,9 +94,9 @@ public String toString() {
     if (owner != null) {
       sb.append(", owner= ").append(owner.toString());
     }
-    sb.append(", shutdownTimeMillis=").append(shutdownTimeMillis);
-    sb.append(", forcedShutdownTimedOut=").append(forcedShutdownTimedOut);
-    sb.append('}');
+    sb.append(", shutdownTimeMillis=").append(shutdownTimeMillis)
+        .append(", forcedShutdownTimedOut=").append(forcedShutdownTimedOut)
+        .append('}');
     return sb.toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java
index da7830de6b..92cdb5835e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java
@@ -69,8 +69,8 @@ public int run(String[] args) throws Exception {
       StringBuilder sb = new StringBuilder();
       sb.append(username + " :");
       for (String group : getUgmProtocol().getGroupsForUser(username)) {
-        sb.append(" ");
-        sb.append(group);
+        sb.append(" ")
+            .append(group);
       }
       out.println(sb);
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java
index 404eea9618..d49013ec14 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/BlockingThreadPoolExecutorService.java
@@ -161,9 +161,9 @@ int getActiveCount() {
   public String toString() {
     final StringBuilder sb = new StringBuilder(
         "BlockingThreadPoolExecutorService{");
-    sb.append(super.toString());
-    sb.append(", activeCount=").append(getActiveCount());
-    sb.append('}');
+    sb.append(super.toString())
+        .append(", activeCount=").append(getActiveCount())
+        .append('}');
     return sb.toString();
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CpuTimeTracker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CpuTimeTracker.java
index 4355367e80..80d3c190f9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CpuTimeTracker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/CpuTimeTracker.java
@@ -106,12 +106,12 @@ public void updateElapsedJiffies(BigInteger elapsedJiffies, long newTime) {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("SampleTime " + this.sampleTime);
-    sb.append(" CummulativeCpuTime " + this.cumulativeCpuTime);
-    sb.append(" LastSampleTime " + this.lastSampleTime);
-    sb.append(" LastCummulativeCpuTime " + this.lastCumulativeCpuTime);
-    sb.append(" CpuUsage " + this.cpuUsage);
-    sb.append(" JiffyLengthMillisec " + this.jiffyLengthInMillis);
+    sb.append("SampleTime " + this.sampleTime)
+        .append(" CummulativeCpuTime " + this.cumulativeCpuTime)
+        .append(" LastSampleTime " + this.lastSampleTime)
+        .append(" LastCummulativeCpuTime " + this.lastCumulativeCpuTime)
+        .append(" CpuUsage " + this.cpuUsage)
+        .append(" JiffyLengthMillisec " + this.jiffyLengthInMillis);
     return sb.toString();
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SemaphoredDelegatingExecutor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SemaphoredDelegatingExecutor.java
index bcc19e35e8..22ba52af3a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SemaphoredDelegatingExecutor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SemaphoredDelegatingExecutor.java
@@ -173,10 +173,10 @@ public int getPermitCount() {
   public String toString() {
     final StringBuilder sb = new StringBuilder(
         "SemaphoredDelegatingExecutor{");
-    sb.append("permitCount=").append(getPermitCount());
-    sb.append(", available=").append(getAvailablePermits());
-    sb.append(", waiting=").append(getWaitingCount());
-    sb.append('}');
+    sb.append("permitCount=").append(getPermitCount())
+        .append(", available=").append(getAvailablePermits())
+        .append(", waiting=").append(getWaitingCount())
+        .append('}');
     return sb.toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
index 46a0fccd41..e66c81b4b8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
@@ -147,9 +147,9 @@ public static void checkWindowsCommandLineLength(String...commands)
    */
   static String bashQuote(String arg) {
     StringBuilder buffer = new StringBuilder(arg.length() + 2);
-    buffer.append('\'');
-    buffer.append(arg.replace("'", "'\\''"));
-    buffer.append('\'');
+    buffer.append('\'')
+        .append(arg.replace("'", "'\\''"))
+        .append('\'');
     return buffer.toString();
   }
@@ -964,8 +964,8 @@ public void run() {
           try {
             String line = errReader.readLine();
             while((line != null) && !isInterrupted()) {
-              errMsg.append(line);
-              errMsg.append(System.getProperty("line.separator"));
+              errMsg.append(line)
+                  .append(System.getProperty("line.separator"));
               line = errReader.readLine();
             }
           } catch(IOException ioe) {
@@ -1109,8 +1109,8 @@ public String toString() {
       final StringBuilder sb =
           new StringBuilder("ExitCodeException ");
       sb.append("exitCode=").append(exitCode)
-          .append(": ");
-      sb.append(super.getMessage());
+          .append(": ")
+          .append(super.getMessage());
       return sb.toString();
     }
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java
index 62338c9303..605352443e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SignalLogger.java
@@ -84,8 +84,8 @@ void register(final LogAdapter LOG) {
     for (String signalName : SIGNALS) {
       try {
         new Handler(signalName, LOG);
-        bld.append(separator);
-        bld.append(signalName);
+        bld.append(separator)
+            .append(signalName);
         separator = ", ";
       } catch (Exception e) {
         LOG.debug(e);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/DynamicBloomFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/DynamicBloomFilter.java
index 8a7ec6954c..b84a5f7ac2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/DynamicBloomFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/DynamicBloomFilter.java
@@ -237,8 +237,8 @@ public String toString() {
     StringBuilder res = new StringBuilder();
     for (int i = 0; i < matrix.length; i++) {
-      res.append(matrix[i]);
-      res.append(Character.LINE_SEPARATOR);
+      res.append(matrix[i])
+          .append(Character.LINE_SEPARATOR);
     }
     return res.toString();
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
index 52ed1d432a..f47b88cb5a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
@@ -972,19 +972,19 @@ private static String getBestNodeDNAddrPairErrorString(
         " No live nodes contain current block ");
     errMsgr.append("Block locations:");
     for (DatanodeInfo datanode : nodes) {
-      errMsgr.append(" ");
-      errMsgr.append(datanode.toString());
+      errMsgr.append(" ")
+          .append(datanode.toString());
     }
     errMsgr.append(" Dead nodes: ");
     for (DatanodeInfo datanode : deadNodes.keySet()) {
-      errMsgr.append(" ");
-      errMsgr.append(datanode.toString());
+      errMsgr.append(" ")
+          .append(datanode.toString());
     }
     if (ignoredNodes != null) {
       errMsgr.append(" Ignored nodes: ");
       for (DatanodeInfo datanode : ignoredNodes) {
-        errMsgr.append(" ");
-        errMsgr.append(datanode.toString());
+        errMsgr.append(" ")
+            .append(datanode.toString());
       }
     }
     return errMsgr.toString();
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
index 0acccea097..77cb73831d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
@@ -519,10 +519,10 @@ private static String checkRpcAuxiliary(Configuration conf, String suffix,
     // localhost), then append port
     // TODO : revisit if there is a better way
     StringBuilder sb = new StringBuilder();
-    sb.append(uri.getScheme());
-    sb.append("://");
-    sb.append(uri.getHost());
-    sb.append(":");
+    sb.append(uri.getScheme())
+        .append("://")
+        .append(uri.getHost())
+        .append(":");
     // TODO : currently, only the very first auxiliary port is being used.
     // But actually NN supports running multiple auxiliary
     sb.append(ports[0]);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
index c140d06018..bba90a0579 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeInfo.java
@@ -374,8 +374,8 @@ public String getDatanodeReport() {
     if (lookupName != null) {
       buffer.append(" (").append(lookupName).append(")");
     }
-    buffer.append("\n");
-    buffer.append("Hostname: ").append(getHostName()).append("\n");
+    buffer.append("\n")
+        .append("Hostname: ").append(getHostName()).append("\n");
     if (!NetworkTopology.DEFAULT_RACK.equals(location)) {
       buffer.append("Rack: ").append(location).append("\n");
@@ -396,35 +396,34 @@ public String getDatanodeReport() {
       buffer.append("Normal\n");
     }
     buffer.append("Configured Capacity: ").append(c).append(" (")
-        .append(StringUtils.byteDesc(c)).append(")").append("\n");
-    buffer.append("DFS Used: ").append(u).append(" (")
-        .append(StringUtils.byteDesc(u)).append(")").append("\n");
-    buffer.append("Non DFS Used: ").append(nonDFSUsed).append(" (")
-        .append(StringUtils.byteDesc(nonDFSUsed)).append(")").append("\n");
-    buffer.append("DFS Remaining: ").append(r).append(" (")
-        .append(StringUtils.byteDesc(r)).append(")").append("\n");
-    buffer.append("DFS Used%: ").append(percent2String(usedPercent))
-        .append("\n");
-    buffer.append("DFS Remaining%: ").append(percent2String(remainingPercent))
-        .append("\n");
-    buffer.append("Configured Cache Capacity: ").append(cc).append(" (")
-        .append(StringUtils.byteDesc(cc)).append(")").append("\n");
-    buffer.append("Cache Used: ").append(cu).append(" (")
-        .append(StringUtils.byteDesc(cu)).append(")").append("\n");
-    buffer.append("Cache Remaining: ").append(cr).append(" (")
-        .append(StringUtils.byteDesc(cr)).append(")").append("\n");
-    buffer.append("Cache Used%: ").append(percent2String(cacheUsedPercent))
-        .append("\n");
-    buffer.append("Cache Remaining%: ")
-        .append(percent2String(cacheRemainingPercent)).append("\n");
-    buffer.append("Xceivers: ").append(getXceiverCount()).append("\n");
-    buffer.append("Last contact: ").append(new Date(lastUpdate)).append("\n");
-    buffer
+        .append(StringUtils.byteDesc(c)).append(")").append("\n")
+        .append("DFS Used: ").append(u).append(" (")
+        .append(StringUtils.byteDesc(u)).append(")").append("\n")
+        .append("Non DFS Used: ").append(nonDFSUsed).append(" (")
+        .append(StringUtils.byteDesc(nonDFSUsed)).append(")").append("\n")
+        .append("DFS Remaining: ").append(r).append(" (")
+        .append(StringUtils.byteDesc(r)).append(")").append("\n")
+        .append("DFS Used%: ").append(percent2String(usedPercent))
+        .append("\n")
+        .append("DFS Remaining%: ").append(percent2String(remainingPercent))
+        .append("\n")
+        .append("Configured Cache Capacity: ").append(cc).append(" (")
+        .append(StringUtils.byteDesc(cc)).append(")").append("\n")
+        .append("Cache Used: ").append(cu).append(" (")
+        .append(StringUtils.byteDesc(cu)).append(")").append("\n")
+        .append("Cache Remaining: ").append(cr).append(" (")
+        .append(StringUtils.byteDesc(cr)).append(")").append("\n")
+        .append("Cache Used%: ").append(percent2String(cacheUsedPercent))
+        .append("\n")
+        .append("Cache Remaining%: ")
+        .append(percent2String(cacheRemainingPercent)).append("\n")
+        .append("Xceivers: ").append(getXceiverCount()).append("\n")
+        .append("Last contact: ").append(new Date(lastUpdate)).append("\n")
         .append("Last Block Report: ")
         .append(
             lastBlockReportTime != 0 ? new Date(lastBlockReportTime) : "Never")
-        .append("\n");
-    buffer.append("Num of Blocks: ").append(blockCount).append("\n");
+        .append("\n")
+        .append("Num of Blocks: ").append(blockCount).append("\n");
     return buffer.toString();
   }
@@ -458,20 +457,20 @@ public String dumpDatanode() {
       buffer.append(" IN");
     }
     buffer.append(" ").append(c).append("(").append(StringUtils.byteDesc(c))
-        .append(")");
-    buffer.append(" ").append(u).append("(").append(StringUtils.byteDesc(u))
-        .append(")");
-    buffer.append(" ").append(percent2String(usedPercent));
-    buffer.append(" ").append(r).append("(").append(StringUtils.byteDesc(r))
-        .append(")");
-    buffer.append(" ").append(cc).append("(").append(StringUtils.byteDesc(cc))
-        .append(")");
-    buffer.append(" ").append(cu).append("(").append(StringUtils.byteDesc(cu))
-        .append(")");
-    buffer.append(" ").append(percent2String(cacheUsedPercent));
-    buffer.append(" ").append(cr).append("(").append(StringUtils.byteDesc(cr))
-        .append(")");
-    buffer.append(" ").append(new Date(lastUpdate));
+        .append(")")
+        .append(" ").append(u).append("(").append(StringUtils.byteDesc(u))
+        .append(")")
+        .append(" ").append(percent2String(usedPercent))
+        .append(" ").append(r).append("(").append(StringUtils.byteDesc(r))
+        .append(")")
+        .append(" ").append(cc).append("(").append(StringUtils.byteDesc(cc))
+        .append(")")
+        .append(" ").append(cu).append("(").append(StringUtils.byteDesc(cu))
+        .append(")")
+        .append(" ").append(percent2String(cacheUsedPercent))
+        .append(" ").append(cr).append("(").append(StringUtils.byteDesc(cr))
+        .append(")")
+        .append(" ").append(new Date(lastUpdate));
     return buffer.toString();
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsPathHandle.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsPathHandle.java
index a04aeebd98..7752b40fd5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsPathHandle.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsPathHandle.java
@@ -111,8 +111,8 @@ public int hashCode() {
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
-    sb.append("{ ");
-    sb.append("\"path\" : \"").append(path).append("\"");
+    sb.append("{ ")
+        .append("\"path\" : \"").append(path).append("\"");
     if (inodeId != null) {
       sb.append(",\"inodeId\" : ").append(inodeId);
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ReencryptionStatus.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ReencryptionStatus.java
index e83ab52ec6..dabeceacc3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ReencryptionStatus.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ReencryptionStatus.java
@@ -200,12 +200,12 @@ public String toString() {
     StringBuilder sb = new StringBuilder();
     for (Map.Entry entry : zoneStatuses
         .entrySet()) {
-      sb.append("[zone:" + entry.getKey());
-      sb.append(" state:" + entry.getValue().getState());
-      sb.append(" lastProcessed:" + entry.getValue().getLastCheckpointFile());
-      sb.append(" filesReencrypted:" + entry.getValue().getFilesReencrypted());
-      sb.append(" fileReencryptionFailures:" + entry.getValue()
-          .getNumReencryptionFailures() + "]");
+      sb.append("[zone:" + entry.getKey())
+          .append(" state:" + entry.getValue().getState())
+          .append(" lastProcessed:" + entry.getValue().getLastCheckpointFile())
+          .append(" filesReencrypted:" + entry.getValue().getFilesReencrypted())
+          .append(" fileReencryptionFailures:" + entry.getValue()
+              .getNumReencryptionFailures() + "]");
     }
     return sb.toString();
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/StripedBlockUtil.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/StripedBlockUtil.java
index 22457570e4..7251c7b67f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/StripedBlockUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/StripedBlockUtil.java
@@ -109,11 +109,11 @@ public int getNetworkDistance() {
     @Override
     public String toString() {
       final StringBuilder sb = new StringBuilder();
-      sb.append("bytesRead=").append(bytesRead);
-      sb.append(',');
-      sb.append("isShortCircuit=").append(isShortCircuit);
-      sb.append(',');
-      sb.append("networkDistance=").append(networkDistance);
+      sb.append("bytesRead=").append(bytesRead)
+          .append(',')
+          .append("isShortCircuit=").append(isShortCircuit)
+          .append(',')
+          .append("networkDistance=").append(networkDistance);
       return sb.toString();
     }
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/PathLocation.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/PathLocation.java
index 945d81df51..cb04ff8f93 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/PathLocation.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/PathLocation.java
@@ -162,9 +162,9 @@ public String toString() {
       sb.append(nsId + "->" + path);
     }
     if (this.destinations.size() > 1) {
-      sb.append(" [");
-      sb.append(this.destOrder.toString());
-      sb.append("]");
+      sb.append(" [")
+          .append(this.destOrder.toString())
+          .append("]");
     }
     return sb.toString();
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionContext.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionContext.java
index 7e779b5b1b..02a3dbeb4e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionContext.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionContext.java
@@ -116,11 +116,11 @@ public String toString() {
     Class clazz = proxy.getClass();
     StringBuilder sb = new StringBuilder();
-    sb.append(clazz.getSimpleName());
-    sb.append("@");
-    sb.append(addr);
-    sb.append("x");
-    sb.append(numThreads);
+    sb.append(clazz.getSimpleName())
+        .append("@")
+        .append(addr)
+        .append("x")
+        .append(numThreads);
     if (closed) {
       sb.append("[CLOSED]");
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterQuotaUsage.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterQuotaUsage.java
index e4728f5740..de9119aed4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterQuotaUsage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterQuotaUsage.java
@@ -110,8 +110,8 @@ public String toString() {
     StringBuilder str = new StringBuilder();
     str.append("[NsQuota: ").append(nsQuota).append("/")
-        .append(nsCount);
-    str.append(", SsQuota: ").append(ssQuota)
+        .append(nsCount)
+        .append(", SsQuota: ").append(ssQuota)
         .append("/").append(ssCount)
         .append("]");
     return str.toString();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index b326a7554f..740f9ca0f7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -1900,8 +1900,7 @@ int computeReconstructionWorkForBlocks(
       if (targets != null && targets.length != 0) {
         StringBuilder targetList = new StringBuilder("datanode(s)");
         for (DatanodeStorageInfo target : targets) {
-          targetList.append(' ');
-          targetList.append(target.getDatanodeDescriptor());
+          targetList.append(' ').append(target.getDatanodeDescriptor());
         }
         blockLog.debug("BLOCK* ask {} to replicate {} to {}", rw.getSrcNodes(),
             rw.getBlock(), targetList);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeAdminManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeAdminManager.java
index abc0f7c331..f2ae4dfbf7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeAdminManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeAdminManager.java
@@ -395,8 +395,7 @@ private void logBlockReplicationInfo(BlockInfo block,
     StringBuilder nodeList = new StringBuilder();
     for (DatanodeStorageInfo storage : storages) {
       final DatanodeDescriptor node = storage.getDatanodeDescriptor();
-      nodeList.append(node);
-      nodeList.append(' ');
+      nodeList.append(node).append(' ');
     }
     NameNode.blockStateChangeLog.info(
         "Block: " + block + ", Expected Replicas: "
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 7c9e9cbaf6..e926b6a498 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -2323,8 +2323,7 @@ void transferBlock(ExtendedBlock block, DatanodeInfo[] xferTargets,
     if (numTargets > 0) {
       StringBuilder xfersBuilder = new StringBuilder();
       for (int i = 0; i < numTargets; i++) {
-        xfersBuilder.append(xferTargets[i]);
-        xfersBuilder.append(" ");
+        xfersBuilder.append(xferTargets[i]).append(" ");
       }
       LOG.info(bpReg + " Starting thread to transfer " + block + " to "
           + xfersBuilder);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
index e0afb9b1f1..34552d2941 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/VolumeScanner.java
@@ -228,15 +228,15 @@ public void printStats(StringBuilder p) {
         " path %s%n", volume.getStorageID(), volume));
     synchronized (stats) {
       p.append(String.format("Bytes verified in last hour : %57d%n",
-          stats.bytesScannedInPastHour));
-      p.append(String.format("Blocks scanned in current period : %57d%n",
-          stats.blocksScannedInCurrentPeriod));
-      p.append(String.format("Blocks scanned since restart : %57d%n",
-          stats.blocksScannedSinceRestart));
-      p.append(String.format("Block pool scans since restart : %57d%n",
-          stats.scansSinceRestart));
-      p.append(String.format("Block scan errors since restart : %57d%n",
-          stats.scanErrorsSinceRestart));
+          stats.bytesScannedInPastHour))
+          .append(String.format("Blocks scanned in current period : %57d%n",
+              stats.blocksScannedInCurrentPeriod))
+          .append(String.format("Blocks scanned since restart : %57d%n",
+              stats.blocksScannedSinceRestart))
+          .append(String.format("Block pool scans since restart : %57d%n",
+              stats.scansSinceRestart))
+          .append(String.format("Block scan errors since restart : %57d%n",
+              stats.scanErrorsSinceRestart));
       if (stats.nextBlockPoolScanStartMs > 0) {
         p.append(String.format("Hours until next block pool scan : %57.3f%n",
             positiveMsToHours(stats.nextBlockPoolScanStartMs -
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/PlanCommand.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/PlanCommand.java
index dab9559512..ebcbb4c2fa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/PlanCommand.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/PlanCommand.java
@@ -176,8 +176,7 @@ public void execute(CommandLine cmd) throws Exception {
       final String errMsg =
           "Errors while recording the output of plan command.";
       LOG.error(errMsg, e);
-      result.appendln(errMsg);
-      result.appendln(Throwables.getStackTraceAsString(e));
+      result.appendln(errMsg).appendln(Throwables.getStackTraceAsString(e));
     }
     getPrintStream().print(result.toString());
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
index 8fa9578725..77349b2c93 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
@@ -499,11 +499,8 @@ void checkMoveValidity(INodesInPath srcIIP, INodesInPath dstIIP)
       final String srcEZPath = getFullPathName(srcParentEZI.getINodeId());
       final String dstEZPath = getFullPathName(dstParentEZI.getINodeId());
       final StringBuilder sb = new StringBuilder(srcIIP.getPath());
-      sb.append(" can't be moved from encryption zone ");
-      sb.append(srcEZPath);
-      sb.append(" to encryption zone ");
-      sb.append(dstEZPath);
-      sb.append(".");
+      sb.append(" can't be moved from encryption zone ").append(srcEZPath)
+          .append(" to encryption zone ").append(dstEZPath).append(".");
       throw new IOException(sb.toString());
     }
     checkMoveValidityForReencryption(srcIIP.getPath(),
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
index 56aa927f81..cc1dcba8e3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
@@ -762,16 +762,16 @@ private void printStatistics(boolean force) {
     }
     lastPrintTime = now;
    StringBuilder buf = new StringBuilder();
-    buf.append("Number of transactions: ");
-    buf.append(numTransactions);
-    buf.append(" Total time for transactions(ms): ");
-    buf.append(totalTimeTransactions);
-    buf.append(" Number of transactions batched in Syncs: ");
-    buf.append(numTransactionsBatchedInSync.get());
-    buf.append(" Number of syncs: ");
-    buf.append(editLogStream.getNumSync());
-    buf.append(" SyncTimes(ms): ");
-    buf.append(journalSet.getSyncTimes());
+    buf.append("Number of transactions: ")
+        .append(numTransactions)
+        .append(" Total time for transactions(ms): ")
+        .append(totalTimeTransactions)
+        .append(" Number of transactions batched in Syncs: ")
+        .append(numTransactionsBatchedInSync.get())
+        .append(" Number of syncs: ")
+        .append(editLogStream.getNumSync())
+        .append(" SyncTimes(ms): ")
+        .append(journalSet.getSyncTimes());
     LOG.info(buf.toString());
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
index 6755487ec5..44ad9d538e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
@@ -1055,8 +1055,8 @@ private long applyEditLogOp(FSEditLogOp op, FSDirectory fsDir,
   private static String formatEditLogReplayError(EditLogInputStream in,
       long recentOpcodeOffsets[], long txid) {
     StringBuilder sb = new StringBuilder();
-    sb.append("Error replaying edit log at offset " + in.getPosition());
-    sb.append(". Expected transaction ID was ").append(txid);
+    sb.append("Error replaying edit log at offset " + in.getPosition())
+        .append(". Expected transaction ID was ").append(txid);
     if (recentOpcodeOffsets[0] != -1) {
       Arrays.sort(recentOpcodeOffsets);
       sb.append("\nRecent opcode offsets:");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
index 8293a82db9..b93dcb7d5a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
@@ -314,10 +314,10 @@ void readRpcIdsFromXml(Stanza st) {
   private static void appendRpcIdsToString(final StringBuilder builder,
       final byte[] clientId, final int callId) {
-    builder.append(", RpcClientId=");
-    builder.append(ClientId.toString(clientId));
-    builder.append(", RpcCallId=");
-    builder.append(callId);
+    builder.append(", RpcClientId=")
+        .append(ClientId.toString(clientId))
+        .append(", RpcCallId=")
+        .append(callId);
   }
   private static void appendRpcIdsToXml(ContentHandler contentHandler,
@@ -682,44 +682,44 @@ private static Block[] readBlocks(
     public String stringifyMembers() {
       StringBuilder builder = new StringBuilder();
-      builder.append("[length=");
-      builder.append(length);
-      builder.append(", inodeId=");
-      builder.append(inodeId);
-      builder.append(", path=");
-      builder.append(path);
-      builder.append(", replication=");
-      builder.append(replication);
-      builder.append(", mtime=");
-      builder.append(mtime);
-      builder.append(", atime=");
-      builder.append(atime);
-      builder.append(", blockSize=");
-      builder.append(blockSize);
-      builder.append(", blocks=");
-      builder.append(Arrays.toString(blocks));
-      builder.append(", permissions=");
-      builder.append(permissions);
-      builder.append(", aclEntries=");
-      builder.append(aclEntries);
-      builder.append(", clientName=");
-      builder.append(clientName);
-      builder.append(", clientMachine=");
-      builder.append(clientMachine);
-      builder.append(", overwrite=");
-      builder.append(overwrite);
+      builder.append("[length=")
+          .append(length)
+          .append(", inodeId=")
+          .append(inodeId)
+          .append(", path=")
+          .append(path)
+          .append(", replication=")
+          .append(replication)
+          .append(", mtime=")
+          .append(mtime)
+          .append(", atime=")
+          .append(atime)
+          .append(", blockSize=")
+          .append(blockSize)
+          .append(", blocks=")
+          .append(Arrays.toString(blocks))
+          .append(", permissions=")
+          .append(permissions)
+          .append(", aclEntries=")
+          .append(aclEntries)
+          .append(", clientName=")
+          .append(clientName)
+          .append(", clientMachine=")
+          .append(clientMachine)
+          .append(", overwrite=")
+          .append(overwrite);
       if (this.opCode == OP_ADD) {
         appendRpcIdsToString(builder, rpcClientId, rpcCallId);
       }
-      builder.append(", storagePolicyId=");
-      builder.append(storagePolicyId);
-      builder.append(", erasureCodingPolicyId=");
-      builder.append(erasureCodingPolicyId);
-      builder.append(", opCode=");
-      builder.append(opCode);
-      builder.append(", txid=");
-      builder.append(txid);
-      builder.append("]");
+      builder.append(", storagePolicyId=")
+          .append(storagePolicyId)
+          .append(", erasureCodingPolicyId=")
+          .append(erasureCodingPolicyId)
+          .append(", opCode=")
+          .append(opCode)
+          .append(", txid=")
+          .append(txid)
+          .append("]");
       return builder.toString();
     }
@@ -809,8 +809,8 @@ public boolean shouldCompleteLastBlock() {
     @Override
     public String toString() {
       StringBuilder builder = new StringBuilder();
-      builder.append("AddOp ");
-      builder.append(stringifyMembers());
+
builder.append("AddOp ") + .append(stringifyMembers()); return builder.toString(); } } @@ -837,8 +837,8 @@ public boolean shouldCompleteLastBlock() { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("CloseOp "); - builder.append(stringifyMembers()); + builder.append("CloseOp ") + .append(stringifyMembers()); return builder.toString(); } } @@ -880,11 +880,11 @@ AppendOp setNewBlock(boolean newBlock) { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("AppendOp "); - builder.append("[path=").append(path); - builder.append(", clientName=").append(clientName); - builder.append(", clientMachine=").append(clientMachine); - builder.append(", newBlock=").append(newBlock).append("]"); + builder.append("AppendOp ") + .append("[path=").append(path) + .append(", clientName=").append(clientName) + .append(", clientMachine=").append(clientMachine) + .append(", newBlock=").append(newBlock).append("]"); return builder.toString(); } @@ -1010,11 +1010,11 @@ void readFields(DataInputStream in, int logVersion) throws IOException { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("AddBlockOp [path=") - .append(path) - .append(", penultimateBlock=") - .append(penultimateBlock == null ? "NULL" : penultimateBlock) - .append(", lastBlock=") - .append(lastBlock); + .append(path) + .append(", penultimateBlock=") + .append(penultimateBlock == null ? "NULL" : penultimateBlock) + .append(", lastBlock=") + .append(lastBlock); appendRpcIdsToString(sb, rpcClientId, rpcCallId); sb.append("]"); return sb.toString(); @@ -1191,15 +1191,15 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("SetReplicationOp [path="); - builder.append(path); - builder.append(", replication="); - builder.append(replication); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("SetReplicationOp [path=") + .append(path) + .append(", replication=") + .append(replication) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -1323,20 +1323,20 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("ConcatDeleteOp [length="); - builder.append(length); - builder.append(", trg="); - builder.append(trg); - builder.append(", srcs="); - builder.append(Arrays.toString(srcs)); - builder.append(", timestamp="); - builder.append(timestamp); + builder.append("ConcatDeleteOp [length=") + .append(length) + .append(", trg=") + .append(trg) + .append(", srcs=") + .append(Arrays.toString(srcs)) + .append(", timestamp=") + .append(timestamp); appendRpcIdsToString(builder, rpcClientId, rpcCallId); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -1449,20 +1449,20 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("RenameOldOp [length="); - builder.append(length); - builder.append(", src="); - builder.append(src); - builder.append(", dst="); - builder.append(dst); - 
builder.append(", timestamp="); - builder.append(timestamp); + builder.append("RenameOldOp [length=") + .append(length) + .append(", src=") + .append(src) + .append(", dst=") + .append(dst) + .append(", timestamp=") + .append(timestamp); appendRpcIdsToString(builder, rpcClientId, rpcCallId); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -1551,18 +1551,18 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("DeleteOp [length="); - builder.append(length); - builder.append(", path="); - builder.append(path); - builder.append(", timestamp="); - builder.append(timestamp); + builder.append("DeleteOp [length=") + .append(length) + .append(", path=") + .append(path) + .append(", timestamp=") + .append(timestamp); appendRpcIdsToString(builder, rpcClientId, rpcCallId); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -1707,25 +1707,25 @@ void readFields(DataInputStream in, int logVersion) throws IOException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("MkdirOp [length="); - builder.append(length); - builder.append(", inodeId="); - builder.append(inodeId); - builder.append(", path="); - builder.append(path); - builder.append(", timestamp="); - builder.append(timestamp); - builder.append(", permissions="); - builder.append(permissions); - builder.append(", aclEntries="); - builder.append(aclEntries); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append(", xAttrs="); - builder.append(xAttrs); - builder.append("]"); + builder.append("MkdirOp [length=") + .append(length) + .append(", inodeId=") + .append(inodeId) + .append(", path=") + .append(path) + .append(", timestamp=") + .append(timestamp) + .append(", permissions=") + .append(permissions) + .append(", aclEntries=") + .append(aclEntries) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append(", xAttrs=") + .append(xAttrs) + .append("]"); return builder.toString(); } @@ -1801,13 +1801,13 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("SetGenstampOp [GenStamp="); - builder.append(genStampV1); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("SetGenstampOp [GenStamp=") + .append(genStampV1) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -1859,13 +1859,13 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("SetGenstampV2Op [GenStampV2="); - builder.append(genStampV2); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("SetGenstampV2Op [GenStampV2=") + .append(genStampV2) + .append(", 
opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -1917,13 +1917,13 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("AllocateBlockIdOp [blockId="); - builder.append(blockId); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("AllocateBlockIdOp [blockId=") + .append(blockId) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -1984,15 +1984,15 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("SetPermissionsOp [src="); - builder.append(src); - builder.append(", permissions="); - builder.append(permissions); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("SetPermissionsOp [src=") + .append(src) + .append(", permissions=") + .append(permissions) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -2065,17 +2065,17 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("SetOwnerOp [src="); - builder.append(src); - builder.append(", username="); - builder.append(username); - builder.append(", groupname="); - builder.append(groupname); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("SetOwnerOp [src=") + .append(src) + .append(", username=") + .append(username) + .append(", groupname=") + .append(groupname) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -2133,15 +2133,15 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("SetNSQuotaOp [src="); - builder.append(src); - builder.append(", nsQuota="); - builder.append(nsQuota); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("SetNSQuotaOp [src=") + .append(src) + .append(", nsQuota=") + .append(nsQuota) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -2189,13 +2189,13 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("ClearNSQuotaOp [src="); - builder.append(src); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("ClearNSQuotaOp [src=") + .append(src) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -2264,17 +2264,17 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("SetQuotaOp [src="); - builder.append(src); - builder.append(", nsQuota="); - builder.append(nsQuota); - builder.append(", dsQuota="); - 
builder.append(dsQuota); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("SetQuotaOp [src=") + .append(src) + .append(", nsQuota=") + .append(nsQuota) + .append(", dsQuota=") + .append(dsQuota) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -2345,17 +2345,17 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("SetTypeQuotaOp [src="); - builder.append(src); - builder.append(", storageType="); - builder.append(type); - builder.append(", dsQuota="); - builder.append(dsQuota); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("SetTypeQuotaOp [src=") + .append(src) + .append(", storageType=") + .append(type) + .append(", dsQuota=") + .append(dsQuota) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -2447,18 +2447,18 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("TimesOp [length="); - builder.append(length); - builder.append(", path="); - builder.append(path); - builder.append(", mtime="); - builder.append(mtime); - builder.append(", atime="); - builder.append(atime); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("TimesOp [length=") + .append(length) + .append(", path=") + .append(path) + .append(", mtime=") + .append(mtime) + .append(", atime=") + .append(atime) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -2590,26 +2590,26 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("SymlinkOp [length="); - builder.append(length); - builder.append(", inodeId="); - builder.append(inodeId); - builder.append(", path="); - builder.append(path); - builder.append(", value="); - builder.append(value); - builder.append(", mtime="); - builder.append(mtime); - builder.append(", atime="); - builder.append(atime); - builder.append(", permissionStatus="); - builder.append(permissionStatus); + builder.append("SymlinkOp [length=") + .append(length) + .append(", inodeId=") + .append(inodeId) + .append(", path=") + .append(path) + .append(", value=") + .append(value) + .append(", mtime=") + .append(mtime) + .append(", atime=") + .append(atime) + .append(", permissionStatus=") + .append(permissionStatus); appendRpcIdsToString(builder, rpcClientId, rpcCallId); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -2748,22 +2748,22 @@ static BytesWritable toBytesWritable(Rename...
options) { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("RenameOp [length="); - builder.append(length); - builder.append(", src="); - builder.append(src); - builder.append(", dst="); - builder.append(dst); - builder.append(", timestamp="); - builder.append(timestamp); - builder.append(", options="); - builder.append(Arrays.toString(options)); + builder.append("RenameOp [length=") + .append(length) + .append(", src=") + .append(src) + .append(", dst=") + .append(dst) + .append(", timestamp=") + .append(timestamp) + .append(", options=") + .append(Arrays.toString(options)); appendRpcIdsToString(builder, rpcClientId, rpcCallId); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -2918,23 +2918,23 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("TruncateOp [src="); - builder.append(src); - builder.append(", clientName="); - builder.append(clientName); - builder.append(", clientMachine="); - builder.append(clientMachine); - builder.append(", newLength="); - builder.append(newLength); - builder.append(", timestamp="); - builder.append(timestamp); - builder.append(", truncateBlock="); - builder.append(truncateBlock); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("TruncateOp [src=") + .append(src) + .append(", clientName=") + .append(clientName) + .append(", clientMachine=") + .append(clientMachine) + .append(", newLength=") + .append(newLength) + .append(", timestamp=") + .append(timestamp) + .append(", truncateBlock=") + .append(truncateBlock) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } } @@ -2998,16 +2998,16 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("ReassignLeaseOp [leaseHolder="); - builder.append(leaseHolder); - builder.append(", path="); - builder.append(path); - builder.append(", newHolder="); - builder.append(newHolder); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("ReassignLeaseOp [leaseHolder=") + .append(leaseHolder) + .append(", path=") + .append(path) + .append(", newHolder=") + .append(newHolder) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -3079,15 +3079,15 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("GetDelegationTokenOp [token="); - builder.append(token); - builder.append(", expiryTime="); - builder.append(expiryTime); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("GetDelegationTokenOp [token=") + .append(token) + .append(", expiryTime=") + .append(expiryTime) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -3158,15 +3158,15 @@ void readFields(DataInputStream in, int
logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("RenewDelegationTokenOp [token="); - builder.append(token); - builder.append(", expiryTime="); - builder.append(expiryTime); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("RenewDelegationTokenOp [token=") + .append(token) + .append(", expiryTime=") + .append(expiryTime) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -3223,13 +3223,13 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("CancelDelegationTokenOp [token="); - builder.append(token); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("CancelDelegationTokenOp [token=") + .append(token) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -3281,13 +3281,13 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("UpdateMasterKeyOp [key="); - builder.append(key); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("UpdateMasterKeyOp [key=") + .append(key) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -3334,11 +3334,11 @@ void writeFields(DataOutputStream out) throws IOException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("LogSegmentOp [opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("LogSegmentOp [opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @@ -3391,11 +3391,11 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("InvalidOp [opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("InvalidOp [opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } @Override @@ -3474,10 +3474,10 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("CreateSnapshotOp [snapshotRoot="); - builder.append(snapshotRoot); - builder.append(", snapshotName="); - builder.append(snapshotName); + builder.append("CreateSnapshotOp [snapshotRoot=") + .append(snapshotRoot) + .append(", snapshotName=") + .append(snapshotName); appendRpcIdsToString(builder, rpcClientId, rpcCallId); builder.append("]"); return builder.toString(); @@ -3550,10 +3550,10 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("DeleteSnapshotOp [snapshotRoot="); - builder.append(snapshotRoot); - builder.append(", snapshotName="); - builder.append(snapshotName); + builder.append("DeleteSnapshotOp [snapshotRoot=") + .append(snapshotRoot) + 
.append(", snapshotName=") + .append(snapshotName); appendRpcIdsToString(builder, rpcClientId, rpcCallId); builder.append("]"); return builder.toString(); @@ -3638,12 +3638,12 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("RenameSnapshotOp [snapshotRoot="); - builder.append(snapshotRoot); - builder.append(", snapshotOldName="); - builder.append(snapshotOldName); - builder.append(", snapshotNewName="); - builder.append(snapshotNewName); + builder.append("RenameSnapshotOp [snapshotRoot=") + .append(snapshotRoot) + .append(", snapshotOldName=") + .append(snapshotOldName) + .append(", snapshotNewName=") + .append(snapshotNewName); appendRpcIdsToString(builder, rpcClientId, rpcCallId); builder.append("]"); return builder.toString(); @@ -3702,9 +3702,9 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("AllowSnapshotOp [snapshotRoot="); - builder.append(snapshotRoot); - builder.append("]"); + builder.append("AllowSnapshotOp [snapshotRoot=") + .append(snapshotRoot) + .append("]"); return builder.toString(); } } @@ -3760,9 +3760,9 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("DisallowSnapshotOp [snapshotRoot="); - builder.append(snapshotRoot); - builder.append("]"); + builder.append("DisallowSnapshotOp [snapshotRoot=") + .append(snapshotRoot) + .append("]"); return builder.toString(); } } @@ -3825,12 +3825,12 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("AddCacheDirectiveInfo ["); - builder.append("id=" + directive.getId() + ","); - builder.append("path=" + directive.getPath().toUri().getPath() + ","); - builder.append("replication=" + directive.getReplication() + ","); - builder.append("pool=" + directive.getPool() + ","); - builder.append("expiration=" + directive.getExpiration().getMillis()); + builder.append("AddCacheDirectiveInfo [") + .append("id=" + directive.getId() + ",") + .append("path=" + directive.getPath().toUri().getPath() + ",") + .append("replication=" + directive.getReplication() + ",") + .append("pool=" + directive.getPool() + ",") + .append("expiration=" + directive.getExpiration().getMillis()); appendRpcIdsToString(builder, rpcClientId, rpcCallId); builder.append("]"); return builder.toString(); @@ -3891,8 +3891,8 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("ModifyCacheDirectiveInfoOp["); - builder.append("id=").append(directive.getId()); + builder.append("ModifyCacheDirectiveInfoOp[") + .append("id=").append(directive.getId()); if (directive.getPath() != null) { builder.append(",").append("path=").append(directive.getPath()); } @@ -3965,8 +3965,8 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("RemoveCacheDirectiveInfo ["); - builder.append("id=" + Long.toString(id)); + builder.append("RemoveCacheDirectiveInfo [") + .append("id=" + Long.toString(id)); appendRpcIdsToString(builder, rpcClientId, rpcCallId); builder.append("]"); return builder.toString(); @@ -4027,12 +4027,12 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override 
public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("AddCachePoolOp ["); - builder.append("poolName=" + info.getPoolName() + ","); - builder.append("ownerName=" + info.getOwnerName() + ","); - builder.append("groupName=" + info.getGroupName() + ","); - builder.append("mode=" + Short.toString(info.getMode().toShort()) + ","); - builder.append("limit=" + Long.toString(info.getLimit())); + builder.append("AddCachePoolOp [") + .append("poolName=" + info.getPoolName() + ",") + .append("ownerName=" + info.getOwnerName() + ",") + .append("groupName=" + info.getGroupName() + ",") + .append("mode=" + Short.toString(info.getMode().toShort()) + ",") + .append("limit=" + Long.toString(info.getLimit())); appendRpcIdsToString(builder, rpcClientId, rpcCallId); builder.append("]"); return builder.toString(); @@ -4161,8 +4161,8 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("RemoveCachePoolOp ["); - builder.append("poolName=" + poolName); + builder.append("RemoveCachePoolOp [") + .append("poolName=" + poolName); appendRpcIdsToString(builder, rpcClientId, rpcCallId); builder.append("]"); return builder.toString(); @@ -4478,8 +4478,8 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("AddErasureCodingPolicy ["); - builder.append(ecPolicy.toString()); + builder.append("AddErasureCodingPolicy [") + .append(ecPolicy.toString()); appendRpcIdsToString(builder, rpcClientId, rpcCallId); builder.append("]"); @@ -4547,8 +4547,8 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("EnableErasureCodingPolicy ["); - builder.append(ecPolicyName); + builder.append("EnableErasureCodingPolicy [") + .append(ecPolicyName); appendRpcIdsToString(builder, rpcClientId, rpcCallId); builder.append("]"); @@ -4614,8 +4614,8 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("DisableErasureCodingPolicy ["); - builder.append(ecPolicyName); + builder.append("DisableErasureCodingPolicy [") + .append(ecPolicyName); appendRpcIdsToString(builder, rpcClientId, rpcCallId); builder.append("]"); @@ -4681,8 +4681,8 @@ void fromXml(Stanza st) throws InvalidXmlException { @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("RemoveErasureCodingPolicy ["); - builder.append(ecPolicyName); + builder.append("RemoveErasureCodingPolicy [") + .append(ecPolicyName); appendRpcIdsToString(builder, rpcClientId, rpcCallId); builder.append("]"); @@ -4792,15 +4792,15 @@ void readFields(DataInputStream in, int logVersion) @Override public String toString() { StringBuilder builder = new StringBuilder(); - builder.append("SetStoragePolicyOp [path="); - builder.append(path); - builder.append(", policyId="); - builder.append(policyId); - builder.append(", opCode="); - builder.append(opCode); - builder.append(", txid="); - builder.append(txid); - builder.append("]"); + builder.append("SetStoragePolicyOp [path=") + .append(path) + .append(", policyId=") + .append(policyId) + .append(", opCode=") + .append(opCode) + .append(", txid=") + .append(txid) + .append("]"); return builder.toString(); } diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java index 40e42c4d1e..4cedcd88f9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java @@ -8031,19 +8031,19 @@ public void logAuditEvent(boolean succeeded, String userName, src = escapeJava(src); dst = escapeJava(dst); sb.setLength(0); - sb.append("allowed=").append(succeeded).append("\t"); - sb.append("ugi=").append(userName).append("\t"); - sb.append("ip=").append(addr).append("\t"); - sb.append("cmd=").append(cmd).append("\t"); - sb.append("src=").append(src).append("\t"); - sb.append("dst=").append(dst).append("\t"); + sb.append("allowed=").append(succeeded).append("\t") + .append("ugi=").append(userName).append("\t") + .append("ip=").append(addr).append("\t") + .append("cmd=").append(cmd).append("\t") + .append("src=").append(src).append("\t") + .append("dst=").append(dst).append("\t"); if (null == status) { sb.append("perm=null"); } else { - sb.append("perm="); - sb.append(status.getOwner()).append(":"); - sb.append(status.getGroup()).append(":"); - sb.append(status.getPermission()); + sb.append("perm=") + .append(status.getOwner()).append(":") + .append(status.getGroup()).append(":") + .append(status.getPermission()); } if (logTokenTrackingId) { sb.append("\t").append("trackingId="); @@ -8061,8 +8061,8 @@ public void logAuditEvent(boolean succeeded, String userName, } sb.append(trackingId); } - sb.append("\t").append("proto="); - sb.append(Server.getProtocol()); + sb.append("\t").append("proto=") + .append(Server.getProtocol()); if (isCallerContextEnabled && callerContext != null && callerContext.isContextValid()) { @@ -8076,8 +8076,8 @@ public void logAuditEvent(boolean succeeded, String userName, if (callerContext.getSignature() != null && callerContext.getSignature().length > 0 && callerContext.getSignature().length <= callerSignatureMaxLen) { - sb.append(":"); - sb.append(new String(callerContext.getSignature(), + sb.append(":") + .append(new String(callerContext.getSignature(), CallerContext.SIGNATURE_ENCODING)); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java index 4ab0828ca8..a5df9f53b9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/JournalSet.java @@ -697,8 +697,8 @@ String getSyncTimes() { StringBuilder buf = new StringBuilder(); for (JournalAndStream jas : journals) { if (jas.isActive()) { - buf.append(jas.getCurrentStream().getTotalSyncTime()); - buf.append(" "); + buf.append(jas.getCurrentStream().getTotalSyncTime()) + .append(" "); } } return buf.toString(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java index 095a6ffffc..9c39d8648d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java @@ -855,11 +855,11 @@ private void collectBlocksSummary(String parent, HdfsFileStatus file, block.getLocalBlock()); DatanodeStorageInfo[] storages = storedBlock .getUnderConstructionFeature().getExpectedStorageLocations(); - report.append('\n'); - report.append("Under Construction Block:\n"); - report.append(blockNumber).append(". ").append(blkName); - report.append(" len=").append(block.getNumBytes()); - report.append(" Expected_repl=" + storages.length); + report.append('\n') + .append("Under Construction Block:\n") + .append(blockNumber).append(". ").append(blkName) + .append(" len=").append(block.getNumBytes()) + .append(" Expected_repl=" + storages.length); String info=getReplicaInfo(storedBlock); if (!info.isEmpty()){ report.append(" ").append(info); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java index 39ce2dcc81..934c3169fc 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/QuotaByStorageTypeEntry.java @@ -54,9 +54,9 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(); assert (type != null); - sb.append(StringUtils.toLowerCase(type.toString())); - sb.append(':'); - sb.append(quota); + sb.append(StringUtils.toLowerCase(type.toString())) + .append(':') + .append(quota); return sb.toString(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java index 19e046dfc1..edda691fbb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RedundantEditLogInputStream.java @@ -140,8 +140,8 @@ public String getName() { StringBuilder bld = new StringBuilder(); String prefix = ""; for (EditLogInputStream elis : streams) { - bld.append(prefix); - bld.append(elis.getName()); + bld.append(prefix) + .append(elis.getName()); prefix = ", "; } return bld.toString(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StoragePolicySummary.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StoragePolicySummary.java index bcdad355ac..a13032de1c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StoragePolicySummary.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StoragePolicySummary.java @@ -83,8 +83,10 @@ public int compare(Entry o1, public String toString() { StringBuilder compliantBlocksSB = new StringBuilder(); - compliantBlocksSB.append("\nBlocks satisfying the specified storage policy:"); - compliantBlocksSB.append("\nStorage Policy # of blocks % of blocks\n"); + compliantBlocksSB + .append("\nBlocks satisfying the specified storage policy:") + .append("\nStorage Policy" + + " # of blocks % of blocks\n"); StringBuilder nonCompliantBlocksSB = new StringBuilder(); 
Formatter compliantFormatter = new Formatter(compliantBlocksSB); Formatter nonCompliantFormatter = new Formatter(nonCompliantBlocksSB); @@ -103,9 +105,10 @@ public String toString() { percentFormat.format(percent)); } else { if (nonCompliantBlocksSB.length() == 0) { - nonCompliantBlocksSB.append("\nBlocks NOT satisfying the specified storage policy:"); - nonCompliantBlocksSB.append("\nStorage Policy "); - nonCompliantBlocksSB.append( + nonCompliantBlocksSB + .append("\nBlocks NOT satisfying the specified storage policy:") + .append("\nStorage Policy ") + .append( "Specified Storage Policy # of blocks % of blocks\n"); } nonCompliantFormatter.format("%-35s %-20s %10d %20s%n", diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java index eed9a6e380..2d14cd6eb1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/ServerCommand.java @@ -55,9 +55,9 @@ public int getAction() { public String toString() { final StringBuilder sb = new StringBuilder(); - sb.append(getClass().getSimpleName()); - sb.append("/"); - sb.append(action); + sb.append(getClass().getSimpleName()) + .append("/") + .append(action); return sb.toString(); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSZKFailoverController.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSZKFailoverController.java index 5ae535a2c4..96723da7c7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSZKFailoverController.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSZKFailoverController.java @@ -241,8 +241,8 @@ private void getLocalNNThreadDump() { IOUtils.copyBytes(conn.getInputStream(), out, 4096, true); StringBuilder localNNThreadDumpContent = new StringBuilder("-- Local NN thread dump -- \n"); - localNNThreadDumpContent.append(out); - localNNThreadDumpContent.append("\n -- Local NN thread dump -- "); + localNNThreadDumpContent.append(out) + .append("\n -- Local NN thread dump -- "); LOG.info("{}", localNNThreadDumpContent.toString()); isThreadDumpCaptured = true; } catch (IOException e) { diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java index e8e5ed8234..9f6f136c6e 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListingFileStatus.java @@ -405,14 +405,14 @@ public int hashCode() { public String toString() { StringBuilder sb = new StringBuilder(super.toString()); sb.append('{'); - sb.append(this.getPath() == null ? "" : this.getPath().toString()); - sb.append(" length = ").append(this.getLen()); - sb.append(" aclEntries = ").append(aclEntries); - sb.append(", xAttrs = ").append(xAttrs); - sb.append(", modTime = ").append(modificationTime); + sb.append(this.getPath() == null ? 
"" : this.getPath().toString()) + .append(" length = ").append(this.getLen()) + .append(" aclEntries = ").append(aclEntries) + .append(", xAttrs = ").append(xAttrs) + .append(", modTime = ").append(modificationTime); if (isSplit()) { - sb.append(", chunkOffset = ").append(this.getChunkOffset()); - sb.append(", chunkLength = ").append(this.getChunkLength()); + sb.append(", chunkOffset = ").append(this.getChunkOffset()) + .append(", chunkLength = ").append(this.getChunkLength()); } sb.append('}'); return sb.toString();