From 59dba6e1bd3f5f62fba8e64040dfe20301518846 Mon Sep 17 00:00:00 2001
From: PJ Fanning
Date: Sun, 18 Aug 2024 16:59:12 +0100
Subject: [PATCH] HADOOP-19134. Use StringBuilder instead of StringBuffer.
 (#6692). Contributed by PJ Fanning

---
 .../apache/hadoop/crypto/key/KeyShell.java | 2 +-
 .../main/java/org/apache/hadoop/fs/DF.java | 2 +-
 .../java/org/apache/hadoop/fs/FileUtil.java | 2 +-
 .../apache/hadoop/oncrpc/RpcDeniedReply.java | 2 +-
 .../apache/hadoop/security/ProviderUtils.java | 2 +-
 .../security/alias/CredentialShell.java | 2 +-
 .../security/ssl/SSLHostnameVerifier.java | 8 +--
 .../java/org/apache/hadoop/util/Shell.java | 6 +-
 .../org/apache/hadoop/util/StringUtils.java | 2 +-
 .../org/apache/hadoop/fs/shell/TestCount.java | 2 +-
 .../alias/TestCredentialProviderFactory.java | 2 +-
 .../org/apache/hadoop/util/TestShell.java | 2 +-
 .../util/functional/TestRemoteIterators.java | 2 +-
 .../hadoop/crypto/key/kms/server/TestKMS.java | 2 +-
 .../org/apache/hadoop/hdfs/tools/DFSck.java | 2 +-
 .../OfflineEditsXmlLoader.java | 7 +--
 .../offlineImageViewer/PBImageCorruption.java | 2 +-
 .../org/apache/hadoop/hdfs/DFSTestUtil.java | 2 +-
 .../org/apache/hadoop/hdfs/TestHDFSTrash.java | 2 +-
 .../blockmanagement/TestBlockManager.java | 8 +--
 .../resources/TestWebHdfsDataLocality.java | 2 +-
 .../mapreduce/v2/app/webapp/ConfBlock.java | 2 +-
 .../hadoop/mapreduce/v2/app/TestRecovery.java | 2 +-
 .../app/webapp/TestAMWebServicesAttempts.java | 2 +-
 .../v2/app/webapp/TestAMWebServicesJobs.java | 2 +-
 .../apache/hadoop/mapred/LocalJobRunner.java | 4 +-
 .../mapreduce/v2/util/MRWebAppUtil.java | 2 +-
 .../mapred/TestLocalModeWithNewApis.java | 2 +-
 .../apache/hadoop/mapred/FileInputFormat.java | 2 +-
 .../hadoop/mapred/InvalidInputException.java | 2 +-
 .../apache/hadoop/mapred/MultiFileSplit.java | 2 +-
 .../apache/hadoop/mapred/SortedRanges.java | 2 +-
 .../org/apache/hadoop/mapred/TaskLog.java | 6 +-
 .../mapred/lib/FieldSelectionMapReduce.java | 2 +-
 .../java/org/apache/hadoop/mapreduce/Job.java | 2 +-
 .../apache/hadoop/mapreduce/JobStatus.java | 2 +-
 .../hadoop/mapreduce/TaskCompletionEvent.java | 2 +-
 .../lib/aggregate/ValueHistogram.java | 4 +-
 .../lib/fieldsel/FieldSelectionHelper.java | 8 +--
 .../lib/input/CombineFileInputFormat.java | 2 +-
 .../mapreduce/lib/input/CombineFileSplit.java | 4 +-
 .../mapreduce/lib/input/FileInputFormat.java | 2 +-
 .../lib/input/InvalidInputException.java | 2 +-
 .../lib/jobcontrol/ControlledJob.java | 2 +-
 .../mapreduce/lib/join/TupleWritable.java | 2 +-
 .../hadoop/mapreduce/split/JobSplit.java | 2 +-
 .../hadoop/mapreduce/task/reduce/Fetcher.java | 8 ++-
 .../task/reduce/ShuffleSchedulerImpl.java | 2 +-
 .../apache/hadoop/mapreduce/tools/CLI.java | 2 +-
 .../mapred/TestFileOutputCommitter.java | 2 +-
 .../lib/output/TestFileOutputCommitter.java | 2 +-
 .../mapreduce/v2/hs/webapp/HsJobBlock.java | 2 +-
 .../mapreduce/v2/hs/webapp/dao/JobInfo.java | 2 +-
 .../hs/webapp/TestHsWebServicesAttempts.java | 2 +-
 .../v2/hs/webapp/VerifyJobsUtils.java | 2 +-
 .../apache/hadoop/RandomTextWriterJob.java | 2 +-
 .../apache/hadoop/fs/AccumulatingReducer.java | 2 +-
 .../org/apache/hadoop/fs/JHLogAnalyzer.java | 2 +-
 .../org/apache/hadoop/mapred/MRBench.java | 2 +-
 .../TestConcatenatedCompressedInput.java | 2 +-
 .../mapred/TestFixedLengthInputFormat.java | 2 +-
 .../mapred/TestMRCJCFileOutputCommitter.java | 2 +-
 .../apache/hadoop/mapred/TestMapProgress.java | 2 +-
 .../org/apache/hadoop/mapred/TestMapRed.java | 2 +-
 .../hadoop/mapred/TestMiniMRClasspath.java | 4 +-
 .../mapred/TestMultipleTextOutputFormat.java | 8 +--
 .../hadoop/mapred/TestTextInputFormat.java | 2 +-
 .../apache/hadoop/mapred/UtilsForTests.java | 6 +-
 .../jobcontrol/JobControlTestUtils.java | 2 +-
 .../hadoop/mapreduce/MapReduceTestUtil.java | 6 +-
 .../hadoop/mapreduce/RandomTextWriter.java | 2 +-
 .../lib/input/TestFixedLengthInputFormat.java | 2 +-
 .../output/TestMRCJCFileOutputCommitter.java | 2 +-
 .../mapreduce/v2/MiniMRYarnCluster.java | 2 +-
 .../hadoop/examples/RandomTextWriter.java | 2 +-
 .../hadoop/examples/dancing/Pentomino.java | 2 +-
 .../hadoop/examples/dancing/Sudoku.java | 2 +-
 .../examples/terasort/TeraScheduler.java | 4 +-
 .../s3a/commit/AbstractITCommitProtocol.java | 2 +-
 .../hadoop/contrib/utils/join/JobBase.java | 2 +-
 .../org/apache/hadoop/tools/DistCpSync.java | 2 +-
 .../apache/hadoop/tools/util/DistCpUtils.java | 2 +-
 .../tools/rumen/datatypes/NodeName.java | 2 +-
 .../apache/hadoop/streaming/PipeMapRed.java | 2 +-
 .../apache/hadoop/streaming/StreamJob.java | 2 +-
 .../streaming/StreamXmlRecordReader.java | 2 -
 .../streaming/TestMultipleArchiveFiles.java | 2 +-
 .../org/apache/hadoop/streaming/UtilTest.java | 2 +-
 .../timeline/TimelineEntityGroupId.java | 2 +-
 .../api/resource/PlacementConstraint.java | 10 +--
 .../hadoop/yarn/client/cli/TestLogsCLI.java | 2 +-
 .../yarn/util/ProcfsBasedProcessTree.java | 2 +-
 .../hadoop/yarn/webapp/view/JQueryUI.java | 4 +-
 .../TestAggregatedLogFormat.java | 2 +-
 .../store/sql/FederationQueryRunner.java | 2 +-
 .../task/DockerContainerDeletionTask.java | 2 +-
 .../PrivilegedOperationExecutor.java | 2 +-
 .../NetworkPacketTaggingHandlerImpl.java | 2 +-
 .../TrafficControlBandwidthHandlerImpl.java | 4 +-
 .../linux/resources/TrafficController.java | 2 +-
 .../linux/runtime/docker/DockerCommand.java | 2 +-
 .../localizer/LocalCacheDirectoryManager.java | 2 +-
 .../nvidia/NvidiaGPUPluginForRuntimeV2.java | 2 +-
 .../recovery/NMStateStoreService.java | 2 +-
 .../TestPrivilegedOperationExecutor.java | 2 +-
 .../TestNetworkPacketTaggingHandlerImpl.java | 2 +-
 ...estTrafficControlBandwidthHandlerImpl.java | 2 +-
 .../resources/TestTrafficController.java | 2 +-
 .../runtime/TestDockerContainerRuntime.java | 2 +-
 .../TestHdfsManifestToResourcesPlugin.java | 2 +-
 .../runtime/TestImageTagToManifestPlugin.java | 2 +-
 .../runtime/TestRuncContainerRuntime.java | 2 +-
 .../docker/TestDockerCommandExecutor.java | 4 +-
 .../TestLocalCacheDirectoryManager.java | 2 +-
 .../TestDevicePluginAdapter.java | 2 +-
 .../scheduler/capacity/PlanQueue.java | 2 +-
 .../resourcemanager/webapp/RMWebServices.java | 2 +-
 .../webapp/dao/AllocationTagsInfo.java | 2 +-
 .../constraint/TestPlacementProcessor.java | 2 +-
 .../algorithm/TestCircularIterator.java | 8 +--
 .../fair/TestQueuePlacementPolicy.java | 62 +++++++++----------
 .../webapp/FederationInterceptorREST.java | 8 +--
 122 files changed, 195 insertions(+), 196 deletions(-)
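Every buffer converted by this patch is method-local or otherwise confined to a single thread, so the per-call synchronization that StringBuffer performs is pure overhead; StringBuilder exposes the same append/toString API without locking. A minimal sketch of the pattern being swapped, using hypothetical method names that are not part of this patch:

    // StringBuffer acquires a monitor on every append; StringBuilder does not.
    // For a builder that never escapes the method, the result is identical.
    static String usageOld() {
      StringBuffer sb = new StringBuffer();   // thread-safe, but never shared here
      return sb.append("usage: ").append("keyShell").toString();
    }

    static String usageNew() {
      StringBuilder sb = new StringBuilder(); // same API, no lock acquisition
      return sb.append("usage: ").append("keyShell").toString();
    }
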
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
index c18d0d41bc..cd774479d7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
@@ -169,7 +169,7 @@ protected int init(String[] args) throws IOException {
 
   @Override
   public String getCommandUsage() {
-    StringBuffer sbuf = new StringBuffer(USAGE_PREFIX + COMMANDS);
+    StringBuilder sbuf = new StringBuilder(USAGE_PREFIX + COMMANDS);
     String banner = StringUtils.repeat("=", 66);
     sbuf.append(banner + "\n");
     sbuf.append(CreateCommand.USAGE + ":\n\n" + CreateCommand.DESC + "\n");
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
index c5a052f3de..d88a1eca45 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
@@ -163,7 +163,7 @@ protected void parseExecResult(BufferedReader lines) throws IOException {
   @VisibleForTesting
   protected void parseOutput() throws IOException {
     if (output.size() < 2) {
-      StringBuffer sb = new StringBuffer("Fewer lines of output than expected");
+      StringBuilder sb = new StringBuilder("Fewer lines of output than expected");
       if (output.size() > 0) {
         sb.append(": " + output.get(0));
       }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index fa87bb48aa..56b97bf086 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -1052,7 +1052,7 @@ private static void unTarUsingTar(InputStream inputStream, File untarDir,
 
   private static void unTarUsingTar(File inFile, File untarDir,
       boolean gzipped) throws IOException {
-    StringBuffer untarCommand = new StringBuffer();
+    StringBuilder untarCommand = new StringBuilder();
     // not using canonical path here; this postpones relative path
     // resolution until bash is executed.
     final String source = "'" + FileUtil.makeSecureShellPath(inFile) + "'";
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java
index 62bbd93342..62b6c51bcb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java
@@ -58,7 +58,7 @@ public RejectState getRejectState() {
 
   @Override
   public String toString() {
-    return new StringBuffer().append("xid:").append(xid)
+    return new StringBuilder().append("xid:").append(xid)
         .append(",messageType:").append(messageType).append("verifier_flavor:")
         .append(verifier.getFlavor()).append("rejectState:")
         .append(rejectState).toString();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
index 9cd85499f5..ae17cde11c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
@@ -148,7 +148,7 @@ public static Configuration excludeIncompatibleCredentialProviders(
     if (providerPath == null) {
       return config;
     }
-    StringBuffer newProviderPath = new StringBuffer();
+    StringBuilder newProviderPath = new StringBuilder();
     String[] providers = providerPath.split(",");
     Path path = null;
     for (String provider: providers) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
index 45b5af36bb..41f56715d1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/CredentialShell.java
@@ -127,7 +127,7 @@ protected int init(String[] args) throws IOException {
 
   @Override
   public String getCommandUsage() {
-    StringBuffer sbuf = new StringBuffer(USAGE_PREFIX + COMMANDS);
+    StringBuilder sbuf = new StringBuilder(USAGE_PREFIX + COMMANDS);
     String banner = StringUtils.repeat("=", 66);
     sbuf.append(banner + "\n")
         .append(CreateCommand.USAGE + ":\n\n" + CreateCommand.DESC + "\n")
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
index 6a7c9d48e4..86c92ab114 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLHostnameVerifier.java
@@ -370,7 +370,7 @@ public void check(final String[] hosts, final String[] cns,
                   strictWithSubDomains);
         }
         // Build up lists of allowed hosts For logging/debugging purposes.
-        StringBuffer buf = new StringBuffer(32);
+        StringBuilder buf = new StringBuilder(32);
         buf.append('<');
         for (int i = 0; i < hosts.length; i++) {
           String h = hosts[i];
@@ -408,15 +408,15 @@ public void check(final String[] hosts, final String[] cns,
             throw new SSLException(msg);
         }
 
-        // StringBuffer for building the error message.
-        buf = new StringBuffer();
+        // StringBuilder for building the error message.
+        buf = new StringBuilder();
         boolean match = false;
         out:
         for (Iterator<String> it = names.iterator(); it.hasNext();) {
             // Don't trim the CN, though!
            final String cn = StringUtils.toLowerCase(it.next());
-            // Store CN in StringBuffer in case we need to report an error.
+            // Store CN in StringBuilder in case we need to report an error.
            buf.append(" <")
               .append(cn)
               .append('>');
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
index 91868365b1..e0d199ea86 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
@@ -1014,7 +1014,7 @@ private void runCommand() throws IOException {
     BufferedReader inReader =
             new BufferedReader(new InputStreamReader(process.getInputStream(),
                 StandardCharsets.UTF_8));
-    final StringBuffer errMsg = new StringBuffer();
+    final StringBuilder errMsg = new StringBuilder();
 
     // read error and input streams as this would free up the buffers
     // free the error stream buffer
@@ -1208,7 +1208,7 @@ public static class ShellCommandExecutor extends Shell
       implements CommandExecutor {
 
     private String[] command;
-    private StringBuffer output;
+    private StringBuilder output;
 
 
     public ShellCommandExecutor(String[] execString) {
@@ -1289,7 +1289,7 @@ public String[] getExecString() {
 
   @Override
   protected void parseExecResult(BufferedReader lines) throws IOException {
-    output = new StringBuffer();
+    output = new StringBuilder();
     char[] buf = new char[512];
     int nRead;
     while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
index b109d8bacb..14a7458157 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
@@ -1334,7 +1334,7 @@ public static String wrap(String str, int wrapLength, String newLineStr,
 
     int inputLineLength = str.length();
     int offset = 0;
-    StringBuffer wrappedLine = new StringBuffer(inputLineLength + 32);
+    StringBuilder wrappedLine = new StringBuilder(inputLineLength + 32);
 
     while(inputLineLength - offset > wrapLength) {
       if(str.charAt(offset) == 32) {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java
index c86a4c89df..a2af500c30 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestCount.java
@@ -580,7 +580,7 @@ public MockQuotaUsage() {
     public String toString(boolean hOption, boolean tOption,
        List<StorageType> types) {
       if (tOption) {
-        StringBuffer result = new StringBuffer();
+        StringBuilder result = new StringBuilder();
         result.append(hOption ? HUMAN : BYTES);
 
         for (StorageType type : types) {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
index fb17977aa2..37da798e80 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
@@ -114,7 +114,7 @@ public void testUriErrors() throws Exception {
   }
 
   private static char[] generatePassword(int length) {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     Random r = new Random();
     for (int i = 0; i < length; i++) {
       sb.append(chars[r.nextInt(chars.length)]);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
index 9ae52ff95c..2dafe81696 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestShell.java
@@ -480,7 +480,7 @@ public void testBashQuote() {
   @Test(timeout=120000)
   public void testDestroyAllShellProcesses() throws Throwable {
     Assume.assumeFalse(WINDOWS);
-    StringBuffer sleepCommand = new StringBuffer();
+    StringBuilder sleepCommand = new StringBuilder();
     sleepCommand.append("sleep 200");
     String[] shellCmd = {"bash", "-c", sleepCommand.toString()};
     final ShellCommandExecutor shexc1 = new ShellCommandExecutor(shellCmd);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/functional/TestRemoteIterators.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/functional/TestRemoteIterators.java
index 373e1003ef..4f83b510c3 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/functional/TestRemoteIterators.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/functional/TestRemoteIterators.java
@@ -86,7 +86,7 @@ public void log(Object o) {
    */
   @Test
   public void testSingleton() throws Throwable {
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     String name = "singleton";
     RemoteIterator<String> it = remoteIteratorFromSingleton(name);
     assertStringValueContains(it, "SingletonIterator");
diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
index f4c7fbe0b3..282ae36f86 100644
--- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
+++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
@@ -167,7 +167,7 @@ protected String generateLoadBalancingKeyProviderUriString() {
     if (kmsUrl == null || kmsUrl.size() == 0) {
       return null;
     }
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (int i = 0; i < kmsUrl.size(); i++) {
       sb.append(KMSClientProvider.SCHEME_NAME + "://" +
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
index a0da4eaf80..e279ea349b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
@@ -195,7 +195,7 @@ private Integer listCorruptFileBlocks(String dir, String baseUrl)
     final String cookiePrefix = "Cookie:";
     boolean allDone = false;
     while (!allDone) {
-      final StringBuffer url = new StringBuffer(baseUrl);
+      final StringBuilder url = new StringBuilder(baseUrl);
       if (cookie > 0) {
         url.append("&startblockafter=").append(String.valueOf(cookie));
       }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
index ac43b21d84..7169668eda 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
@@ -32,7 +32,6 @@
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.OpInstanceCache;
-import org.apache.hadoop.hdfs.tools.offlineEditsViewer.OfflineEditsViewer;
 import org.apache.hadoop.hdfs.util.XMLUtils.Stanza;
 import org.xml.sax.Attributes;
 import org.xml.sax.InputSource;
@@ -57,7 +56,7 @@ class OfflineEditsXmlLoader
   private Stanza stanza;
   private Stack<Stanza> stanzaStack;
   private FSEditLogOpCodes opCode;
-  private StringBuffer cbuf;
+  private StringBuilder cbuf;
   private long nextTxId;
   private final OpInstanceCache opCache = new OpInstanceCache();
 
@@ -119,7 +118,7 @@ public void startDocument() {
     stanza = null;
     stanzaStack = new Stack<Stanza>();
     opCode = null;
-    cbuf = new StringBuffer();
+    cbuf = new StringBuilder();
     nextTxId = -1;
   }
 
@@ -182,7 +181,7 @@ public void startElement (String uri, String name,
   @Override
   public void endElement (String uri, String name,
       String qName) {
     String str = XMLUtils.unmangleXmlString(cbuf.toString(), false).trim();
-    cbuf = new StringBuffer();
+    cbuf = new StringBuilder();
     switch (state) {
     case EXPECT_EDITS_TAG:
       throw new InvalidXmlException("expected ");
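The OfflineEditsXmlLoader hunks above swap the character-accumulation buffer of a SAX handler: characters() may deliver an element's text in several chunks, so the handler appends into a builder and drains it in endElement(). A minimal sketch of that pattern under the same assumptions (hypothetical handler class, not part of this patch):

    import org.xml.sax.helpers.DefaultHandler;

    class TextCollector extends DefaultHandler {
      private StringBuilder cbuf = new StringBuilder();

      @Override
      public void characters(char[] ch, int start, int length) {
        cbuf.append(ch, start, length); // may fire several times per element
      }

      @Override
      public void endElement(String uri, String localName, String qName) {
        String text = cbuf.toString().trim();
        cbuf = new StringBuilder();     // reset for the next element, as in the patch
        // ... dispatch text based on qName ...
      }
    }
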
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageCorruption.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageCorruption.java
index d510dfc3b9..a2991f03fd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageCorruption.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageCorruption.java
@@ -85,7 +85,7 @@ long getId() {
   }
 
   String getType() {
-    StringBuffer s = new StringBuffer();
+    StringBuilder s = new StringBuilder();
     if (type.contains(PBImageCorruptionType.CORRUPT_NODE)) {
       s.append(PBImageCorruptionType.CORRUPT_NODE);
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
index 791f6529da..c35edd1f6d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
@@ -340,7 +340,7 @@ private class MyFile {
       for (int idx = 0; idx < nLevels; idx++) {
         levels[idx] = gen.nextInt(10);
       }
-      StringBuffer sb = new StringBuffer();
+      StringBuilder sb = new StringBuilder();
       for (int idx = 0; idx < nLevels; idx++) {
         sb.append(dirNames[levels[idx]]);
         sb.append("/");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSTrash.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSTrash.java
index 5dbb124882..ea73a63f38 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSTrash.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSTrash.java
@@ -180,7 +180,7 @@ private Trash getPerUserTrash(UserGroupInformation ugi,
       FileSystem fileSystem, Configuration config) throws IOException {
     // generate an unique path per instance
     UUID trashId = UUID.randomUUID();
-    StringBuffer sb = new StringBuffer()
+    StringBuilder sb = new StringBuilder()
         .append(ugi.getUserName())
         .append("-")
        .append(trashId.toString());
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockManager.java
index d9d236b664..a456041d1f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestBlockManager.java
@@ -1833,7 +1833,7 @@ public void testMetaSavePostponedMisreplicatedBlocks() throws IOException {
     DataInputStream in = new DataInputStream(fstream);
     BufferedReader reader = new BufferedReader(new InputStreamReader(in));
 
-    StringBuffer buffer = new StringBuffer();
+    StringBuilder buffer = new StringBuilder();
     String line;
     try {
       while ((line = reader.readLine()) != null) {
@@ -1861,7 +1861,7 @@ public void testMetaSaveMissingReplicas() throws Exception {
     FileInputStream fstream = new FileInputStream(file);
     DataInputStream in = new DataInputStream(fstream);
     BufferedReader reader = new BufferedReader(new InputStreamReader(in));
-    StringBuffer buffer = new StringBuffer();
+    StringBuilder buffer = new StringBuilder();
     String line;
     try {
       while ((line = reader.readLine()) != null) {
@@ -1933,7 +1933,7 @@ public void testMetaSaveInMaintenanceReplicas() throws Exception {
     FileInputStream fstream = new FileInputStream(file);
     DataInputStream in = new DataInputStream(fstream);
     BufferedReader reader = new BufferedReader(new InputStreamReader(in));
-    StringBuffer buffer = new StringBuffer();
+    StringBuilder buffer = new StringBuilder();
     String line;
     try {
       while ((line = reader.readLine()) != null) {
@@ -1989,7 +1989,7 @@ public void testMetaSaveDecommissioningReplicas() throws Exception {
     FileInputStream fstream = new FileInputStream(file);
     DataInputStream in = new DataInputStream(fstream);
     BufferedReader reader = new BufferedReader(new InputStreamReader(in));
-    StringBuffer buffer = new StringBuffer();
+    StringBuilder buffer = new StringBuilder();
     String line;
     try {
       while ((line = reader.readLine()) != null) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java
index 640994562e..e56b03ee18 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsDataLocality.java
@@ -196,7 +196,7 @@ public void testExcludeDataNodes() throws Exception {
 
     //For GETFILECHECKSUM, OPEN and APPEND,
     //the chosen datanode must be different with exclude nodes.
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (int i = 0; i < 2; i++) {
       sb.append(locations[i].getXferAddr());
       { // test GETFILECHECKSUM
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/ConfBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/ConfBlock.java
index 4c92ca0950..f080da46dc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/ConfBlock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/ConfBlock.java
@@ -83,7 +83,7 @@ public class ConfBlock extends HtmlBlock {
         __().
       tbody();
       for (ConfEntryInfo entry : info.getProperties()) {
-        StringBuffer buffer = new StringBuffer();
+        StringBuilder buffer = new StringBuilder();
         String[] sources = entry.getSource();
         //Skip the last entry, because it is always the same HDFS file, and
         // output them in reverse order so most recent is output first
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
index d0b9acee8e..6c69dde2e9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
@@ -2080,7 +2080,7 @@ private void writeOutput(TaskAttempt attempt, Configuration conf)
 
   private void validateOutput() throws IOException {
     File expectedFile = new File(new Path(outputDir, partFile).toString());
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append(key1).append('\t').append(val1).append("\n");
     expectedOutput.append(val1).append("\n");
     expectedOutput.append(val2).append("\n");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
index aad41966e8..cdc868d945 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
@@ -516,7 +516,7 @@ public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype,
     String expectDiag = "";
     List<String> diagnosticsList = ta.getDiagnostics();
     if (diagnosticsList != null && !diagnostics.isEmpty()) {
-      StringBuffer b = new StringBuffer();
+      StringBuilder b = new StringBuilder();
       for (String diag : diagnosticsList) {
         b.append(diag);
       }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java
index 5e4e9f70b3..756a6b2e08 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java
@@ -600,7 +600,7 @@ public void verifyAMJobGenericSecure(Job job, int mapsPending,
     String diagString = "";
     List<String> diagList = job.getDiagnostics();
     if (diagList != null && !diagList.isEmpty()) {
-      StringBuffer b = new StringBuffer();
+      StringBuilder b = new StringBuilder();
       for (String diag : diagList) {
         b.append(diag);
       }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
index 88a10e2a8d..aae1fd0b67 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
@@ -1027,8 +1027,8 @@ static void setupChildMapredLocalDirs(Task t, JobConf conf) {
     String taskId = t.getTaskID().toString();
     boolean isCleanup = t.isTaskCleanupTask();
     String user = t.getUser();
-    StringBuffer childMapredLocalDir =
-        new StringBuffer(localDirs[0] + Path.SEPARATOR
+    StringBuilder childMapredLocalDir =
+        new StringBuilder(localDirs[0] + Path.SEPARATOR
            + getLocalTaskDir(user, jobId, taskId, isCleanup));
     for (int i = 1; i < localDirs.length; i++) {
       childMapredLocalDir.append("," + localDirs[i] + Path.SEPARATOR
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
index acda0f43d9..5944d6cb32 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
@@ -145,7 +145,7 @@ public static String getApplicationWebURLOnJHSWithoutScheme(Configuration conf,
     InetSocketAddress address = NetUtils.createSocketAddr(
       hsAddress, getDefaultJHSWebappPort(),
       getDefaultJHSWebappURLWithoutScheme());
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     if (address.getAddress() != null &&
         (address.getAddress().isAnyLocalAddress() ||
          address.getAddress().isLoopbackAddress())) {
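The LocalJobRunner hunk above is the patch's recurring join idiom: seed the builder with the first element, then append delimiter plus element in a loop. A condensed sketch of that idiom (hypothetical helper, not part of this patch):

    // Mirrors the comma-joining loop in setupChildMapredLocalDirs above.
    static String joinWithComma(String[] parts) {
      StringBuilder sb = new StringBuilder(parts[0]); // first element, no delimiter
      for (int i = 1; i < parts.length; i++) {
        sb.append(",").append(parts[i]);
      }
      return sb.toString();
    }
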
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java
index 628ff15095..d87015f5cf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java
@@ -102,7 +102,7 @@ public void testNewApis() throws Exception {
   static String readOutput(Path outDir, Configuration conf)
       throws IOException {
     FileSystem fs = outDir.getFileSystem(conf);
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
 
     Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
        new Utils.OutputFileUtils.OutputFilesFilter()));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
index 91151f0d8e..46bb4b629c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
@@ -470,7 +470,7 @@ public static void addInputPaths(JobConf conf, String commaSeparatedPaths) {
    */
   public static void setInputPaths(JobConf conf, Path... inputPaths) {
     Path path = new Path(conf.getWorkingDirectory(), inputPaths[0]);
-    StringBuffer str = new StringBuffer(StringUtils.escapeString(path.toString()));
+    StringBuilder str = new StringBuilder(StringUtils.escapeString(path.toString()));
     for(int i = 1; i < inputPaths.length;i++) {
       str.append(StringUtils.COMMA_STR);
       path = new Path(conf.getWorkingDirectory(), inputPaths[i]);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InvalidInputException.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InvalidInputException.java
index faf1a3877c..809721f84d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InvalidInputException.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/InvalidInputException.java
@@ -61,7 +61,7 @@ public List<IOException> getProblems() {
    * @return the concatenated messages from all of the problems.
    */
   public String getMessage() {
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     Iterator<IOException> itr = problems.iterator();
     while(itr.hasNext()) {
       result.append(itr.next().getMessage());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MultiFileSplit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MultiFileSplit.java
index 8ea4f093ec..cd811bde9b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MultiFileSplit.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MultiFileSplit.java
@@ -70,7 +70,7 @@ private void addToSet(Set<String> set, String[] array) {
 
   @Override
   public String toString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for(int i=0; i < getPaths().length; i++) {
       sb.append(getPath(i).toUri().getPath() + ":0+" + getLength(i));
       if (i < getPaths().length -1) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java
index 9d73e2be76..9d4e7354f0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/SortedRanges.java
@@ -207,7 +207,7 @@ public synchronized void write(DataOutput out) throws IOException {
   }
 
   public String toString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     Iterator<Range> it = ranges.iterator();
     while(it.hasNext()) {
       Range range = it.next();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
index 36405328f5..4e5c21ea45 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
@@ -518,8 +518,8 @@ static String buildCommandLine(List<String> setup, List<String> cmd,
       throws IOException {
 
     String stdout = FileUtil.makeShellPath(stdoutFilename);
-    String stderr = FileUtil.makeShellPath(stderrFilename);
-    StringBuffer mergedCmd = new StringBuffer();
+    String stderr = FileUtil.makeShellPath(stderrFilename);
+    StringBuilder mergedCmd = new StringBuilder();
 
     // Export the pid of taskJvm to env variable JVM_PID.
     // Currently pid is not used on Windows
@@ -606,7 +606,7 @@ static String buildDebugScriptCommandLine(List<String> cmd, String debugout)
    */
   public static String addCommand(List<String> cmd, boolean isExecutable)
       throws IOException {
-    StringBuffer command = new StringBuffer();
+    StringBuilder command = new StringBuilder();
     for(String s: cmd) {
       command.append('\'');
       if (isExecutable) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java
index 0105559e8f..6c7413139d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FieldSelectionMapReduce.java
@@ -96,7 +96,7 @@ public class FieldSelectionMapReduce
       LoggerFactory.getLogger("FieldSelectionMapReduce");
 
   private String specToString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append("fieldSeparator: ").append(fieldSeparator).append("\n");
 
     sb.append("mapOutputKeyValueSpec: ").append(mapOutputKeyValueSpec).append(
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
index c73c5bdd83..dada6595b6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Job.java
@@ -476,7 +476,7 @@ public String toString() {
     } catch (IOException e) {
     } catch (InterruptedException ie) {
     }
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append("Job: ").append(status.getJobID()).append("\n");
     sb.append("Job File: ").append(status.getJobFile()).append("\n");
     sb.append("Job Tracking URL : ").append(status.getTrackingUrl());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobStatus.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobStatus.java
index d8b2321aca..03aeef33cb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobStatus.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobStatus.java
@@ -636,7 +636,7 @@ public synchronized void setUber(boolean isUber) {
   }
 
   public String toString() {
-    StringBuffer buffer = new StringBuffer();
+    StringBuilder buffer = new StringBuilder();
     buffer.append("job-id : " + jobid);
     buffer.append("uber-mode : " + isUber);
     buffer.append("map-progress : " + mapProgress);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
index 21c3823012..ae4c6e62fe 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
@@ -188,7 +188,7 @@ protected void setTaskTrackerHttp(String taskTrackerHttp) {
 
   @Override
   public String toString(){
-    StringBuffer buf = new StringBuffer();
+    StringBuilder buf = new StringBuilder();
     buf.append("Task Id : ");
     buf.append(taskId);
     buf.append(", Status : ");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueHistogram.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueHistogram.java
index b41a5bd302..0438f38078 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueHistogram.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/ValueHistogram.java
@@ -83,7 +83,7 @@ public void addNextValue(Object val) {
 
   public String getReport() {
     long[] counts = new long[items.size()];
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     Iterator iter = items.values().iterator();
     int i = 0;
     while (iter.hasNext()) {
@@ -133,7 +133,7 @@ public String getReport() {
    * the histogram
    */
   public String getReportDetails() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     Iterator<Entry<Object, Object>> iter = items.entrySet().iterator();
     while (iter.hasNext()) {
       Entry<Object, Object> en = iter.next();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java
index 5ee7e0f78b..1755935188 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/fieldsel/FieldSelectionHelper.java
@@ -121,10 +121,10 @@ private static String selectFields(String[] fields, List<Integer> fieldList,
       int allFieldsFrom, String separator) {
     String retv = null;
     int i = 0;
-    StringBuffer sb = null;
+    StringBuilder sb = null;
     if (fieldList != null && fieldList.size() > 0) {
       if (sb == null) {
-        sb = new StringBuffer();
+        sb = new StringBuilder();
       }
       for (Integer index : fieldList) {
         if (index < fields.length) {
@@ -135,7 +135,7 @@ private static String selectFields(String[] fields, List<Integer> fieldList,
     }
     if (allFieldsFrom >= 0) {
       if (sb == null) {
-        sb = new StringBuffer();
+        sb = new StringBuilder();
       }
       for (i = allFieldsFrom; i < fields.length; i++) {
         sb.append(fields[i]).append(separator);
@@ -168,7 +168,7 @@ public static int parseOutputKeyValueSpec(String keyValueSpec,
   public static String specToString(String fieldSeparator,
       String keyValueSpec, int allValueFieldsFrom, List<Integer> keyFieldList,
      List<Integer> valueFieldList) {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append("fieldSeparator: ").append(fieldSeparator).append("\n");
 
     sb.append("keyValueSpec: ").append(keyValueSpec).append("\n");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
index caebe6c823..8ed756367e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
@@ -803,7 +803,7 @@ public boolean accept(Path path) {
     }
 
     public String toString() {
-      StringBuffer buf = new StringBuffer();
+      StringBuilder buf = new StringBuilder();
       buf.append("[");
       for (PathFilter f: filters) {
         buf.append(f);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java
index 3c00689381..96c1e360a3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileSplit.java
@@ -175,7 +175,7 @@ public void write(DataOutput out) throws IOException {
 
   @Override
   public String toString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (int i = 0; i < paths.length; i++) {
       if (i == 0 ) {
         sb.append("Paths:");
@@ -188,7 +188,7 @@ public String toString() {
     }
     if (locations != null) {
       String locs = "";
-      StringBuffer locsb = new StringBuffer();
+      StringBuilder locsb = new StringBuilder();
       for (int i = 0; i < locations.length; i++) {
         locsb.append(locations[i] + ":");
       }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
index e74c3fa813..fd7d27d3bc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/FileInputFormat.java
@@ -569,7 +569,7 @@ public static void setInputPaths(Job job, Path... inputPaths)
      throws IOException {
     Configuration conf = job.getConfiguration();
     Path path = inputPaths[0].getFileSystem(conf).makeQualified(inputPaths[0]);
-    StringBuffer str = new StringBuffer(StringUtils.escapeString(path.toString()));
+    StringBuilder str = new StringBuilder(StringUtils.escapeString(path.toString()));
     for(int i = 1; i < inputPaths.length;i++) {
       str.append(StringUtils.COMMA_STR);
       path = inputPaths[i].getFileSystem(conf).makeQualified(inputPaths[i]);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java
index 1113bec188..e0dcdf91d8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/InvalidInputException.java
@@ -60,7 +60,7 @@ public List<IOException> getProblems() {
    * @return the concatenated messages from all of the problems.
    */
   public String getMessage() {
-    StringBuffer result = new StringBuffer();
+    StringBuilder result = new StringBuilder();
     Iterator<IOException> itr = problems.iterator();
     while(itr.hasNext()) {
       result.append(itr.next().getMessage());
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java
index 16ba22bfb6..48cde0e5f0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/jobcontrol/ControlledJob.java
@@ -90,7 +90,7 @@ public ControlledJob(Configuration conf) throws IOException {
 
   @Override
   public String toString() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     sb.append("job name:\t").append(this.job.getJobName()).append("\n");
     sb.append("job id:\t").append(this.controlID).append("\n");
     sb.append("job state:\t").append(this.state).append("\n");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java
index 2990ca99d3..aa541f3640 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/join/TupleWritable.java
@@ -147,7 +147,7 @@ public void remove() {
   * [<child1>,<child2>,...,<childn>]
   */
  public String toString() {
-    StringBuffer buf = new StringBuffer("[");
+    StringBuilder buf = new StringBuilder("[");
     for (int i = 0; i < values.length; ++i) {
       buf.append(has(i) ? values[i].toString() : "");
       buf.append(",");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplit.java
index d725196b9b..c2366d0d96 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplit.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplit.java
@@ -123,7 +123,7 @@ public void write(DataOutput out) throws IOException {
 
   @Override
   public String toString() {
-    StringBuffer buf = new StringBuffer();
+    StringBuilder buf = new StringBuilder();
     buf.append("data-size : " + inputDataLength + "\n");
     buf.append("start-offset : " + startOffset + "\n");
     buf.append("locations : " + "\n");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
index df871ca32a..59ef95bdd4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/Fetcher.java
@@ -678,7 +678,7 @@ private boolean verifySanity(long compressedLength, long decompressedLength,
   private URL getMapOutputURL(MapHost host, Collection<TaskAttemptID> maps
                               )  throws MalformedURLException {
     // Get the base url
-    StringBuffer url = new StringBuffer(host.getBaseUrl());
+    StringBuilder url = new StringBuilder(host.getBaseUrl());
 
     boolean first = true;
     for (TaskAttemptID mapId : maps) {
@@ -688,8 +688,10 @@ private URL getMapOutputURL(MapHost host, Collection<TaskAttemptID> maps
       url.append(mapId);
       first = false;
     }
-
-    LOG.debug("MapOutput URL for " + host + " -> " + url.toString());
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("MapOutput URL for " + host + " -> " + url.toString());
+    }
     return new URL(url.toString());
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java
index 83f9669bfe..173cd093e9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java
@@ -171,7 +171,7 @@ public void resolve(TaskCompletionEvent event) {
   }
 
   static URI getBaseURI(TaskAttemptID reduceId, String url) {
-    StringBuffer baseUrl = new StringBuffer(url);
+    StringBuilder baseUrl = new StringBuilder(url);
     if (!url.endsWith("/")) {
       baseUrl.append("/");
     }
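The Fetcher hunk above also wraps the debug line in LOG.isDebugEnabled(), so the URL string is only concatenated when debug logging is actually on. A minimal sketch of that guard pattern with slf4j, the logging facade these classes use (hypothetical class, not part of this patch):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class GuardedLogging {
      private static final Logger LOG = LoggerFactory.getLogger(GuardedLogging.class);

      static void report(String host, StringBuilder url) {
        // Without the guard, the concatenation runs even when DEBUG is off.
        if (LOG.isDebugEnabled()) {
          LOG.debug("MapOutput URL for " + host + " -> " + url.toString());
        }
      }
    }
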
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
index 0f65a29b13..396cf585db 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
@@ -520,7 +520,7 @@ Cluster createCluster() throws IOException {
   }
 
   private String getJobPriorityNames() {
-    StringBuffer sb = new StringBuffer();
+    StringBuilder sb = new StringBuilder();
     for (JobPriority p : JobPriority.values()) {
       // UNDEFINED_PRIORITY need not to be displayed in usage
       if (JobPriority.UNDEFINED_PRIORITY == p) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
index b646b04b74..4b45745821 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
@@ -175,7 +175,7 @@ public void testRecoveryUpgradeV1V2() throws Exception {
   private void validateContent(Path dir) throws IOException {
     File fdir = new File(dir.toUri().getPath());
     File expectedFile = new File(fdir, partFile);
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append(key1).append('\t').append(val1).append("\n");
     expectedOutput.append(val1).append("\n");
     expectedOutput.append(val2).append("\n");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
index 2aa7b34a00..7c53833842 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
@@ -227,7 +227,7 @@ private void validateContent(Path dir) throws IOException {
   private void validateContent(File dir) throws IOException {
     File expectedFile = new File(dir, partFile);
     assertTrue("Could not find "+expectedFile, expectedFile.exists());
-    StringBuffer expectedOutput = new StringBuffer();
+    StringBuilder expectedOutput = new StringBuilder();
     expectedOutput.append(key1).append('\t').append(val1).append("\n");
     expectedOutput.append(val1).append("\n");
     expectedOutput.append(val2).append("\n");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
index 18040f0044..1d22b2bdfc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
@@ -109,7 +109,7 @@ public class HsJobBlock extends HtmlBlock {
     // todo - switch to use JobInfo
     List<String> diagnostics = j.getDiagnostics();
     if(diagnostics != null && !diagnostics.isEmpty()) {
-      StringBuffer b = new StringBuffer();
+      StringBuilder b = new StringBuilder();
       for(String diag: diagnostics) {
         b.append(addTaskLinks(diag));
       }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
index 08044c6ff1..7d9d0eecb1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
@@ -117,7 +117,7 @@ public JobInfo(Job job) {
     this.diagnostics = "";
     List<String> diagnostics = job.getDiagnostics();
     if (diagnostics != null && !diagnostics.isEmpty()) {
-      StringBuffer b = new StringBuffer();
+      StringBuilder b = new StringBuilder();
       for (String diag : diagnostics) {
         b.append(diag);
       }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
index 3ca6db3ab4..a4426e6b85 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
@@ -534,7 +534,7 @@ public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype,
     String expectDiag = "";
     List<String> diagnosticsList = ta.getDiagnostics();
     if (diagnosticsList != null && !diagnostics.isEmpty()) {
-      StringBuffer b = new StringBuffer();
+      StringBuilder b = new StringBuilder();
       for (String diag : diagnosticsList) {
         b.append(diag);
       }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/VerifyJobsUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/VerifyJobsUtils.java
index f636e46c02..2d46f80178 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/VerifyJobsUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/VerifyJobsUtils.java
@@ -108,7 +108,7 @@ public static void verifyHsJobGenericSecure(Job job, Boolean uberized,
     String diagString = "";
     List<String> diagList = job.getDiagnostics();
     if (diagList != null && !diagList.isEmpty()) {
-      StringBuffer b = new StringBuffer();
+ StringBuilder b = new StringBuilder(); for (String diag : diagList) { b.append(diag); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/RandomTextWriterJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/RandomTextWriterJob.java index 1e3ee8c357..398b5a6015 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/RandomTextWriterJob.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/RandomTextWriterJob.java @@ -204,7 +204,7 @@ public void map(Text key, Text value, } private Text generateSentence(int noWords) { - StringBuffer sentence = new StringBuffer(); + StringBuilder sentence = new StringBuilder(); String space = " "; for (int i=0; i < noWords; ++i) { sentence.append(words[random.nextInt(words.length)]); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/AccumulatingReducer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/AccumulatingReducer.java index f6c2a06bfb..b6313494e4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/AccumulatingReducer.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/AccumulatingReducer.java @@ -73,7 +73,7 @@ public void reduce(Text key, // concatenate strings if (field.startsWith(VALUE_TYPE_STRING)) { - StringBuffer sSum = new StringBuffer(); + StringBuilder sSum = new StringBuilder(); while (values.hasNext()) sSum.append(values.next().toString()).append(";"); output.collect(key, new Text(sSum.toString())); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/JHLogAnalyzer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/JHLogAnalyzer.java index 5e3e745f02..8937bdafe3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/JHLogAnalyzer.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/JHLogAnalyzer.java @@ -773,7 +773,7 @@ public void parseLogFile(FileSystem fs, /** * Read lines until one ends with a " ." 
or "\" " */ - private StringBuffer resBuffer = new StringBuffer(); + private StringBuilder resBuffer = new StringBuilder(); private String readLine(BufferedReader reader) throws IOException { resBuffer.setLength(0); reader.mark(maxJobDelimiterLineLength); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java index 36f469385e..4d3352f219 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java @@ -132,7 +132,7 @@ public void generateTextFile(FileSystem fs, Path inputFile, */ private static String pad(long number, int length) { String str = String.valueOf(number); - StringBuffer value = new StringBuffer(); + StringBuilder value = new StringBuilder(); for (int i = str.length(); i < length; i++) { value.append("0"); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java index ec44dd77ef..80a9502774 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java @@ -677,7 +677,7 @@ private static void doSingleBzip2BufferSize(JobConf jConf) } private static String unquote(String in) { - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); for(int i=0; i < in.length(); ++i) { char ch = in.charAt(i); if (ch == '\\') { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java index 5fec24a1b1..1ae17584a6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java @@ -236,7 +236,7 @@ private ArrayList createFile(Path targetFile, CompressionCodec codec, } Writer writer = new OutputStreamWriter(ostream); try { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (int i = 0; i < numRecords; i++) { for (int j = 0; j < recordLen; j++) { sb.append(chars[charRand.nextInt(chars.length)]); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java index 
be7dcc5ec2..18d7010a49 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCFileOutputCommitter.java @@ -105,7 +105,7 @@ public void testCommitter() throws Exception { // validate output File expectedFile = new File(new Path(outDir, file).toString()); - StringBuffer expectedOutput = new StringBuffer(); + StringBuilder expectedOutput = new StringBuilder(); expectedOutput.append(key1).append('\t').append(val1).append("\n"); expectedOutput.append(val1).append("\n"); expectedOutput.append(val2).append("\n"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java index 9b6ebda593..f70a552327 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapProgress.java @@ -119,7 +119,7 @@ public boolean canCommit(TaskAttemptID taskid) throws IOException { public AMFeedback statusUpdate(TaskAttemptID taskId, TaskStatus taskStatus) throws IOException, InterruptedException { - StringBuffer buf = new StringBuffer("Task "); + StringBuilder buf = new StringBuilder("Task "); buf.append(taskId); if (taskStatus != null) { buf.append(" making progress to "); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java index af09e09535..a63dbec6d9 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMapRed.java @@ -763,7 +763,7 @@ public void runJob(int items) { SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, inFile, Text.class, Text.class); - StringBuffer content = new StringBuffer(); + StringBuilder content = new StringBuilder(); for (int i = 0; i < 1000; i++) { content.append(i).append(": This is one more line of content\n"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java index 60e32683c3..71f04078b6 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java @@ -80,7 +80,7 @@ static String launchWordCount(URI fileSys, JobConf conf, String input, FileSystem fs = FileSystem.get(fileSys, conf); configureWordCount(fs, 
conf, input, numMaps, numReduces, inDir, outDir); JobClient.runJob(conf); - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); { Path[] parents = FileUtil.stat2Paths(fs.listStatus(outDir.getParent())); Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir, @@ -137,7 +137,7 @@ static String launchExternal(URI uri, JobConf conf, String input, // set the tests jar file conf.setJarByClass(TestMiniMRClasspath.class); JobClient.runJob(conf); - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir, new Utils.OutputFileUtils .OutputFilesFilter())); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java index 43ead04b26..114b6054d3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMultipleTextOutputFormat.java @@ -106,7 +106,7 @@ public void testFormat() throws Exception { File expectedFile_11 = new File(new Path(workDir, file_11).toString()); //System.out.printf("expectedFile_11: %s\n", new Path(workDir, file_11).toString()); - StringBuffer expectedOutput = new StringBuffer(); + StringBuilder expectedOutput = new StringBuilder(); for (int i = 10; i < 20; i++) { expectedOutput.append(""+i).append('\t').append(""+i).append("\n"); } @@ -118,7 +118,7 @@ public void testFormat() throws Exception { File expectedFile_12 = new File(new Path(workDir, file_12).toString()); //System.out.printf("expectedFile_12: %s\n", new Path(workDir, file_12).toString()); - expectedOutput = new StringBuffer(); + expectedOutput = new StringBuilder(); for (int i = 20; i < 30; i++) { expectedOutput.append(""+i).append('\t').append(""+i).append("\n"); } @@ -130,7 +130,7 @@ public void testFormat() throws Exception { File expectedFile_13 = new File(new Path(workDir, file_13).toString()); //System.out.printf("expectedFile_13: %s\n", new Path(workDir, file_13).toString()); - expectedOutput = new StringBuffer(); + expectedOutput = new StringBuilder(); for (int i = 30; i < 40; i++) { expectedOutput.append(""+i).append('\t').append(""+i).append("\n"); } @@ -142,7 +142,7 @@ public void testFormat() throws Exception { File expectedFile_2 = new File(new Path(workDir, file_2).toString()); //System.out.printf("expectedFile_2: %s\n", new Path(workDir, file_2).toString()); - expectedOutput = new StringBuffer(); + expectedOutput = new StringBuilder(); for (int i = 10; i < 40; i++) { expectedOutput.append(""+i).append('\t').append(""+i).append("\n"); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java index 9a2576ec66..29a370de7c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java @@ -548,7 +548,7 @@ public void testGzipEmpty() throws IOException { } private static String unquote(String in) { - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); for(int i=0; i < in.length(); ++i) { char ch = in.charAt(i); if (ch == '\\') { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java index fd73410918..bfd8849ef1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java @@ -91,7 +91,7 @@ public static String ifmt(double d) { } public static String formatBytes(long numBytes) { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); boolean bDetails = true; double num = numBytes; @@ -116,7 +116,7 @@ public static String formatBytes(long numBytes) { } public static String formatBytes2(long numBytes) { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); long u = 0; if (numBytes >= TB) { u = numBytes / TB; @@ -145,7 +145,7 @@ public static String formatBytes2(long numBytes) { static final String regexpSpecials = "[]()?*+|.!^-\\~@"; public static String regexpEscape(String plain) { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); char[] ch = plain.toCharArray(); int csup = ch.length; for (int c = 0; c < csup; c++) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java index c971ccc6c0..68bd399dc4 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java @@ -82,7 +82,7 @@ private static String generateRandomWord() { private static String generateRandomLine() { long r = rand.nextLong() % 7; long n = r + 20; - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (int i = 0; i < n; i++) { sb.append(generateRandomWord()).append(" "); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MapReduceTestUtil.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MapReduceTestUtil.java index 4141d26933..9dd49350eb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MapReduceTestUtil.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MapReduceTestUtil.java @@ -97,7 +97,7 @@ public static String generateRandomWord() { public static String generateRandomLine() { long r = rand.nextLong() % 7; long n = r + 20; - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (int i = 0; i < n; i++) { sb.append(generateRandomWord()).append(" "); } @@ -401,7 +401,7 @@ public Counter getCounter(String group, String name) { public static String readOutput(Path outDir, Configuration conf) throws IOException { FileSystem fs = outDir.getFileSystem(conf); - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir, new Utils.OutputFileUtils.OutputFilesFilter())); @@ -436,7 +436,7 @@ public static String readTaskLog(TaskLog.LogName filter, org.apache.hadoop.mapred.TaskAttemptID taskId, boolean isCleanup) throws IOException { // string buffer to store task log - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); int res; // reads the whole tasklog into inputstream diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomTextWriter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomTextWriter.java index dca39dfd71..6f1adcb9e1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomTextWriter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomTextWriter.java @@ -100,7 +100,7 @@ enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN } public static String generateSentenceWithRand(ThreadLocalRandom rand, int noWords) { - StringBuffer sentence = new StringBuffer(words[rand.nextInt(words.length)]); + StringBuilder sentence = new StringBuilder(words[rand.nextInt(words.length)]); for (int i = 1; i < noWords; i++) { sentence.append(" ").append(words[rand.nextInt(words.length)]); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java index be9e6deff3..cf5e74004c 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java @@ -262,7 +262,7 @@ private ArrayList createFile(Path targetFile, CompressionCodec codec, } Writer writer = new OutputStreamWriter(ostream); try { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (int i = 0; i < numRecords; i++) { for (int j = 0; j < recordLen; j++) { sb.append(chars[charRand.nextInt(chars.length)]); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java index 855bb2225c..ef1a512af7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRCJCFileOutputCommitter.java @@ -119,7 +119,7 @@ public void testCommitter() throws Exception { // validate output File expectedFile = new File(new Path(outDir, partFile).toString()); - StringBuffer expectedOutput = new StringBuffer(); + StringBuilder expectedOutput = new StringBuilder(); expectedOutput.append(key1).append('\t').append(val1).append("\n"); expectedOutput.append(val1).append("\n"); expectedOutput.append(val2).append("\n"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java index e41c95c490..b3533482b5 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java @@ -106,7 +106,7 @@ public static String getResolvedMRHistoryWebAppURLWithoutScheme( JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS, JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT); } address = NetUtils.getConnectAddress(address); - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); InetAddress resolved = address.getAddress(); if (resolved == null || resolved.isAnyLocalAddress() || resolved.isLoopbackAddress()) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomTextWriter.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomTextWriter.java index 7e6c099a06..2340811bdd 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomTextWriter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomTextWriter.java @@ -154,7 +154,7 @@ public void map(Text key, Text value, } private Text generateSentence(int noWords) { - StringBuffer sentence = new StringBuffer(); + StringBuilder sentence = new StringBuilder(); String space = " "; for (int i=0; i < noWords; ++i) { sentence.append(words[random.nextInt(words.length)]); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java index a30d62c33c..1f22437228 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Pentomino.java @@ -142,7 +142,7 @@ static class Point implements ColumnName { public static String stringifySolution(int width, int 
height, List> solution) { String[][] picture = new String[height][width]; - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); // for each piece placement... for(List row: solution) { // go through to find which piece was placed diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java index 402ff028df..632ab53695 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java @@ -66,7 +66,7 @@ protected static interface ColumnName { */ static String stringifySolution(int size, List> solution) { int[][] picture = new int[size][size]; - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); // go through the rows selected in the model and build a picture of the // solution. for(List row: solution) { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java index 6df1f1e497..53fc52d342 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java @@ -47,7 +47,7 @@ static class Split { this.filename = filename; } public String toString() { - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); result.append(filename); result.append(" on "); for(Host host: locations) { @@ -64,7 +64,7 @@ static class Host { this.hostname = hostname; } public String toString() { - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); result.append(splits.size()); result.append(" "); result.append(hostname); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/AbstractITCommitProtocol.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/AbstractITCommitProtocol.java index 3a7cceb236..165379d1dc 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/AbstractITCommitProtocol.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/AbstractITCommitProtocol.java @@ -722,7 +722,7 @@ private void validateContent(Path dir, } Path expectedFile = getPart0000(dir); log().debug("Validating content in {}", expectedFile); - StringBuffer expectedOutput = new StringBuffer(); + StringBuilder expectedOutput = new StringBuilder(); expectedOutput.append(KEY_1).append('\t').append(VAL_1).append("\n"); expectedOutput.append(VAL_1).append("\n"); expectedOutput.append(VAL_2).append("\n"); diff --git a/hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/JobBase.java b/hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/JobBase.java index 7267fdecac..2bea8957d3 100644 --- a/hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/JobBase.java +++ b/hadoop-tools/hadoop-datajoin/src/main/java/org/apache/hadoop/contrib/utils/join/JobBase.java @@ -143,7 +143,7 @@ protected void 
report() { * */ protected String getReport() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); Iterator iter = this.longCounters.entrySet().iterator(); while (iter.hasNext()) { diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpSync.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpSync.java index dbc86fd0b4..a6f01261ca 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpSync.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpSync.java @@ -614,7 +614,7 @@ private Path translateRenamedPath(Path sourcePath, if (sourcePath.equals(renameItem.getSource())) { return renameItem.getTarget(); } - StringBuffer sb = new StringBuffer(sourcePath.toString()); + StringBuilder sb = new StringBuilder(sourcePath.toString()); String remain = sb.substring(renameItem.getSource().toString().length() + 1); return new Path(renameItem.getTarget(), remain); diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java index e77b2031a7..6752329b48 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java @@ -155,7 +155,7 @@ public static String getRelativePath(Path sourceRootPath, Path childPath) { * @return - String containing first letters of each attribute to preserve */ public static String packAttributes(EnumSet attributes) { - StringBuffer buffer = new StringBuffer(FileAttribute.values().length); + StringBuilder buffer = new StringBuilder(FileAttribute.values().length); int len = 0; for (FileAttribute attribute : attributes) { buffer.append(attribute.name().charAt(0)); diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java index 20eb535d0c..603b1fa6ee 100644 --- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java +++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java @@ -140,7 +140,7 @@ public String getAnonymizedValue(StatePool statePool, Configuration conf) { } private void anonymize(StatePool pool) { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); NodeNameState state = (NodeNameState) pool.getState(getClass()); if (state == null) { state = new NodeNameState(); diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java index ef62505c4c..3d6541565c 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapRed.java @@ -254,7 +254,7 @@ void addJobConfToEnvironment(JobConf jobconf, Properties env) { } String safeEnvVarName(String var) { - StringBuffer safe = new StringBuffer(); + StringBuilder safe = new StringBuilder(); int len = var.length(); for (int i = 0; i < len; i++) { char c = var.charAt(i); diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java 
b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java index 023371ce99..ceac3fe623 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java @@ -291,7 +291,7 @@ void parseArgv() { LOG.warn("-file option is deprecated, please use generic option" + " -files instead."); - StringBuffer fileList = new StringBuffer(); + StringBuilder fileList = new StringBuilder(); for (String file : values) { packageFiles_.add(file); try { diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java index 974cdc7c8d..416aa07b99 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java @@ -23,9 +23,7 @@ import java.util.regex.*; import org.apache.hadoop.io.DataOutputBuffer; -import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.Text; -import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.mapred.Reporter; diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java index 041d527ab1..0d330b7c25 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java @@ -128,7 +128,7 @@ protected String[] genArgs() { } protected void checkOutput() throws IOException { - StringBuffer output = new StringBuffer(256); + StringBuilder output = new StringBuilder(256); Path[] fileList = FileUtil.stat2Paths(fileSys.listStatus( new Path(OUTPUT_DIR))); for (int i = 0; i < fileList.length; i++){ diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/UtilTest.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/UtilTest.java index 2378c7b414..af9056c923 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/UtilTest.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/UtilTest.java @@ -86,7 +86,7 @@ void redirectIfAntJunit() throws IOException } public static String collate(List args, String sep) { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); Iterator it = args.iterator(); while (it.hasNext()) { if (buf.length() > 0) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntityGroupId.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntityGroupId.java index 710a1345dc..7acc666053 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntityGroupId.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timeline/TimelineEntityGroupId.java @@ -144,7 +144,7 @@ public String toString() { public static TimelineEntityGroupId 
fromString(String timelineEntityGroupIdStr) { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); Iterator it = SPLITTER.split(timelineEntityGroupIdStr).iterator(); if (!it.next().equals(TIMELINE_ENTITY_GROUPID_STR_PREFIX)) { throw new IllegalArgumentException( diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/resource/PlacementConstraint.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/resource/PlacementConstraint.java index 79196fbf85..bb87215a46 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/resource/PlacementConstraint.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/resource/PlacementConstraint.java @@ -413,7 +413,7 @@ public boolean equals(Object o) { @Override public String toString() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); if (TargetType.ALLOCATION_TAG == this.targetType) { // following by a comma separated tags sb.append(String.join(",", getTargetValues())); @@ -643,7 +643,7 @@ public int hashCode() { @Override public String toString() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append("cardinality").append(",").append(getScope()).append(","); for (String tag : getAllocationTags()) { sb.append(tag).append(","); @@ -717,7 +717,7 @@ public T accept(Visitor visitor) { @Override public String toString() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append("and("); Iterator it = getChildren().iterator(); while (it.hasNext()) { @@ -759,7 +759,7 @@ public T accept(Visitor visitor) { @Override public String toString() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append("or("); Iterator it = getChildren().iterator(); while (it.hasNext()) { @@ -805,7 +805,7 @@ public T accept(Visitor visitor) { @Override public String toString() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append("DelayedOr("); Iterator it = getChildren().iterator(); while (it.hasNext()) { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java index 6ec8549be9..d9b16cfa21 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestLogsCLI.java @@ -1491,7 +1491,7 @@ public void testSaveContainerLogsLocally() throws Exception { private String readContainerContent(Path containerPath, FileSystem fs) throws IOException { assertTrue(fs.exists(containerPath)); - StringBuffer inputLine = new StringBuffer(); + StringBuilder inputLine = new StringBuilder(); try (BufferedReader reader = new BufferedReader(new InputStreamReader( fs.open(containerPath)))) { String tmp; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java index ed48a92a04..f347e928fa 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java @@ -568,7 +568,7 @@ private static ProcessInfo constructProcessInfo(ProcessInfo pinfo, */ @Override public String toString() { - StringBuffer pTree = new StringBuffer("[ "); + StringBuilder pTree = new StringBuilder("[ "); for (String p : processTree.keySet()) { pTree.append(p); pTree.append(" "); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java index 56d9f25710..349c3e0518 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/JQueryUI.java @@ -130,7 +130,7 @@ protected void initDataTables(List list) { } // for inserting stateSaveInit int pos = init.indexOf('{') + 1; - init = new StringBuffer(init).insert(pos, stateSaveInit).toString(); + init = new StringBuilder(init).insert(pos, stateSaveInit).toString(); list.add(join(id, "DataTable = $('#", id, "').dataTable(", init, ").fnSetFilteringDelay(188);")); String postInit = $(postInitID(DATATABLES, id)); @@ -146,7 +146,7 @@ protected void initDataTables(List list) { init = defaultInit; } int pos = init.indexOf('{') + 1; - init = new StringBuffer(init).insert(pos, stateSaveInit).toString(); + init = new StringBuilder(init).insert(pos, stateSaveInit).toString(); list.add(join(" $('", escapeEcmaScript(selector), "').dataTable(", init, ").fnSetFilteringDelay(288);")); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java index 007721f2ec..054e751ff6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java @@ -401,7 +401,7 @@ void testContainerLogsFileAccess() throws IOException { new BufferedReader(new FileReader(new File(remoteAppLogFile .toUri().getRawPath()))); String line; - StringBuffer sb = new StringBuffer(""); + StringBuilder sb = new StringBuilder(""); while ((line = in.readLine()) != null) { LOG.info(line); sb.append(line); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/sql/FederationQueryRunner.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/sql/FederationQueryRunner.java index 4ff56eef01..7db32701e6 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/sql/FederationQueryRunner.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/sql/FederationQueryRunner.java @@ -181,7 +181,7 @@ protected void 
rethrow(SQLException cause, String sql, Object... params) causeMessage = ""; } - StringBuffer msg = new StringBuffer(causeMessage); + StringBuilder msg = new StringBuilder(causeMessage); msg.append(" Query: "); msg.append(sql); msg.append(" Parameters: "); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/deletion/task/DockerContainerDeletionTask.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/deletion/task/DockerContainerDeletionTask.java index 8882b45544..9de365a128 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/deletion/task/DockerContainerDeletionTask.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/deletion/task/DockerContainerDeletionTask.java @@ -65,7 +65,7 @@ public void run() { */ @Override public String toString() { - StringBuffer sb = new StringBuffer("DockerContainerDeletionTask : "); + StringBuilder sb = new StringBuilder("DockerContainerDeletionTask : "); sb.append(" id : ").append(this.getTaskId()); sb.append(" containerId : ").append(this.containerId); return sb.toString().trim(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/privileged/PrivilegedOperationExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/privileged/PrivilegedOperationExecutor.java index d3e96d8d5a..c648808efa 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/privileged/PrivilegedOperationExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/privileged/PrivilegedOperationExecutor.java @@ -281,7 +281,7 @@ public IOStreamPair executePrivilegedInteractiveOperation( return null; } - StringBuffer finalOpArg = new StringBuffer(PrivilegedOperation + StringBuilder finalOpArg = new StringBuilder(PrivilegedOperation .CGROUP_ARG_PREFIX); boolean noTasks = true; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkPacketTaggingHandlerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkPacketTaggingHandlerImpl.java index efe07e0228..e861a9c542 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkPacketTaggingHandlerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/NetworkPacketTaggingHandlerImpl.java @@ -104,7 +104,7 @@ 
public List preStart(Container container) //executable. String tasksFile = cGroupsHandler.getPathForCGroupTasks( CGroupsHandler.CGroupController.NET_CLS, containerIdStr); - String opArg = new StringBuffer(PrivilegedOperation.CGROUP_ARG_PREFIX) + String opArg = new StringBuilder(PrivilegedOperation.CGROUP_ARG_PREFIX) .append(tasksFile).toString(); List ops = new ArrayList<>(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficControlBandwidthHandlerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficControlBandwidthHandlerImpl.java index efe9db35d8..67c32fd085 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficControlBandwidthHandlerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficControlBandwidthHandlerImpl.java @@ -101,7 +101,7 @@ public List bootstrap(Configuration configuration) containerBandwidthMbit = (int) Math.ceil((double) yarnBandwidthMbit / MAX_CONTAINER_COUNT); - StringBuffer logLine = new StringBuffer("strict mode is set to :") + StringBuilder logLine = new StringBuilder("strict mode is set to :") .append(strictMode).append(System.lineSeparator()); if (strictMode) { @@ -152,7 +152,7 @@ public List preStart(Container container) //executable. String tasksFile = cGroupsHandler.getPathForCGroupTasks( CGroupsHandler.CGroupController.NET_CLS, containerIdStr); - String opArg = new StringBuffer(PrivilegedOperation.CGROUP_ARG_PREFIX) + String opArg = new StringBuilder(PrivilegedOperation.CGROUP_ARG_PREFIX) .append(tasksFile).toString(); List ops = new ArrayList<>(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java index e5abca2826..51667786ad 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java @@ -225,7 +225,7 @@ private boolean checkIfAlreadyBootstrapped(String state) if (pattern.matcher(state).find()) { LOG.debug("Matched regex: {}", regex); } else { - String logLine = new StringBuffer("Failed to match regex: ") + String logLine = new StringBuilder("Failed to match regex: ") .append(regex).append(" Current state: ").append(state).toString(); LOG.warn(logLine); return false; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/DockerCommand.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/DockerCommand.java index 260c5b53f5..7859d5a89d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/DockerCommand.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/DockerCommand.java @@ -85,7 +85,7 @@ public Map<String, List<String>> getDockerCommandWithArguments() { @Override public String toString() { - StringBuffer ret = new StringBuffer(this.command); + StringBuilder ret = new StringBuilder(this.command); for (Map.Entry<String, List<String>> entry : commandArguments.entrySet()) { ret.append(" ").append(entry.getKey()); ret.append("=").append(StringUtils.join(",", entry.getValue())); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalCacheDirectoryManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalCacheDirectoryManager.java index 45746e786f..55f7df7c7a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalCacheDirectoryManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/LocalCacheDirectoryManager.java @@ -165,7 +165,7 @@ static String getRelativePath(int directoryNo) { String relativePath = ""; if (directoryNo > 0) { String tPath = Integer.toString(directoryNo - 1, DIRECTORIES_PER_LEVEL); - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); if (tPath.length() == 1) { sb.append(tPath.charAt(0)); } else { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/com/nvidia/NvidiaGPUPluginForRuntimeV2.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/com/nvidia/NvidiaGPUPluginForRuntimeV2.java index bf7958c0f7..5a0f801449 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/com/nvidia/NvidiaGPUPluginForRuntimeV2.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/com/nvidia/NvidiaGPUPluginForRuntimeV2.java @@ -172,7 +172,7 @@ public DeviceRuntimeSpec onDevicesAllocated(Set<Device> allocatedDevices, if (yarnRuntime == YarnRuntimeType.RUNTIME_DOCKER) { String nvidiaRuntime = "nvidia"; String nvidiaVisibleDevices = "NVIDIA_VISIBLE_DEVICES"; - StringBuffer gpuMinorNumbersSB = new StringBuffer(); + StringBuilder gpuMinorNumbersSB = new StringBuilder(); for (Device device : allocatedDevices) { gpuMinorNumbersSB.append(device.getMinorNumber() + ","); }
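Two patterns recur across this patch and are both visible in the hunks above: a method-local StringBuilder replacing a StringBuffer whose synchronized appends buy nothing (the buffer never escapes the method, so no other thread can observe it), and, in the Fetcher hunk earlier, string construction for a log line guarded by LOG.isDebugEnabled() so the concatenation is skipped entirely when debug logging is off. A minimal self-contained sketch of both, assuming slf4j as in the rest of the codebase; the class and method names (MinorNumberJoiner, joinMinorNumbers) are illustrative only, not part of Hadoop:

import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MinorNumberJoiner {
  private static final Logger LOG =
      LoggerFactory.getLogger(MinorNumberJoiner.class);

  // Join device minor numbers with trailing commas, as the Nvidia plugin does.
  // StringBuilder is safe here: the buffer is confined to this method.
  static String joinMinorNumbers(List<Integer> minorNumbers) {
    StringBuilder sb = new StringBuilder();
    for (int minor : minorNumbers) {
      sb.append(minor).append(',');
    }
    // Guarding with isDebugEnabled() avoids building the message string
    // when debug logging is off, mirroring the Fetcher change.
    if (LOG.isDebugEnabled()) {
      LOG.debug("joined minor numbers -> " + sb);
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    System.out.println(joinMinorNumbers(List.of(0, 1, 3))); // prints 0,1,3,
  }
}

Note that String.join(",", ...) would drop the trailing comma the plugin emits; the patch deliberately leaves behaviour untouched and swaps only the buffer class, which is why almost every hunk is a one-line change.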
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMStateStoreService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMStateStoreService.java index aa56c6f247..ee41eab77e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMStateStoreService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/recovery/NMStateStoreService.java @@ -186,7 +186,7 @@ public void setLogDir(String logDir) { @Override public String toString() { - return new StringBuffer("Status: ").append(getStatus()) + return new StringBuilder("Status: ").append(getStatus()) .append(", Exit code: ").append(exitCode) .append(", Version: ").append(version) .append(", Start Time: ").append(startTime) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/privileged/TestPrivilegedOperationExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/privileged/TestPrivilegedOperationExecutor.java index c5b2e9711c..0823801f7f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/privileged/TestPrivilegedOperationExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/privileged/TestPrivilegedOperationExecutor.java @@ -213,7 +213,7 @@ public void testSquashCGroupOperationsWithValidOperations() { try { PrivilegedOperation op = PrivilegedOperationExecutor .squashCGroupOperations(ops); - String expected = new StringBuffer + String expected = new StringBuilder (PrivilegedOperation.CGROUP_ARG_PREFIX) .append(cGroupTasks1).append(PrivilegedOperation .LINUX_FILE_PATH_SEPARATOR) diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestNetworkPacketTaggingHandlerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestNetworkPacketTaggingHandlerImpl.java index 74f6bff6af..e2d107d068 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestNetworkPacketTaggingHandlerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestNetworkPacketTaggingHandlerImpl.java @@ -70,7 +70,7 @@ public void setup() { privilegedOperationExecutorMock = mock(PrivilegedOperationExecutor.class); cGroupsHandlerMock = mock(CGroupsHandler.class); conf = new YarnConfiguration(); - tmpPath = new StringBuffer(System.getProperty("test.build.data")) + tmpPath = new 
StringBuilder(System.getProperty("test.build.data")) .append('/').append("hadoop.tmp.dir").toString(); containerIdMock = mock(ContainerId.class); containerMock = mock(Container.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficControlBandwidthHandlerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficControlBandwidthHandlerImpl.java index ee6b41e758..08f5ee390a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficControlBandwidthHandlerImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficControlBandwidthHandlerImpl.java @@ -73,7 +73,7 @@ public void setup() { cGroupsHandlerMock = mock(CGroupsHandler.class); trafficControllerMock = mock(TrafficController.class); conf = new YarnConfiguration(); - tmpPath = new StringBuffer(System.getProperty("test.build.data")).append + tmpPath = new StringBuilder(System.getProperty("test.build.data")).append ('/').append("hadoop.tmp.dir").toString(); device = YarnConfiguration.DEFAULT_NM_NETWORK_RESOURCE_INTERFACE; containerIdMock = mock(ContainerId.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficController.java index 33b8434c9a..fd77a953d1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficController.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficController.java @@ -96,7 +96,7 @@ public class TestTrafficController { public void setup() { privilegedOperationExecutorMock = mock(PrivilegedOperationExecutor.class); conf = new YarnConfiguration(); - tmpPath = new StringBuffer(System.getProperty("test.build.data")).append + tmpPath = new StringBuilder(System.getProperty("test.build.data")).append ('/').append("hadoop.tmp.dir").toString(); conf.set("hadoop.tmp.dir", tmpPath); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java index c5f508778f..5950a137d2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java +++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java @@ -173,7 +173,7 @@ public class TestDockerContainerRuntime { private String[] testCapabilities; private final String signalPid = "1234"; private final String tmpPath = - new StringBuffer(System.getProperty("test.build.data")) + new StringBuilder(System.getProperty("test.build.data")) .append('/').append("hadoop.tmp.dir").toString(); private static final String RUNTIME_TYPE = "DOCKER"; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestHdfsManifestToResourcesPlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestHdfsManifestToResourcesPlugin.java index aa4005e4db..735f19fbab 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestHdfsManifestToResourcesPlugin.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestHdfsManifestToResourcesPlugin.java @@ -54,7 +54,7 @@ public class TestHdfsManifestToResourcesPlugin { private static final Logger LOG = LoggerFactory.getLogger(TestHdfsManifestToResourcesPlugin.class); private Configuration conf; - private String tmpPath = new StringBuffer( + private String tmpPath = new StringBuilder( System.getProperty("test.build.data")) .append('/').append("hadoop.tmp.dir").toString(); private static final String LAYER_MEDIA_TYPE = diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestImageTagToManifestPlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestImageTagToManifestPlugin.java index 3c2a951597..9164b746b9 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestImageTagToManifestPlugin.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestImageTagToManifestPlugin.java @@ -54,7 +54,7 @@ public class TestImageTagToManifestPlugin { private MockImageTagToManifestPlugin mockImageTagToManifestPlugin; private Configuration conf; private String tmpPath = - new StringBuffer(System.getProperty("test.build.data")) + new StringBuilder(System.getProperty("test.build.data")) .append('/').append("hadoop.tmp.dir").toString(); private ObjectMapper mapper; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestRuncContainerRuntime.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestRuncContainerRuntime.java index 8a541bbe1a..66ed95e740 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestRuncContainerRuntime.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestRuncContainerRuntime.java @@ -168,7 +168,7 @@ public void setup() throws ContainerExecutionException { mockExecutor = Mockito .mock(PrivilegedOperationExecutor.class); mockCGroupsHandler = Mockito.mock(CGroupsHandler.class); - tmpPath = new StringBuffer(System.getProperty("test.build.data")) + tmpPath = new StringBuilder(System.getProperty("test.build.data")) .append('/').append("hadoop.tmp.dir").toString(); containerId = "container_e11_1518975676334_14532816_01_000001"; container = mock(Container.class); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/TestDockerCommandExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/TestDockerCommandExecutor.java index e5737d9246..93bedbf366 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/TestDockerCommandExecutor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/TestDockerCommandExecutor.java @@ -85,7 +85,7 @@ public void setUp() throws Exception { mockExecutor = mock(PrivilegedOperationExecutor.class); mockCGroupsHandler = mock(CGroupsHandler.class); configuration = new Configuration(); - String tmpPath = new StringBuffer(System.getProperty("test.build.data")) + String tmpPath = new StringBuilder(System.getProperty("test.build.data")) .append('/').append("hadoop.tmp.dir").toString(); configuration.set("hadoop.tmp.dir", tmpPath); runtime = new DockerLinuxContainerRuntime(mockExecutor, mockCGroupsHandler); @@ -115,7 +115,7 @@ public Context createMockNMContext() { LocalDirsHandlerService localDirsHandler = mock(LocalDirsHandlerService.class); - String tmpPath = new StringBuffer(System.getProperty("test.build.data")) + String tmpPath = new StringBuilder(System.getProperty("test.build.data")) .append('/').append("hadoop.tmp.dir").toString(); ConcurrentMap<ContainerId, Container> containerMap =
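
Each of these node-manager test setups computes the same temporary path, so the substitution is mechanical. The idiom, isolated (test.build.data is normally set by the build; the java.io.tmpdir fallback is an added assumption so the snippet runs standalone):

// Builds <test.build.data>/hadoop.tmp.dir the way these setup methods do.
String base = System.getProperty("test.build.data",
    System.getProperty("java.io.tmpdir"));
String tmpPath = new StringBuilder(base)
    .append('/').append("hadoop.tmp.dir").toString();

Since every one of these builders is method-local and used once, plain concatenation would serve equally well; the patch deliberately keeps the existing shape and only drops the unneeded synchronization.
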
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java index 95cca2ca3b..e13ada2ac7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java @@ -47,7 +47,7 @@ public void testHierarchicalSubDirectoryCreation() { // Testing path generation from "0" to "0/0/z/z" for (int i = 1; i <= 37 * 36 * 36; i++) { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); String num = Integer.toString(i - 1, 36); if (num.length() == 1) { sb.append(num.charAt(0)); } else { diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/deviceframework/TestDevicePluginAdapter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/deviceframework/TestDevicePluginAdapter.java index 78d794e537..fc6bd3714a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/deviceframework/TestDevicePluginAdapter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/deviceframework/TestDevicePluginAdapter.java @@ -977,7 +977,7 @@ private DeviceRuntimeSpec generateSpec(String version, if (version.equals("v2")) { String nvidiaRuntime = "nvidia"; String nvidiaVisibleDevices = "NVIDIA_VISIBLE_DEVICES"; - StringBuffer gpuMinorNumbersSB = new StringBuffer(); + StringBuilder gpuMinorNumbersSB = new StringBuilder(); for (Device device : allocatedDevices) { gpuMinorNumbersSB.append(device.getMinorNumber() + ","); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/PlanQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/PlanQueue.java index 847c90e545..fc7b2a6aa1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/PlanQueue.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/PlanQueue.java @@ -72,7 +72,7 @@ public PlanQueue(CapacitySchedulerQueueContext queueContext, String queueName, updateQuotas(configuredUserLimit, configuredUserLimitFactor, maxAppsForReservation, configuredMaxAppsPerUserForReservation); - StringBuffer queueInfo = new StringBuffer(); + StringBuilder queueInfo = new StringBuilder(); queueInfo.append("Created Plan Queue: ").append(queueName) .append("\nwith capacity: [").append(super.getCapacity()) .append("]\nwith max capacity: [").append(super.getMaximumCapacity())
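
The TestLocalCacheDirectoryManager hunk above regenerates the expected output of LocalCacheDirectoryManager.getRelativePath, changed earlier in this patch: a directory number is rendered in base 36 and each digit becomes one path level. A simplified reconstruction of that layout, not the exact production method (the real one also remaps the top level so the 0th sub-directory is reused, which is why the test counts to 37 * 36 * 36):

// Simplified: 1 -> "0", 36 -> "z", 37 -> "1/0"; one base-36 digit per level.
static String relativePath(int directoryNo) {
  if (directoryNo <= 0) {
    return "";
  }
  String tPath = Integer.toString(directoryNo - 1, 36);
  StringBuilder sb = new StringBuilder();
  for (int i = 0; i < tPath.length(); i++) {
    if (i > 0) {
      sb.append('/');
    }
    sb.append(tPath.charAt(i));
  }
  return sb.toString();
}
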
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java index a65775104e..62b6703567 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java @@ -600,7 +600,7 @@ private RMNode getRMNode(final String nodeId) { * @return The str String after escaping invalid xml characters. */ public static String escapeInvalidXMLCharacters(String str) { - StringBuffer out = new StringBuffer(); + StringBuilder out = new StringBuilder(); final int strlen = str.length(); final String substitute = "\uFFFD"; int idx = 0; diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AllocationTagsInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AllocationTagsInfo.java index ee09aa2f03..c60f167318 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AllocationTagsInfo.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AllocationTagsInfo.java @@ -45,7 +45,7 @@ public void addAllocationTag(AllocationTagInfo info) { @Override public String toString() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); Iterator<AllocationTagInfo> it = allocationTagInfo.iterator(); while (it.hasNext()) { AllocationTagInfo current = it.next(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/constraint/TestPlacementProcessor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/constraint/TestPlacementProcessor.java index 837d78df8d..e7c1d0ca6d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/constraint/TestPlacementProcessor.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/constraint/TestPlacementProcessor.java @@ -896,7 +896,7 @@ private static void printTags(Collection<MockNM> nodes, for (MockNM nm : nodes) { Map<String, Long> nmTags = atm .getAllocationTagsWithCount(nm.getNodeId()); - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); if (nmTags != null) { nmTags.forEach((tag, count) -> sb.append(tag + "(" + count + "),"));
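
escapeInvalidXMLCharacters above is another purely local builder: it walks the string by code point, copies through those legal in XML 1.0, and substitutes U+FFFD for the rest. A sketch consistent with the visible hunk; the validity ranges come from the XML 1.0 specification, and the loop body is a reconstruction rather than the verbatim method:

// Replaces code points that are illegal in XML 1.0 with U+FFFD.
public static String escapeInvalidXMLCharacters(String str) {
  StringBuilder out = new StringBuilder();
  final String substitute = "\uFFFD";
  int idx = 0;
  while (idx < str.length()) {
    final int cpt = str.codePointAt(idx);
    idx += Character.charCount(cpt);
    boolean valid = cpt == 0x9 || cpt == 0xA || cpt == 0xD
        || (cpt >= 0x20 && cpt <= 0xD7FF)
        || (cpt >= 0xE000 && cpt <= 0xFFFD)
        || (cpt >= 0x10000 && cpt <= 0x10FFFF);
    if (valid) {
      out.appendCodePoint(cpt);
    } else {
      out.append(substitute);
    }
  }
  return out.toString();
}

Because the method is static and the builder never leaks, there is no aliasing that could have justified StringBuffer here in the first place.
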
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/constraint/algorithm/TestCircularIterator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/constraint/algorithm/TestCircularIterator.java index 5ce76b0894..bb005a04ee 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/constraint/algorithm/TestCircularIterator.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/constraint/algorithm/TestCircularIterator.java @@ -35,7 +35,7 @@ public void testIteration() throws Exception { List<String> list = Arrays.asList("a", "b", "c", "d"); CircularIterator<String> ci = new CircularIterator<>(null, list.iterator(), list); - StringBuffer sb = new StringBuffer(""); + StringBuilder sb = new StringBuilder(""); while (ci.hasNext()) { sb.append(ci.next()); } @@ -44,7 +44,7 @@ public void testIteration() throws Exception { Iterator<String> lIter = list.iterator(); lIter.next(); lIter.next(); - sb = new StringBuffer(""); + sb = new StringBuilder(""); ci = new CircularIterator<>(null, lIter, list); while (ci.hasNext()) { sb.append(ci.next()); @@ -55,7 +55,7 @@ public void testIteration() throws Exception { lIter.next(); lIter.next(); lIter.next(); - sb = new StringBuffer(""); + sb = new StringBuilder(""); ci = new CircularIterator<>("x", lIter, list); while (ci.hasNext()) { sb.append(ci.next()); @@ -65,7 +65,7 @@ public void testIteration() throws Exception { list = Arrays.asList("a"); lIter = list.iterator(); lIter.next(); - sb = new StringBuffer(""); + sb = new StringBuilder(""); ci = new CircularIterator<>("y", lIter, list); while (ci.hasNext()) { sb.append(ci.next()); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java index c6148cd2f3..5e8f60253c 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/TestQueuePlacementPolicy.java @@ -99,7 +99,7 @@ public void cleanTest() { @Test public void testSpecifiedUserPolicy() throws Exception { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -118,7 +118,7 @@ public void testNoCreate() throws Exception { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -144,7 +144,7 @@ public void testSpecifiedThenReject() throws Exception { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -160,7 +160,7 @@ public void testOmittedTerminalRule() { - StringBuffer sb = new StringBuffer(); +
StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -170,7 +170,7 @@ public void testOmittedTerminalRule() { @Test public void testTerminalRuleInMiddle() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -184,7 +184,7 @@ public void testTerminals() { // The default rule is no longer considered terminal when the create flag // is false. The throw now happens when configuring not when assigning the // application - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -197,7 +197,7 @@ public void testDefaultRuleWithQueueAttribute() throws Exception { // This test covers the use case where we would like default rule // to point to a different queue by default rather than root.default createQueue(FSQueueType.LEAF, "root.someDefaultQueue"); - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -212,7 +212,7 @@ public void testDefaultRuleWithQueueAttribute() throws Exception { @Test public void testNestedUserQueueParsingErrors() { // No nested rule specified in hierarchical user queue - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(""); @@ -220,7 +220,7 @@ public void testNestedUserQueueParsingErrors() { assertIfExceptionThrown(sb); // Specified nested rule is not a FSPlacementRule - sb = new StringBuffer(); + sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -230,7 +230,7 @@ public void testNestedUserQueueParsingErrors() { assertIfExceptionThrown(sb); // Parent rule is rule that cannot be one: reject or nestedUserQueue - sb = new StringBuffer(); + sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -241,7 +241,7 @@ public void testNestedUserQueueParsingErrors() { // If the parent rule does not have the create flag the nested rule is not // terminal - sb = new StringBuffer(); + sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -253,7 +253,7 @@ public void testNestedUserQueueParsingErrors() { @Test public void testMultipleParentRules() throws Exception { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -275,7 +275,7 @@ public void testMultipleParentRules() throws Exception { @Test public void testBrokenRules() throws Exception { // broken rule should fail configuring - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(""); @@ -283,7 +283,7 @@ public void testBrokenRules() throws Exception { assertIfExceptionThrown(sb); // policy without rules ignoring policy - sb = new StringBuffer(); + sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(""); @@ -291,7 +291,7 @@ public void testBrokenRules() throws Exception { createPolicy(sb.toString()); // broken rule should fail configuring - sb = new StringBuffer(); + sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -302,7 +302,7 @@ public void testBrokenRules() throws Exception { // parent rule not set to something known: no parent rule is required // required case is only for nestedUserQueue tested earlier - sb = new StringBuffer(); + sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -312,7 +312,7 @@ public void 
testBrokenRules() throws Exception { createPolicy(sb.toString()); } - private void assertIfExceptionThrown(StringBuffer sb) { + private void assertIfExceptionThrown(StringBuilder sb) { Throwable th = null; try { createPolicy(sb.toString()); @@ -336,7 +336,7 @@ private void assertIfExceptionThrown(String user) { @Test public void testNestedUserQueueParsing() throws Exception { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -349,7 +349,7 @@ public void testNestedUserQueueParsing() throws Exception { @Test public void testNestedUserQueuePrimaryGroup() throws Exception { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -380,7 +380,7 @@ public void testNestedUserQueuePrimaryGroup() throws Exception { @Test public void testNestedUserQueuePrimaryGroupNoCreate() throws Exception { // Primary group rule has create='false' - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -402,7 +402,7 @@ public void testNestedUserQueuePrimaryGroupNoCreate() throws Exception { assertEquals("root.user1group.user1", context.getQueue()); // Both Primary group and nestedUserQueue rule has create='false' - sb = new StringBuffer(); + sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -426,7 +426,7 @@ public void testNestedUserQueuePrimaryGroupNoCreate() throws Exception { @Test public void testNestedUserQueueSecondaryGroup() throws Exception { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -452,7 +452,7 @@ public void testNestedUserQueueSecondaryGroup() throws Exception { public void testNestedUserQueueSpecificRule() throws Exception { // This test covers the use case where users can specify different parent // queues and want user queues under those. - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -477,7 +477,7 @@ public void testNestedUserQueueSpecificRule() throws Exception { public void testNestedUserQueueDefaultRule() throws Exception { // This test covers the use case where we would like user queues to be // created under a default parent queue - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -492,7 +492,7 @@ public void testNestedUserQueueDefaultRule() throws Exception { // Same as above but now with the create flag false for the parent createQueue(FSQueueType.PARENT, "root.parent"); - sb = new StringBuffer(); + sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -509,7 +509,7 @@ public void testNestedUserQueueDefaultRule() throws Exception { // Parent queue returned is already a configured LEAF, should fail and the // context is null. createQueue(FSQueueType.LEAF, "root.parent"); - sb = new StringBuffer(); + sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -526,7 +526,7 @@ public void testNestedUserQueueDefaultRule() throws Exception { @Test public void testUserContainsPeriod() throws Exception { // This test covers the user case where the username contains periods. 
- StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(""); @@ -535,7 +535,7 @@ public void testUserContainsPeriod() throws Exception { context = placementManager.placeApplication(asc, "first.last"); assertEquals("root.first_dot_last", context.getQueue()); - sb = new StringBuffer(); + sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -553,7 +553,7 @@ public void testGroupContainsPeriod() throws Exception { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -580,7 +580,7 @@ public void testEmptyGroupsPrimaryGroupRule() throws Exception { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); @@ -596,7 +596,7 @@ public void testSpecifiedQueueWithSpaces() throws Exception { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append(""); sb.append(" "); sb.append(" "); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/webapp/FederationInterceptorREST.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/webapp/FederationInterceptorREST.java index 725211ef1f..f0b06fc736 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/webapp/FederationInterceptorREST.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/webapp/FederationInterceptorREST.java @@ -1906,7 +1906,7 @@ public Response addToClusterNodeLabels(NodeLabelsInfo newNodeLabels, ClientMethod remoteMethod = new ClientMethod("addToClusterNodeLabels", argsClasses, args); Map<SubClusterInfo, Response> responseInfoMap = invokeConcurrent(subClustersActives, remoteMethod, Response.class); - StringBuffer buffer = new StringBuffer(); + StringBuilder buffer = new StringBuilder(); // SubCluster-0:SUCCESS,SubCluster-1:SUCCESS responseInfoMap.forEach((subClusterInfo, response) -> buildAppendMsg(subClusterInfo, buffer, response)); @@ -1964,7 +1964,7 @@ public Response removeFromClusterNodeLabels(Set<String> oldNodeLabels, new ClientMethod("removeFromClusterNodeLabels", argsClasses, args); Map<SubClusterInfo, Response> responseInfoMap = invokeConcurrent(subClustersActives, remoteMethod, Response.class); - StringBuffer buffer = new StringBuffer(); + StringBuilder buffer = new StringBuilder(); // SubCluster-0:SUCCESS,SubCluster-1:SUCCESS responseInfoMap.forEach((subClusterInfo, response) -> buildAppendMsg(subClusterInfo, buffer, response)); @@ -1993,10 +1993,10 @@ public Response removeFromClusterNodeLabels(Set<String> oldNodeLabels, * Build Append information. * * @param subClusterInfo subCluster information. - * @param buffer StringBuffer. + * @param buffer StringBuilder. * @param response response message. */ - private void buildAppendMsg(SubClusterInfo subClusterInfo, StringBuffer buffer, + private void buildAppendMsg(SubClusterInfo subClusterInfo, StringBuilder buffer, Response response) { SubClusterId subClusterId = subClusterInfo.getSubClusterId(); String state = response != null &&