diff --git a/CHANGES.txt b/CHANGES.txt index a6ca9c4a05..e2bcbc714e 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -199,6 +199,9 @@ Trunk (unreleased changes) HADOOP-3659. Patch to allow hadoop native to compile on Mac OS X. (Colin Evans and Allen Wittenauer via tomwhite) + HADOOP-6471. StringBuffer -> StringBuilder - conversion of references + as necessary. (Kay Kay via tomwhite) + OPTIMIZATIONS HADOOP-6467. Improve the performance on HarFileSystem.listStatus(..). diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Anonymizer.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Anonymizer.java index a0c024754b..0b25a0d3d5 100644 --- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Anonymizer.java +++ b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Anonymizer.java @@ -136,7 +136,7 @@ public static String getMD5Hash(String text) { } private static String convertToHex(byte[] data) { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); for (int i = 0; i < data.length; i++) { int halfbyte = (data[i] >>> 4) & 0x0F; int two_halfs = 0; diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/CPUParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/CPUParser.java index dce94439a7..5b424a218a 100644 --- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/CPUParser.java +++ b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/CPUParser.java @@ -46,7 +46,7 @@ public CPUParser() { * @return the EventRecord created */ public EventRecord query(String s) throws Exception { - StringBuffer sb = Environment.runCommand("cat /proc/cpuinfo"); + CharSequence sb = Environment.runCommandGeneric("cat /proc/cpuinfo"); EventRecord retval = new EventRecord(InetAddress.getLocalHost() .getCanonicalHostName(), InetAddress.getAllByName(InetAddress.getLocalHost() .getHostName()), Calendar.getInstance(), "CPU", "Unknown", "CPU", 
"-"); diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Environment.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Environment.java index dc113949d5..5478dbd522 100644 --- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Environment.java +++ b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Environment.java @@ -247,7 +247,7 @@ public static ArrayList getJobs() { if (!file_present) if (superuser) { - StringBuffer sb = runCommand("sudo smartctl -i " + devices[i]); + CharSequence sb = runCommandGeneric("sudo smartctl -i " + devices[i]); String patternStr = "[(failed)(device not supported)]"; Pattern pattern = Pattern.compile(patternStr); Matcher matcher = pattern.matcher(sb.toString()); @@ -263,7 +263,7 @@ public static ArrayList getJobs() { } // now remove disks that dont exist - StringBuffer resetSB = new StringBuffer(); + StringBuilder resetSB = new StringBuilder(); for (int j = 0; j < devices.length; j++) { resetSB.append(devices[j] == null ? "" : devices[j] + ", "); if (devices[j] != null) @@ -323,7 +323,7 @@ public static int getInterval(ArrayList monitors) { * @return true, if the command is availble, false otherwise */ public static boolean checkExistence(String cmd) { - StringBuffer sb = runCommand("which " + cmd); + CharSequence sb = runCommandGeneric("which " + cmd); if (sb.length() > 1) return true; @@ -331,15 +331,30 @@ public static boolean checkExistence(String cmd) { } /** - * Runs a shell command in the system and provides a StringBuffer + * Runs a shell command in the system and provides a StringBuilder + * with the output of the command. + *

This method is deprecated. See related method that returns a CharSequence as opposed to a StringBuffer. + * + * @param cmd an array of string that form the command to run + * + * @return a text that contains the output of the command + * @see #runCommandGeneric(String[]) + * @deprecated + */ + public static StringBuffer runCommand(String[] cmd) { + return new StringBuffer(runCommandGeneric(cmd)); + } + + /** + * Runs a shell command in the system and provides a StringBuilder + * with the output of the command. * * @param cmd an array of string that form the command to run * - * @return a StringBuffer that contains the output of the command + * @return a text that contains the output of the command */ - public static StringBuffer runCommand(String[] cmd) { - StringBuffer retval = new StringBuffer(MAX_OUTPUT_LENGTH); + public static CharSequence runCommandGeneric(String[] cmd) { + StringBuilder retval = new StringBuilder(MAX_OUTPUT_LENGTH); Process p; try { p = Runtime.getRuntime().exec(cmd); @@ -356,19 +371,32 @@ public static StringBuffer runCommand(String[] cmd) { return retval; } + + /** + * Runs a shell command in the system and provides a StringBuilder + * with the output of the command. + *

This method is deprecated in favor of the one that returns CharSequence as opposed to StringBuffer + * @param cmd the command to run + * + * @return a text that contains the output of the command + * @see #runCommandGeneric(String) + * @deprecated + */ + public static StringBuffer runCommand(String cmd) { + return new StringBuffer(runCommandGeneric(cmd)); + } /** - * Runs a shell command in the system and provides a StringBuffer + * Runs a shell command in the system and provides a StringBuilder * with the output of the command. * * @param cmd the command to run * - * @return a StringBuffer that contains the output of the command + * @return a text that contains the output of the command */ - public static StringBuffer runCommand(String cmd) { - return runCommand(cmd.split("\\s+")); - } - + public static CharSequence runCommandGeneric(String cmd) { + return runCommandGeneric(cmd.split("\\s+")); + } /** * Determines the greatest common divisor (GCD) of two integers. * diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HadoopLogParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HadoopLogParser.java index b438976960..e30400338b 100644 --- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HadoopLogParser.java +++ b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HadoopLogParser.java @@ -113,7 +113,7 @@ protected Calendar parseDate(String strDate, String strTime) { * */ private void findHostname() { - String startupInfo = Environment.runCommand( + String startupInfo = Environment.runCommandGeneric( "grep --max-count=1 STARTUP_MSG:\\s*host " + file.getName()).toString(); Pattern pattern = Pattern.compile("\\s+(\\w+/.+)\\s+"); Matcher matcher = pattern.matcher(startupInfo); diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LocalStore.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LocalStore.java index aa39a03261..4785874eb0 100644 --- 
a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LocalStore.java +++ b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LocalStore.java @@ -144,11 +144,25 @@ private void append(SerializedRecord sr) { /** * Pack a SerializedRecord into an array of bytes - * + *

+ This method is deprecated. * @param sr the SerializedRecord to be packed + * @return Packed representation of the Serialized Record + * @see #packConcurrent(SerializedRecord) + * @deprecated */ public static StringBuffer pack(SerializedRecord sr) { - StringBuffer sb = new StringBuffer(); + return new StringBuffer(packConcurrent(sr)); + } + + /** + * Pack a SerializedRecord into an array of bytes + * + * @param sr the SerializedRecord to be packed + * @return Packed representation of the Serialized Record + */ + public static CharSequence packConcurrent(SerializedRecord sr) { + StringBuilder sb = new StringBuilder(); ArrayList keys = new ArrayList(sr.fields.keySet()); @@ -162,7 +176,7 @@ public static StringBuffer pack(SerializedRecord sr) { } return sb; } - + /** * Upload the local file store into HDFS, after it * compressing it. Then a new local file is created diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/NICParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/NICParser.java index 4d066e8b56..90b4c2f7c5 100644 --- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/NICParser.java +++ b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/NICParser.java @@ -54,7 +54,7 @@ public NICParser() { * @return the EventRecord created */ public EventRecord query(String device) throws UnknownHostException { - StringBuffer sb = Environment.runCommand("/sbin/ifconfig " + device); + CharSequence sb = Environment.runCommandGeneric("/sbin/ifconfig " + device); EventRecord retval = new EventRecord(InetAddress.getLocalHost() .getCanonicalHostName(), InetAddress.getAllByName(InetAddress.getLocalHost() .getHostName()), Calendar.getInstance(), "NIC", "Unknown", device, "-"); diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SMARTParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SMARTParser.java index f50ea1da55..82e970c7b4 100644 ---
a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SMARTParser.java +++ b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SMARTParser.java @@ -66,12 +66,12 @@ public SMARTParser() { */ public EventRecord query(String device) throws Exception { String conf = Environment.getProperty("disks." + device + ".source"); - StringBuffer sb; + CharSequence sb; if (conf == null) - sb = Environment.runCommand("sudo smartctl --all " + device); + sb = Environment.runCommandGeneric("sudo smartctl --all " + device); else - sb = Environment.runCommand("cat " + conf); + sb = Environment.runCommandGeneric("cat " + conf); EventRecord retval = new EventRecord(InetAddress.getLocalHost() .getCanonicalHostName(), InetAddress.getAllByName(InetAddress.getLocalHost() @@ -146,11 +146,11 @@ public EventRecord query(String device) throws Exception { * This format is mostly found in IDE and SATA disks. * * @param er the EventRecord in which to store attributes found - * @param sb the StringBuffer with the text to parse + * @param sb the text to parse * * @return the EventRecord in which new attributes are stored. 
*/ - private EventRecord readColumns(EventRecord er, StringBuffer sb) { + private EventRecord readColumns(EventRecord er, CharSequence sb) { Pattern pattern = Pattern.compile("^\\s{0,2}(\\d{1,3}\\s+.*)$", Pattern.MULTILINE); diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SensorsParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SensorsParser.java index fc3d87d236..0d0498bc00 100644 --- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SensorsParser.java +++ b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SensorsParser.java @@ -42,10 +42,10 @@ public class SensorsParser extends ShellParser { * @return the EventRecord created */ public EventRecord query(String s) throws Exception { - StringBuffer sb; + CharSequence sb; - //sb = Environment.runCommand("sensors -A"); - sb = Environment.runCommand("cat sensors.out"); + //sb = Environment.runCommandGeneric("sensors -A"); + sb = Environment.runCommandGeneric("cat sensors.out"); EventRecord retval = new EventRecord(InetAddress.getLocalHost() .getCanonicalHostName(), InetAddress.getAllByName(InetAddress.getLocalHost() @@ -70,7 +70,7 @@ public EventRecord query(String s) throws Exception { * * @return the EventRecord created */ - private EventRecord readGroup(EventRecord er, StringBuffer sb, String prefix) { + private EventRecord readGroup(EventRecord er, CharSequence sb, String prefix) { Pattern pattern = Pattern.compile(".*(" + prefix + "\\s*\\d*)\\s*:\\s*(\\+?\\d+)", Pattern.MULTILINE); diff --git a/src/java/org/apache/hadoop/conf/Configuration.java b/src/java/org/apache/hadoop/conf/Configuration.java index e75c709400..714c6ff427 100644 --- a/src/java/org/apache/hadoop/conf/Configuration.java +++ b/src/java/org/apache/hadoop/conf/Configuration.java @@ -934,7 +934,7 @@ public boolean isIncluded(int value) { @Override public String toString() { - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); 
boolean first = true; for(Range r: ranges) { if (first) { @@ -1846,7 +1846,7 @@ public void setClassLoader(ClassLoader classLoader) { @Override public String toString() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append("Configuration: "); if(loadDefaults) { toString(defaultResources, sb); @@ -1858,8 +1858,8 @@ public String toString() { return sb.toString(); } - private void toString(List resources, StringBuffer sb) { - ListIterator i = resources.listIterator(); + private void toString(List resources, StringBuilder sb) { + ListIterator i = resources.listIterator(); while (i.hasNext()) { if (i.nextIndex() != 0) { sb.append(", "); diff --git a/src/java/org/apache/hadoop/fs/FileUtil.java b/src/java/org/apache/hadoop/fs/FileUtil.java index c4952564e3..940cd1cebe 100644 --- a/src/java/org/apache/hadoop/fs/FileUtil.java +++ b/src/java/org/apache/hadoop/fs/FileUtil.java @@ -166,7 +166,7 @@ public static boolean copy(FileSystem srcFS, Path[] srcs, throws IOException { boolean gotException = false; boolean returnVal = true; - StringBuffer exceptions = new StringBuffer(); + StringBuilder exceptions = new StringBuilder(); if (srcs.length == 1) return copy(srcFS, srcs[0], dstFS, dst, deleteSource, overwrite, conf); @@ -529,7 +529,7 @@ public static void unTar(File inFile, File untarDir) throws IOException { } } - StringBuffer untarCommand = new StringBuffer(); + StringBuilder untarCommand = new StringBuilder(); boolean gzipped = inFile.toString().endsWith("gz"); if (gzipped) { untarCommand.append(" gzip -dc '"); @@ -745,7 +745,7 @@ public static int chmod(String filename, String perm */ public static int chmod(String filename, String perm, boolean recursive) throws IOException, InterruptedException { - StringBuffer cmdBuf = new StringBuffer(); + StringBuilder cmdBuf = new StringBuilder(); cmdBuf.append("chmod "); if (recursive) { cmdBuf.append("-R "); diff --git a/src/java/org/apache/hadoop/fs/Path.java 
b/src/java/org/apache/hadoop/fs/Path.java index e6702aa8e5..02d3475977 100644 --- a/src/java/org/apache/hadoop/fs/Path.java +++ b/src/java/org/apache/hadoop/fs/Path.java @@ -238,7 +238,7 @@ public Path suffix(String suffix) { public String toString() { // we can't use uri.toString(), which escapes everything, because we want // illegal characters unescaped in the string, for glob processing, etc. - StringBuffer buffer = new StringBuffer(); + StringBuilder buffer = new StringBuilder(); if (uri.getScheme() != null) { buffer.append(uri.getScheme()); buffer.append(":"); diff --git a/src/java/org/apache/hadoop/io/BytesWritable.java b/src/java/org/apache/hadoop/io/BytesWritable.java index 9f6bbe0e46..2ff12f86c7 100644 --- a/src/java/org/apache/hadoop/io/BytesWritable.java +++ b/src/java/org/apache/hadoop/io/BytesWritable.java @@ -177,7 +177,7 @@ public boolean equals(Object right_obj) { * Generate the stream of bytes as hex pairs separated by ' '. */ public String toString() { - StringBuffer sb = new StringBuffer(3*size); + StringBuilder sb = new StringBuilder(3*size); for (int idx = 0; idx < size; idx++) { // if not the first, put a blank separator in if (idx != 0) { diff --git a/src/java/org/apache/hadoop/io/MD5Hash.java b/src/java/org/apache/hadoop/io/MD5Hash.java index a28c3ae20a..9692e93927 100644 --- a/src/java/org/apache/hadoop/io/MD5Hash.java +++ b/src/java/org/apache/hadoop/io/MD5Hash.java @@ -183,7 +183,7 @@ public int compare(byte[] b1, int s1, int l1, /** Returns a string representation of this object. 
*/ public String toString() { - StringBuffer buf = new StringBuffer(MD5_LEN*2); + StringBuilder buf = new StringBuilder(MD5_LEN*2); for (int i = 0; i < MD5_LEN; i++) { int b = digest[i]; buf.append(HEX_DIGITS[(b >> 4) & 0xf]); diff --git a/src/java/org/apache/hadoop/io/SequenceFile.java b/src/java/org/apache/hadoop/io/SequenceFile.java index 1f49439ef4..5903884f28 100644 --- a/src/java/org/apache/hadoop/io/SequenceFile.java +++ b/src/java/org/apache/hadoop/io/SequenceFile.java @@ -780,7 +780,7 @@ public int hashCode() { } public String toString() { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append("size: ").append(this.theMetadata.size()).append("\n"); Iterator> iter = this.theMetadata.entrySet().iterator(); diff --git a/src/java/org/apache/hadoop/io/UTF8.java b/src/java/org/apache/hadoop/io/UTF8.java index 20b00b769c..32f0b1ed56 100644 --- a/src/java/org/apache/hadoop/io/UTF8.java +++ b/src/java/org/apache/hadoop/io/UTF8.java @@ -132,7 +132,7 @@ public int compareTo(Object o) { /** Convert to a String. 
*/ public String toString() { - StringBuffer buffer = new StringBuffer(length); + StringBuilder buffer = new StringBuilder(length); try { synchronized (IBUF) { IBUF.reset(bytes, length); @@ -204,12 +204,12 @@ public static byte[] getBytes(String string) { */ public static String readString(DataInput in) throws IOException { int bytes = in.readUnsignedShort(); - StringBuffer buffer = new StringBuffer(bytes); + StringBuilder buffer = new StringBuilder(bytes); readChars(in, buffer, bytes); return buffer.toString(); } - private static void readChars(DataInput in, StringBuffer buffer, int nBytes) + private static void readChars(DataInput in, StringBuilder buffer, int nBytes) throws IOException { DataOutputBuffer obuf = OBUF_FACTORY.get(); obuf.reset(); diff --git a/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java index c893b39c9d..d436b626f7 100644 --- a/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java +++ b/src/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java @@ -42,14 +42,14 @@ public class CompressionCodecFactory { private void addCodec(CompressionCodec codec) { String suffix = codec.getDefaultExtension(); - codecs.put(new StringBuffer(suffix).reverse().toString(), codec); + codecs.put(new StringBuilder(suffix).reverse().toString(), codec); } /** * Print the extension map out as a string. 
*/ public String toString() { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); Iterator> itr = codecs.entrySet().iterator(); buf.append("{ "); @@ -112,7 +112,7 @@ public static List> getCodecClasses(Configurat */ public static void setCodecClasses(Configuration conf, List classes) { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); Iterator itr = classes.iterator(); if (itr.hasNext()) { Class cls = itr.next(); @@ -154,7 +154,7 @@ public CompressionCodec getCodec(Path file) { CompressionCodec result = null; if (codecs != null) { String filename = file.getName(); - String reversedFilename = new StringBuffer(filename).reverse().toString(); + String reversedFilename = new StringBuilder(filename).reverse().toString(); SortedMap subMap = codecs.headMap(reversedFilename); if (!subMap.isEmpty()) { diff --git a/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java index ae34f1014c..518625e604 100644 --- a/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java +++ b/src/java/org/apache/hadoop/ipc/WritableRpcEngine.java @@ -90,7 +90,7 @@ public void write(DataOutput out) throws IOException { } public String toString() { - StringBuffer buffer = new StringBuffer(); + StringBuilder buffer = new StringBuilder(); buffer.append(methodName); buffer.append("("); for (int i = 0; i < parameters.length; i++) { diff --git a/src/java/org/apache/hadoop/net/NetworkTopology.java b/src/java/org/apache/hadoop/net/NetworkTopology.java index 1de588bd43..22ab8cfca4 100644 --- a/src/java/org/apache/hadoop/net/NetworkTopology.java +++ b/src/java/org/apache/hadoop/net/NetworkTopology.java @@ -580,7 +580,7 @@ public int countNumOfAvailableNodes(String scope, /** convert a network tree to a string */ public String toString() { // print the number of racks - StringBuffer tree = new StringBuffer(); + StringBuilder tree = new StringBuilder(); tree.append("Number of racks: "); 
tree.append(numOfRacks); tree.append("\n"); diff --git a/src/java/org/apache/hadoop/net/ScriptBasedMapping.java b/src/java/org/apache/hadoop/net/ScriptBasedMapping.java index a000bfaf7e..a181d4cc6f 100644 --- a/src/java/org/apache/hadoop/net/ScriptBasedMapping.java +++ b/src/java/org/apache/hadoop/net/ScriptBasedMapping.java @@ -124,7 +124,7 @@ private String runResolveCommand(List args) { if (args.size() == 0) { return null; } - StringBuffer allOutput = new StringBuffer(); + StringBuilder allOutput = new StringBuilder(); int numProcessed = 0; if (maxArgs < MIN_ALLOWABLE_ARGS) { LOG.warn("Invalid value " + Integer.toString(maxArgs) diff --git a/src/java/org/apache/hadoop/record/Buffer.java b/src/java/org/apache/hadoop/record/Buffer.java index 5ad8dc8e7b..563dc15e1c 100644 --- a/src/java/org/apache/hadoop/record/Buffer.java +++ b/src/java/org/apache/hadoop/record/Buffer.java @@ -221,7 +221,7 @@ public boolean equals(Object other) { // inheric javadoc public String toString() { - StringBuffer sb = new StringBuffer(2*count); + StringBuilder sb = new StringBuilder(2*count); for(int idx = 0; idx < count; idx++) { sb.append(Character.forDigit((bytes[idx] & 0xF0) >> 4, 16)); sb.append(Character.forDigit(bytes[idx] & 0x0F, 16)); diff --git a/src/java/org/apache/hadoop/record/CsvRecordInput.java b/src/java/org/apache/hadoop/record/CsvRecordInput.java index 45141ae5d1..6e8a00c617 100644 --- a/src/java/org/apache/hadoop/record/CsvRecordInput.java +++ b/src/java/org/apache/hadoop/record/CsvRecordInput.java @@ -51,7 +51,7 @@ private void throwExceptionOnError(String tag) throws IOException { private String readField(String tag) throws IOException { try { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); while (true) { char c = (char) stream.read(); switch (c) { diff --git a/src/java/org/apache/hadoop/record/Utils.java b/src/java/org/apache/hadoop/record/Utils.java index a365719762..1f51079d29 100644 --- 
a/src/java/org/apache/hadoop/record/Utils.java +++ b/src/java/org/apache/hadoop/record/Utils.java @@ -45,7 +45,7 @@ private Utils() { * @return */ static String toXMLString(String s) { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (int idx = 0; idx < s.length(); idx++) { char ch = s.charAt(idx); if (ch == '<') { @@ -86,7 +86,7 @@ static private int h2c(char ch) { * @return */ static String fromXMLString(String s) { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (int idx = 0; idx < s.length();) { char ch = s.charAt(idx++); if (ch == '%') { @@ -109,7 +109,7 @@ static String fromXMLString(String s) { * @return */ static String toCSVString(String s) { - StringBuffer sb = new StringBuffer(s.length()+1); + StringBuilder sb = new StringBuilder(s.length()+1); sb.append('\''); int len = s.length(); for (int i = 0; i < len; i++) { @@ -151,7 +151,7 @@ static String fromCSVString(String s) throws IOException { throw new IOException("Error deserializing string."); } int len = s.length(); - StringBuffer sb = new StringBuffer(len-1); + StringBuilder sb = new StringBuilder(len-1); for (int i = 1; i < len; i++) { char c = s.charAt(i); if (c == '%') { @@ -214,7 +214,7 @@ static Buffer fromXMLBuffer(String s) * @return */ static String toCSVBuffer(Buffer buf) { - StringBuffer sb = new StringBuffer("#"); + StringBuilder sb = new StringBuilder("#"); sb.append(buf.toString()); return sb.toString(); } diff --git a/src/java/org/apache/hadoop/record/XmlRecordOutput.java b/src/java/org/apache/hadoop/record/XmlRecordOutput.java index 79a63d29bd..a532baecca 100644 --- a/src/java/org/apache/hadoop/record/XmlRecordOutput.java +++ b/src/java/org/apache/hadoop/record/XmlRecordOutput.java @@ -41,7 +41,7 @@ public class XmlRecordOutput implements RecordOutput { private Stack compoundStack; private void putIndent() { - StringBuffer sb = new StringBuffer(""); + StringBuilder sb = new StringBuilder(""); for (int idx = 0; 
idx < indent; idx++) { sb.append(" "); } diff --git a/src/java/org/apache/hadoop/record/compiler/JRecord.java b/src/java/org/apache/hadoop/record/compiler/JRecord.java index c68438cde2..08c60a9cf3 100644 --- a/src/java/org/apache/hadoop/record/compiler/JRecord.java +++ b/src/java/org/apache/hadoop/record/compiler/JRecord.java @@ -782,7 +782,7 @@ public JRecord(String name, ArrayList> flist) { // precompute signature int idx = name.lastIndexOf('.'); String recName = name.substring(idx+1); - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); sb.append("L").append(recName).append("("); for (Iterator> i = flist.iterator(); i.hasNext();) { String s = i.next().getType().getSignature(); diff --git a/src/java/org/apache/hadoop/util/Progress.java b/src/java/org/apache/hadoop/util/Progress.java index e6a2e0c5e3..d4e172016a 100644 --- a/src/java/org/apache/hadoop/util/Progress.java +++ b/src/java/org/apache/hadoop/util/Progress.java @@ -211,12 +211,12 @@ public synchronized void setStatus(String status) { } public String toString() { - StringBuffer result = new StringBuffer(); + StringBuilder result = new StringBuilder(); toString(result); return result.toString(); } - private synchronized void toString(StringBuffer buffer) { + private synchronized void toString(StringBuilder buffer) { buffer.append(status); if (phases.size() != 0 && currentPhase < phases.size()) { buffer.append(" > "); diff --git a/src/java/org/apache/hadoop/util/StringUtils.java b/src/java/org/apache/hadoop/util/StringUtils.java index 12d5ae5841..086098b43c 100644 --- a/src/java/org/apache/hadoop/util/StringUtils.java +++ b/src/java/org/apache/hadoop/util/StringUtils.java @@ -128,7 +128,7 @@ public static String formatPercent(double done, int digits) { public static String arrayToString(String[] strs) { if (strs.length == 0) { return ""; } - StringBuffer sbuf = new StringBuffer(); + StringBuilder sbuf = new StringBuilder(); sbuf.append(strs[0]); for (int idx = 1; idx < 
strs.length; idx++) { sbuf.append(","); @@ -183,7 +183,7 @@ public static String uriToString(URI[] uris){ if (uris == null) { return null; } - StringBuffer ret = new StringBuffer(uris[0].toString()); + StringBuilder ret = new StringBuilder(uris[0].toString()); for(int i = 1; i < uris.length;i++){ ret.append(","); ret.append(uris[i].toString()); @@ -247,7 +247,7 @@ public static String formatTimeDiff(long finishTime, long startTime){ * @param timeDiff The time difference to format */ public static String formatTime(long timeDiff){ - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); long hours = timeDiff / (60*60*1000); long rem = (timeDiff % (60*60*1000)); long minutes = rem / (60*1000); @@ -279,7 +279,7 @@ public static String formatTime(long timeDiff){ */ public static String getFormattedTimeWithDiff(DateFormat dateFormat, long finishTime, long startTime){ - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); if (0 != finishTime) { buf.append(dateFormat.format(new Date(finishTime))); if (0 != startTime){ @@ -533,7 +533,7 @@ public static String unEscapeString(String str, char escapeChar, * @return a message for logging */ private static String toStartupShutdownString(String prefix, String [] msg) { - StringBuffer b = new StringBuffer(prefix); + StringBuilder b = new StringBuilder(prefix); b.append("\n/************************************************************"); for(String s : msg) b.append("\n" + prefix + s); @@ -645,7 +645,7 @@ public static String escapeHTML(String string) { if(string == null) { return null; } - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); boolean lastCharacterWasSpace = false; char[] chars = string.toCharArray(); for(char c : chars) { @@ -708,7 +708,7 @@ public static synchronized String limitDecimalTo2(double d) { * @param strings Strings to join. 
*/ public static String join(CharSequence separator, Iterable strings) { - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); boolean first = true; for (String s : strings) { if (first) { diff --git a/src/test/core/org/apache/hadoop/io/RandomDatum.java b/src/test/core/org/apache/hadoop/io/RandomDatum.java index ab8f34feba..5a4bbc0bcd 100644 --- a/src/test/core/org/apache/hadoop/io/RandomDatum.java +++ b/src/test/core/org/apache/hadoop/io/RandomDatum.java @@ -64,7 +64,7 @@ public boolean equals(Object o) { /** Returns a string representation of this object. */ public String toString() { - StringBuffer buf = new StringBuffer(length*2); + StringBuilder buf = new StringBuilder(length*2); for (int i = 0; i < length; i++) { int b = data[i]; buf.append(HEX_DIGITS[(b >> 4) & 0xf]); diff --git a/src/test/core/org/apache/hadoop/io/TestText.java b/src/test/core/org/apache/hadoop/io/TestText.java index dc6b1bb82a..12e3734d2c 100644 --- a/src/test/core/org/apache/hadoop/io/TestText.java +++ b/src/test/core/org/apache/hadoop/io/TestText.java @@ -35,7 +35,7 @@ public class TestText extends TestCase { // generate a valid java String private static String getTestString(int len) throws Exception { - StringBuffer buffer = new StringBuffer(); + StringBuilder buffer = new StringBuilder(); int length = (len==RAND_LEN) ? RANDOM.nextInt(1000) : len; while (buffer.length()