diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 6e1cc1103f..8a4f13c10a 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -567,6 +567,9 @@ Release 2.7.0 - UNRELEASED
HADOOP-11388. Remove deprecated o.a.h.metrics.file.FileContext.
(Li Lu via wheat9)
+ HADOOP-11386. Replace \n by %n in hadoop-common format strings.
+ (Li Lu via wheat9)
+
Release 2.6.0 - 2014-11-18
INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
index eb1fb6b7d5..bb221ee361 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
@@ -71,10 +71,10 @@ public class ReconfigurationServlet extends HttpServlet {
private void printHeader(PrintWriter out, String nodeName) {
out.print("
");
- out.printf("%s Reconfiguration Utility\n",
+ out.printf("%s Reconfiguration Utility%n",
StringEscapeUtils.escapeHtml(nodeName));
out.print("\n");
- out.printf("%s Reconfiguration Utility
\n",
+ out.printf("%s Reconfiguration Utility
%n",
StringEscapeUtils.escapeHtml(nodeName));
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
index d437a663f5..ba65cd2e3b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
@@ -195,11 +195,11 @@ class Display extends FsCommand {
FileChecksum checksum = item.fs.getFileChecksum(item.path);
if (checksum == null) {
- out.printf("%s\tNONE\t\n", item.toString());
+ out.printf("%s\tNONE\t%n", item.toString());
} else {
String checksumString = StringUtils.byteToHexString(
checksum.getBytes(), 0, checksum.getLength());
- out.printf("%s\t%s\t%s\n",
+ out.printf("%s\t%s\t%s%n",
item.toString(), checksum.getAlgorithmName(),
checksumString);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
index c11678dd5d..f17be1ab90 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
@@ -2341,7 +2341,7 @@ public class TFile {
* A list of TFile paths.
*/
public static void main(String[] args) {
- System.out.printf("TFile Dumper (TFile %s, BCFile %s)\n", TFile.API_VERSION
+ System.out.printf("TFile Dumper (TFile %s, BCFile %s)%n", TFile.API_VERSION
.toString(), BCFile.API_VERSION.toString());
if (args.length == 0) {
System.out
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
index 829a1c68c4..ad94c4297b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java
@@ -176,7 +176,7 @@ class TFileDumper {
for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it
.hasNext();) {
Map.Entry<String, String> e = it.next();
- out.printf("%s : %s\n", Align.format(e.getKey(), maxKeyLength,
+ out.printf("%s : %s%n", Align.format(e.getKey(), maxKeyLength,
Align.LEFT), e.getValue());
}
out.println();
@@ -200,7 +200,7 @@ class TFileDumper {
String endKey = "End-Key";
int endKeyWidth = Math.max(endKey.length(), maxKeySampleLen * 2 + 5);
- out.printf("%s %s %s %s %s %s\n", Align.format(blkID, blkIDWidth,
+ out.printf("%s %s %s %s %s %s%n", Align.format(blkID, blkIDWidth,
Align.CENTER), Align.format(offset, offsetWidth, Align.CENTER),
Align.format(blkLen, blkLenWidth, Align.CENTER), Align.format(
rawSize, rawSizeWidth, Align.CENTER), Align.format(records,
@@ -267,7 +267,7 @@ class TFileDumper {
* 10);
String compression = "Compression";
int compressionWidth = compression.length();
- out.printf("%s %s %s %s %s\n", Align.format(name, nameWidth,
+ out.printf("%s %s %s %s %s%n", Align.format(name, nameWidth,
Align.CENTER), Align.format(offset, offsetWidth, Align.CENTER),
Align.format(blkLen, blkLenWidth, Align.CENTER), Align.format(
rawSize, rawSizeWidth, Align.CENTER), Align.format(compression,
@@ -280,7 +280,7 @@ class TFileDumper {
BlockRegion region = e.getValue().getRegion();
String blkCompression =
e.getValue().getCompressionAlgorithm().getName();
- out.printf("%s %s %s %s %s\n", Align.format(blkName, nameWidth,
+ out.printf("%s %s %s %s %s%n", Align.format(blkName, nameWidth,
Align.LEFT), Align.format(region.getOffset(), offsetWidth,
Align.LEFT), Align.format(region.getCompressedSize(),
blkLenWidth, Align.LEFT), Align.format(region.getRawSize(),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
index 768294d707..e152d46c49 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
@@ -181,7 +181,7 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider {
final Integer key, final String value,
final Integer ekey, final String evalue) {
LOG.warn("\n" + header + String.format(
- "new entry (%d, %s), existing entry: (%d, %s).\n%s\n%s",
+ "new entry (%d, %s), existing entry: (%d, %s).%n%s%n%s",
key, value, ekey, evalue,
"The new entry is to be ignored for the following reason.",
DUPLICATE_NAME_ID_DEBUG_INFO));
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java
index 641635542f..81448ab2d4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeLibraryChecker.java
@@ -108,14 +108,14 @@ public class NativeLibraryChecker {
}
System.out.println("Native library checking:");
- System.out.printf("hadoop: %b %s\n", nativeHadoopLoaded, hadoopLibraryName);
- System.out.printf("zlib: %b %s\n", zlibLoaded, zlibLibraryName);
- System.out.printf("snappy: %b %s\n", snappyLoaded, snappyLibraryName);
- System.out.printf("lz4: %b %s\n", lz4Loaded, lz4LibraryName);
- System.out.printf("bzip2: %b %s\n", bzip2Loaded, bzip2LibraryName);
- System.out.printf("openssl: %b %s\n", openSslLoaded, openSslDetail);
+ System.out.printf("hadoop: %b %s%n", nativeHadoopLoaded, hadoopLibraryName);
+ System.out.printf("zlib: %b %s%n", zlibLoaded, zlibLibraryName);
+ System.out.printf("snappy: %b %s%n", snappyLoaded, snappyLibraryName);
+ System.out.printf("lz4: %b %s%n", lz4Loaded, lz4LibraryName);
+ System.out.printf("bzip2: %b %s%n", bzip2Loaded, bzip2LibraryName);
+ System.out.printf("openssl: %b %s%n", openSslLoaded, openSslDetail);
if (Shell.WINDOWS) {
- System.out.printf("winutils: %b %s\n", winutilsExists, winutilsPath);
+ System.out.printf("winutils: %b %s%n", winutilsExists, winutilsPath);
}
if ((!nativeHadoopLoaded) || (Shell.WINDOWS && (!winutilsExists)) ||
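
Background on the change, for reviewers: in a java.util.Formatter format string (used by printf and String.format), %n expands to the platform line separator returned by System.lineSeparator(), while \n is a character escape that always emits a single LF. The sketch below illustrates the difference; the class name NewlineDemo and the sample arguments are illustrative only, not part of this patch.

// NewlineDemo.java -- illustrative sketch, not part of this patch.
import java.io.PrintWriter;
import java.io.StringWriter;

public class NewlineDemo {
  public static void main(String[] args) {
    // %n is interpreted by java.util.Formatter and expands to
    // System.lineSeparator(): "\r\n" on Windows, "\n" elsewhere.
    System.out.printf("hadoop: %b %s%n", true, "libhadoop.so");

    // \n is a plain character escape: always a single LF, on every platform.
    System.out.printf("hadoop: %b %s\n", true, "libhadoop.so");

    // String.format obeys the same Formatter rules as printf, which is why
    // the ShellBasedIdMapping hunk converts its \n escapes as well.
    String line = String.format("key : %s%n", "value");

    // PrintWriter.printf behaves identically (ReconfigurationServlet,
    // Display, and TFileDumper all write through a PrintWriter).
    StringWriter buf = new StringWriter();
    new PrintWriter(buf, true).printf("%s : %s%n", "key", line.trim());
    System.out.print(buf);
  }
}

Note that %n is only meaningful to Formatter; plain print/println calls such as out.print("</head><body>\n") in ReconfigurationServlet are deliberately left untouched by this patch.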