diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java index 5125be078d..f1517d65bd 100644 --- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java +++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java @@ -21,12 +21,11 @@ import java.io.File; import java.io.IOException; -import java.io.UnsupportedEncodingException; import java.lang.reflect.InvocationTargetException; import java.net.InetAddress; import java.net.UnknownHostException; import java.nio.ByteBuffer; -import java.nio.charset.IllegalCharsetNameException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; @@ -426,12 +425,8 @@ DER get(int... tags) { } String getAsString() { - try { - return new String(bb.array(), bb.arrayOffset() + bb.position(), - bb.remaining(), "UTF-8"); - } catch (UnsupportedEncodingException e) { - throw new IllegalCharsetNameException("UTF-8"); // won't happen. - } + return new String(bb.array(), bb.arrayOffset() + bb.position(), + bb.remaining(), StandardCharsets.UTF_8); } @Override diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java index ea3d6dc74e..8fc3a696c4 100755 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java @@ -3565,7 +3565,7 @@ private void checkForOverride(Properties properties, String name, String attr, S * @throws IOException raised on errors performing I/O. 
*/ public void writeXml(OutputStream out) throws IOException { - writeXml(new OutputStreamWriter(out, "UTF-8")); + writeXml(new OutputStreamWriter(out, StandardCharsets.UTF_8)); } public void writeXml(Writer out) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java index df878d9987..45636c5666 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java @@ -18,6 +18,7 @@ package org.apache.hadoop.fs; import java.io.IOException; +import java.nio.charset.StandardCharsets; import org.apache.commons.codec.DecoderException; import org.apache.commons.codec.binary.Base64; @@ -76,7 +77,7 @@ public static byte[] decodeValue(String value) throws IOException { String en = value.substring(0, 2); if (value.startsWith("\"") && value.endsWith("\"")) { value = value.substring(1, value.length()-1); - result = value.getBytes("utf-8"); + result = value.getBytes(StandardCharsets.UTF_8); } else if (en.equalsIgnoreCase(HEX_PREFIX)) { value = value.substring(2, value.length()); try { @@ -90,7 +91,7 @@ public static byte[] decodeValue(String value) throws IOException { } } if (result == null) { - result = value.getBytes("utf-8"); + result = value.getBytes(StandardCharsets.UTF_8); } } return result; @@ -114,7 +115,7 @@ public static String encodeValue(byte[] value, XAttrCodec encoding) } else if (encoding == BASE64) { return BASE64_PREFIX + base64.encodeToString(value); } else { - return "\"" + new String(value, "utf-8") + "\""; + return "\"" + new String(value, StandardCharsets.UTF_8) + "\""; } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java index b3487ef309..e70cc6d8b1 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java @@ -387,7 +387,7 @@ public void testMultiByteCharacters() throws IOException { String name = "multi_byte_\u611b_name"; String value = "multi_byte_\u0641_value"; out = new BufferedWriter(new OutputStreamWriter( - new FileOutputStream(CONFIG_MULTI_BYTE), "UTF-8")); + new FileOutputStream(CONFIG_MULTI_BYTE), StandardCharsets.UTF_8)); startConfig(); declareProperty(name, value, value); endConfig(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java index 38e16221a4..79049d3837 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java @@ -32,6 +32,7 @@ import java.io.*; import java.net.URI; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; @@ -673,7 +674,7 @@ public void testFSOutputStreamBuilder() throws Exception { fileSys.createFile(path).recursive(); FSDataOutputStream out = builder.build(); String content = "Create with a generic type of createFile!"; - byte[] contentOrigin = content.getBytes("UTF8"); + byte[] 
contentOrigin = content.getBytes(StandardCharsets.UTF_8); out.write(contentOrigin); out.close(); diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java index bd9baaa93f..f1ddddd4cb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java @@ -106,7 +106,7 @@ static URL createURL(Path path, Map<String, String> params, Map<String, List<String>> multiValuedParams) for (Map.Entry<String, List<String>> multiValuedEntry : multiValuedParams.entrySet()) { - String name = URLEncoder.encode(multiValuedEntry.getKey(), "UTF8"); + String name = URLEncoder.encode(multiValuedEntry.getKey(), "UTF-8"); List<String> values = multiValuedEntry.getValue(); for (String value : values) { sb.append(separator).append(name).append("="). diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java index e94e48ed0f..0df38d5204 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/blockaliasmap/impl/TextFileRegionAliasMap.java @@ -26,6 +26,7 @@ import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Base64; import java.util.Iterator; @@ -148,7 +149,7 @@ TextWriter createWriter(Path file, CompressionCodec codec, String delim, } OutputStream tmp = fs.create(file); java.io.Writer out = new BufferedWriter(new OutputStreamWriter( - (null == codec) ? tmp : codec.createOutputStream(tmp), "UTF-8")); + (null == codec) ? tmp : codec.createOutputStream(tmp), StandardCharsets.UTF_8)); return new TextWriter(out, delim); } @@ -379,7 +380,7 @@ public Iterator<FileRegion> iterator() { FRIterator i = new FRIterator(); try { BufferedReader r = - new BufferedReader(new InputStreamReader(createStream(), "UTF-8")); + new BufferedReader(new InputStreamReader(createStream(), StandardCharsets.UTF_8)); iterators.put(i, r); i.pending = nextInternal(i); } catch (IOException e) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java index 1d34c773e6..263241a4b9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockReceiver.java @@ -29,6 +29,7 @@ import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.util.ArrayDeque; import java.util.Arrays; import java.util.Queue; @@ -1063,7 +1064,7 @@ void receiveBlock( // send a special ack upstream. if (datanode.isRestarting() && isClient && !isTransfer) { try (Writer out = new OutputStreamWriter( - replicaInfo.createRestartMetaStream(), "UTF-8")) { + replicaInfo.createRestartMetaStream(), StandardCharsets.UTF_8)) { // write out the current time.
out.write(Long.toString(Time.now() + restartBudget)); out.flush(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java index 4611df765f..8c643e9e16 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java @@ -28,6 +28,7 @@ import java.io.OutputStreamWriter; import java.io.RandomAccessFile; import java.io.Writer; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.ArrayList; import java.util.Arrays; @@ -399,7 +400,7 @@ void saveDfsUsed() { try { long used = getDfsUsed(); try (Writer out = new OutputStreamWriter( - Files.newOutputStream(outFile.toPath()), "UTF-8")) { + Files.newOutputStream(outFile.toPath()), StandardCharsets.UTF_8)) { // mtime is written last, so that truncated writes won't be valid. out.write(Long.toString(used) + " " + Long.toString(timer.now())); // This is only called as part of the volume shutdown. diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java index 2935e6ae32..47f0a3556a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java @@ -27,6 +27,7 @@ import java.io.RandomAccessFile; import java.net.URI; import java.nio.channels.ClosedChannelException; +import java.nio.charset.StandardCharsets; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.util.Collection; @@ -929,7 +930,7 @@ public void save() throws IOException { boolean success = false; try (BufferedWriter writer = new BufferedWriter( new OutputStreamWriter(fileIoProvider.getFileOutputStream( - FsVolumeImpl.this, getTempSaveFile()), "UTF-8"))) { + FsVolumeImpl.this, getTempSaveFile()), StandardCharsets.UTF_8))) { WRITER.writeValue(writer, state); success = true; } finally { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java index d8fb81b2ad..63fe238cd5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java @@ -66,6 +66,7 @@ import java.io.InputStreamReader; import java.net.InetSocketAddress; import java.net.URI; +import java.nio.charset.StandardCharsets; import java.text.DateFormat; import java.util.*; import java.util.concurrent.TimeUnit; @@ -740,7 +741,7 @@ private static Options buildCliOptions() { private static String[] readPathFile(String file) throws IOException { List<String> list = Lists.newArrayList(); BufferedReader reader = new BufferedReader( - new InputStreamReader(new FileInputStream(file), "UTF-8")); + new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8)); try { String line; while ((line = reader.readLine()) != null) { diff --git
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java index 9f0288ebf3..a0da4eaf80 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java @@ -26,6 +26,7 @@ import java.net.URL; import java.net.URLConnection; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; import java.security.PrivilegedExceptionAction; import java.util.concurrent.TimeUnit; @@ -207,7 +208,7 @@ private Integer listCorruptFileBlocks(String dir, String baseUrl) } InputStream stream = connection.getInputStream(); BufferedReader input = new BufferedReader(new InputStreamReader( - stream, "UTF-8")); + stream, StandardCharsets.UTF_8)); try { String line = null; while ((line = input.readLine()) != null) { @@ -376,7 +377,7 @@ else if (args[idx].equals("-replicaDetails")) { } InputStream stream = connection.getInputStream(); BufferedReader input = new BufferedReader(new InputStreamReader( - stream, "UTF-8")); + stream, StandardCharsets.UTF_8)); String line = null; String lastLine = NamenodeFsck.CORRUPT_STATUS; int errCode = -1; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java index 0c075ff6da..b66fad834c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java @@ -622,7 +622,7 @@ private INodeSection.INode.Builder processINodeXml(Node node) inodeBld.setId(id); String name = node.removeChildStr(SECTION_NAME); if (name != null) { - inodeBld.setName(ByteString.copyFrom(name, "UTF8")); + inodeBld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8)); } switch (type) { case "FILE": @@ -838,7 +838,7 @@ private void processSymlinkXml(Node node, } String target = node.removeChildStr(INODE_SECTION_TARGET); if (target != null) { - bld.setTarget(ByteString.copyFrom(target, "UTF8")); + bld.setTarget(ByteString.copyFrom(target, StandardCharsets.UTF_8)); } Long lval = node.removeChildLong(INODE_SECTION_MTIME); if (lval != null) { @@ -900,7 +900,7 @@ private INodeSection.XAttrFeatureProto.Builder xattrsXmlToProto(Node xattrs) } val = new HexBinaryAdapter().unmarshal(valHex); } else { - val = valStr.getBytes("UTF8"); + val = valStr.getBytes(StandardCharsets.UTF_8); } b.setValue(ByteString.copyFrom(val)); @@ -1232,7 +1232,7 @@ public void process() throws IOException { } String name = inodeRef.removeChildStr("name"); if (name != null) { - bld.setName(ByteString.copyFrom(name, "UTF8")); + bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8)); } Integer dstSnapshotId = inodeRef.removeChildInt( INODE_REFERENCE_SECTION_DST_SNAPSHOT_ID); @@ -1468,7 +1468,7 @@ private void processDirDiffEntry() throws IOException { bld.setChildrenSize(childrenSize); String name = dirDiff.removeChildStr(SECTION_NAME); if (name != null) { - bld.setName(ByteString.copyFrom(name, "UTF8")); + bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8)); } Node snapshotCopy = dirDiff.removeChild( SNAPSHOT_DIFF_SECTION_SNAPSHOT_COPY); @@ -1514,7 +1514,7 @@ private void 
processDirDiffEntry() throws IOException { } created.verifyNoRemainingKeys("created"); FsImageProto.SnapshotDiffSection.CreatedListEntry.newBuilder(). - setName(ByteString.copyFrom(cleName, "UTF8")). + setName(ByteString.copyFrom(cleName, StandardCharsets.UTF_8)). build().writeDelimitedTo(out); actualCreatedListSize++; } @@ -1571,7 +1571,7 @@ private void processFileDiffEntry() throws IOException { } String name = fileDiff.removeChildStr(SECTION_NAME); if (name != null) { - bld.setName(ByteString.copyFrom(name, "UTF8")); + bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8)); } Node snapshotCopy = fileDiff.removeChild( SNAPSHOT_DIFF_SECTION_SNAPSHOT_COPY); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java index e54b7332b1..e43e9b14b9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hdfs; import java.io.*; +import java.nio.charset.StandardCharsets; import java.security.Permission; import java.security.PrivilegedExceptionAction; import java.text.SimpleDateFormat; @@ -1917,7 +1918,7 @@ private static void corrupt( char c = content.charAt(0); sb.setCharAt(0, ++c); for(MaterializedReplica replica : replicas) { - replica.corruptData(sb.toString().getBytes("UTF8")); + replica.corruptData(sb.toString().getBytes(StandardCharsets.UTF_8)); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java index 6330c1bddb..669224818f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java @@ -45,6 +45,7 @@ import java.net.ServerSocket; import java.net.SocketTimeoutException; import java.net.URI; +import java.nio.charset.StandardCharsets; import java.security.NoSuchAlgorithmException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; @@ -1885,7 +1886,7 @@ public void testDFSDataOutputStreamBuilderForCreation() throws Exception { .replication((short) 1) .blockSize(4096) .build()) { - byte[] contentOrigin = content.getBytes("UTF8"); + byte[] contentOrigin = content.getBytes(StandardCharsets.UTF_8); out1.write(contentOrigin); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java index 7ebf55f571..fbd0248175 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java @@ -194,10 +194,10 @@ private static void writeFile(FileSystem fs, Path path, byte[] buffer, */ public static Configuration initializeStorageStateConf(int numDirs, Configuration conf) { - StringBuffer nameNodeDirs = - new StringBuffer(new File(TEST_ROOT_DIR, "name1").toString()); - StringBuffer dataNodeDirs = - new StringBuffer(new File(TEST_ROOT_DIR, "data1").toString()); + StringBuilder nameNodeDirs = + new StringBuilder(new File(TEST_ROOT_DIR, "name1").toString()); + StringBuilder dataNodeDirs 
= + new StringBuilder(new File(TEST_ROOT_DIR, "data1").toString()); for (int i = 2; i <= numDirs; i++) { nameNodeDirs.append("," + new File(TEST_ROOT_DIR, "name"+i)); dataNodeDirs.append("," + new File(TEST_ROOT_DIR, "data"+i)); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java index f8d66c2f2c..dbd77c7f13 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestProvidedImpl.java @@ -33,6 +33,7 @@ import java.io.Writer; import java.net.URI; import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -147,7 +148,7 @@ public FileRegion next() { newFile.getAbsolutePath()); newFile.createNewFile(); Writer writer = new OutputStreamWriter( - new FileOutputStream(newFile.getAbsolutePath()), "utf-8"); + new FileOutputStream(newFile.getAbsolutePath()), StandardCharsets.UTF_8); for(int i=0; i< BLK_LEN/(Integer.SIZE/8); i++) { writer.write(currentCount); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSPermissionChecker.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSPermissionChecker.java index f13ed7efdc..95b63960e3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSPermissionChecker.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSPermissionChecker.java @@ -39,6 +39,7 @@ import static org.mockito.Mockito.mock; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.function.LongFunction; @@ -433,7 +434,7 @@ private static INodeDirectory createINodeDirectory(INodeDirectory parent, PermissionStatus permStatus = PermissionStatus.createImmutable(owner, group, FsPermission.createImmutable(perm)); INodeDirectory inodeDirectory = new INodeDirectory( - HdfsConstants.GRANDFATHER_INODE_ID, name.getBytes("UTF-8"), permStatus, 0L); + HdfsConstants.GRANDFATHER_INODE_ID, name.getBytes(StandardCharsets.UTF_8), permStatus, 0L); parent.addChild(inodeDirectory); return inodeDirectory; } @@ -443,8 +444,8 @@ private static INodeFile createINodeFile(INodeDirectory parent, String name, PermissionStatus permStatus = PermissionStatus.createImmutable(owner, group, FsPermission.createImmutable(perm)); INodeFile inodeFile = new INodeFile(HdfsConstants.GRANDFATHER_INODE_ID, - name.getBytes("UTF-8"), permStatus, 0L, 0L, null, REPLICATION, - PREFERRED_BLOCK_SIZE); + name.getBytes(StandardCharsets.UTF_8), permStatus, 0L, 0L, null, + REPLICATION, PREFERRED_BLOCK_SIZE); parent.addChild(inodeFile); return inodeFile; } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java index 2a9eda3ca5..8d4281c038 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java @@ -316,7 +316,7 @@ public void testLengthParamLongerThanFile() throws IOException { String content = "testLengthParamLongerThanFile"; FSDataOutputStream testFileOut = webhdfs.create(testFile); try { - testFileOut.write(content.getBytes("US-ASCII")); + testFileOut.write(content.getBytes(StandardCharsets.US_ASCII)); } finally { IOUtils.closeStream(testFileOut); } @@ -366,7 +366,7 @@ public void testOffsetPlusLengthParamsLongerThanFile() throws IOException { String content = "testOffsetPlusLengthParamsLongerThanFile"; FSDataOutputStream testFileOut = webhdfs.create(testFile); try { - testFileOut.write(content.getBytes("US-ASCII")); + testFileOut.write(content.getBytes(StandardCharsets.US_ASCII)); } finally { IOUtils.closeStream(testFileOut); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java index 5368bc7481..b836120a8d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java @@ -210,7 +210,7 @@ protected void doUnregistration() || jobImpl.getInternalState() == JobStateInternal.ERROR) { finishState = FinalApplicationStatus.FAILED; } - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (String s : job.getDiagnostics()) { sb.append(s).append("\n"); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java index 3ed65b94cd..982d364f32 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java @@ -120,7 +120,7 @@ public JobInfo(Job job, Boolean hasAccess) { List<String> diagnostics = job.getDiagnostics(); if (diagnostics != null && !diagnostics.isEmpty()) { - StringBuffer b = new StringBuffer(); + StringBuilder b = new StringBuilder(); for (String diag : diagnostics) { b.append(diag); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobResourceUploaderWithSharedCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobResourceUploaderWithSharedCache.java index 5555043bcd..002ee712d2 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobResourceUploaderWithSharedCache.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/TestJobResourceUploaderWithSharedCache.java @@ -32,6 +32,7 @@ import
java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Map; import java.util.jar.JarOutputStream; import java.util.zip.ZipEntry; @@ -339,12 +340,12 @@ private Path makeJar(Path p, int index) throws FileNotFoundException, IOException { FileOutputStream fos = new FileOutputStream(new File(p.toUri().getPath())); - JarOutputStream jos = new JarOutputStream(fos); - ZipEntry ze = new ZipEntry("distributed.jar.inside" + index); - jos.putNextEntry(ze); - jos.write(("inside the jar!" + index).getBytes()); - jos.closeEntry(); - jos.close(); + try (JarOutputStream jos = new JarOutputStream(fos)) { + ZipEntry ze = new ZipEntry("distributed.jar.inside" + index); + jos.putNextEntry(ze); + jos.write(("inside the jar!" + index).getBytes()); + jos.closeEntry(); + } localFs.setPermission(p, new FsPermission("700")); return p; } @@ -354,12 +355,12 @@ private Path makeArchive(String archiveFile, String filename) Path archive = new Path(testRootDir, archiveFile); Path file = new Path(testRootDir, filename); DataOutputStream out = localFs.create(archive); - ZipOutputStream zos = new ZipOutputStream(out); - ZipEntry ze = new ZipEntry(file.toString()); - zos.putNextEntry(ze); - zos.write(input.getBytes("UTF-8")); - zos.closeEntry(); - zos.close(); + try (ZipOutputStream zos = new ZipOutputStream(out)) { + ZipEntry ze = new ZipEntry(file.toString()); + zos.putNextEntry(ze); + zos.write(input.getBytes(StandardCharsets.UTF_8)); + zos.closeEntry(); + } return archive; } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java index ae68d74d8d..ec44dd77ef 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java @@ -92,7 +92,7 @@ public void after() { private static LineReader makeStream(String str) throws IOException { return new LineReader(new ByteArrayInputStream( - str.getBytes("UTF-8")), defaultConf); + str.getBytes(StandardCharsets.UTF_8)), defaultConf); } private static void writeFile(FileSystem fs, Path name, diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java index 4e14797a16..f0b1df3eac 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFieldSelection.java @@ -26,6 +26,7 @@ import org.junit.Test; import static org.junit.Assert.assertEquals; +import java.nio.charset.StandardCharsets; import java.text.NumberFormat; public class TestFieldSelection { @@ -60,7 +61,7 @@ public static void launch() throws Exception { TestMRFieldSelection.constructInputOutputData(inputData, expectedOutput, 
numOfInputLines); FSDataOutputStream fileOut = fs.create(new Path(INPUT_DIR, inputFile)); - fileOut.write(inputData.toString().getBytes("utf-8")); + fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8)); fileOut.close(); System.out.println("inputData:"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java index 0991ae0b38..301cadb08b 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestKeyValueTextInputFormat.java @@ -30,6 +30,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -134,9 +135,7 @@ public void testFormat() throws Exception { } } private LineReader makeStream(String str) throws IOException { - return new LineReader(new ByteArrayInputStream - (str.getBytes("UTF-8")), - defaultConf); + return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf); } @Test public void testUTF8() throws Exception { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java index 22d9a57b89..9a2576ec66 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestTextInputFormat.java @@ -42,6 +42,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.junit.Assert.*; public class TestTextInputFormat { @@ -330,14 +331,10 @@ private void verifyPartitions(int length, int numSplits, Path file, } private static LineReader makeStream(String str) throws IOException { - return new LineReader(new ByteArrayInputStream - (str.getBytes("UTF-8")), - defaultConf); + return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf); } private static LineReader makeStream(String str, int bufsz) throws IOException { - return new LineReader(new ByteArrayInputStream - (str.getBytes("UTF-8")), - bufsz); + return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), bufsz); } @Test (timeout=5000) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java index b839a2c3af..845139bf35 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/lib/aggregate/TestAggregates.java @@ -26,6 +26,7 @@ import static org.junit.Assert.assertEquals; import java.io.*; +import java.nio.charset.StandardCharsets; import java.util.*; import java.text.NumberFormat; @@ -55,8 +56,8 @@ public static void launch() throws Exception { fs.mkdirs(INPUT_DIR); fs.delete(OUTPUT_DIR, true); - StringBuffer inputData = new StringBuffer(); - StringBuffer expectedOutput = new StringBuffer(); + StringBuilder inputData = new StringBuilder(); + StringBuilder expectedOutput = new StringBuilder(); expectedOutput.append("max\t19\n"); expectedOutput.append("min\t1\n"); @@ -76,7 +77,7 @@ public static void launch() throws Exception { expectedOutput.append("uniq_count\t15\n"); - fileOut.write(inputData.toString().getBytes("utf-8")); + fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8)); fileOut.close(); System.out.println("inputData:"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java index 3aac54e715..1c354d1348 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/aggregate/TestMapReduceAggregates.java @@ -30,6 +30,7 @@ import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.junit.Test; +import java.nio.charset.StandardCharsets; import java.text.NumberFormat; import static org.junit.Assert.assertEquals; @@ -61,8 +62,8 @@ public static void launch() throws Exception { fs.mkdirs(INPUT_DIR); fs.delete(OUTPUT_DIR, true); - StringBuffer inputData = new StringBuffer(); - StringBuffer expectedOutput = new StringBuffer(); + StringBuilder inputData = new StringBuilder(); + StringBuilder expectedOutput = new StringBuilder(); expectedOutput.append("max\t19\n"); expectedOutput.append("min\t1\n"); @@ -82,7 +83,7 @@ public static void launch() throws Exception { expectedOutput.append("uniq_count\t15\n"); - fileOut.write(inputData.toString().getBytes("utf-8")); + fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8)); fileOut.close(); System.out.println("inputData:"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java index 537d23c5f0..3784846002 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestMRKeyValueTextInputFormat.java @@ -45,6 +45,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static java.nio.charset.StandardCharsets.UTF_8; import static org.junit.Assert.*; 
public class TestMRKeyValueTextInputFormat { @@ -253,9 +254,7 @@ public void testSplitableCodecs() throws Exception { } private LineReader makeStream(String str) throws IOException { - return new LineReader(new ByteArrayInputStream - (str.getBytes("UTF-8")), - defaultConf); + return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf); } @Test diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java index 95b6264ba6..ba3b0d7a59 100644 --- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java +++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java @@ -14,6 +14,7 @@ package org.apache.hadoop.maven.plugin.cmakebuilder; +import java.nio.charset.StandardCharsets; import java.util.Locale; import org.apache.hadoop.maven.plugin.util.Exec; import org.apache.maven.execution.MavenSession; @@ -165,7 +166,7 @@ private void writeStatusFile(String status) throws IOException { testName + ".pstatus")); BufferedWriter out = null; try { - out = new BufferedWriter(new OutputStreamWriter(fos, "UTF8")); + out = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8)); out.write(status + "\n"); } finally { if (out != null) { diff --git a/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java b/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java index c72a926b13..0773c79bdf 100644 --- a/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java +++ b/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java @@ -691,7 +691,7 @@ public void map(LongWritable key, HarEntry value, if (value.isDir()) { towrite = encodeName(relPath.toString()) + " dir " + propStr + " 0 0 "; - StringBuffer sbuff = new StringBuffer(); + StringBuilder sbuff = new StringBuilder(); sbuff.append(towrite); for (String child: value.children) { sbuff.append(encodeName(child) + " "); diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java index 5f54673d7a..1aee53def1 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SharedKeyCredentials.java @@ -24,6 +24,7 @@ import java.net.HttpURLConnection; import java.net.URL; import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; @@ -98,12 +99,7 @@ public void signRequest(HttpURLConnection connection, final long contentLength) } private String computeHmac256(final String stringToSign) { - byte[] utf8Bytes; - try { - utf8Bytes = stringToSign.getBytes(AbfsHttpConstants.UTF_8); - } catch (final UnsupportedEncodingException e) { - throw new IllegalArgumentException(e); - } + byte[] utf8Bytes = stringToSign.getBytes(StandardCharsets.UTF_8); byte[] hmac; synchronized (this) { hmac = hmacSha256.doFinal(utf8Bytes); diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java 
b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java index 5735423aaf..c9f89e6643 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemDelegationSAS.java @@ -390,7 +390,7 @@ public void testProperties() throws Exception { fs.create(reqPath).close(); final String propertyName = "user.mime_type"; - final byte[] propertyValue = "text/plain".getBytes("utf-8"); + final byte[] propertyValue = "text/plain".getBytes(StandardCharsets.UTF_8); fs.setXAttr(reqPath, propertyName, propertyValue); assertArrayEquals(propertyValue, fs.getXAttr(reqPath, propertyName)); diff --git a/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/ITestProvidedImplementation.java b/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/ITestProvidedImplementation.java index cf86dd7dcb..361243fd69 100644 --- a/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/ITestProvidedImplementation.java +++ b/hadoop-tools/hadoop-fs2img/src/test/java/org/apache/hadoop/hdfs/server/namenode/ITestProvidedImplementation.java @@ -28,6 +28,7 @@ import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.nio.channels.ReadableByteChannel; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -180,7 +181,7 @@ public void setSeed() throws Exception { LOG.info("Creating " + newFile.toString()); newFile.createNewFile(); Writer writer = new OutputStreamWriter( - new FileOutputStream(newFile.getAbsolutePath()), "utf-8"); + new FileOutputStream(newFile.getAbsolutePath()), StandardCharsets.UTF_8); for(int j=0; j < baseFileLen*i; j++) { writer.write("0"); } diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java index 9ecd9e8e5d..a4a2a3538c 100644 --- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java +++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/ExecutionSummarizer.java @@ -222,7 +222,7 @@ public String toString() { // Gets the stringified version of DataStatistics static String stringifyDataStatistics(DataStatistics stats) { if (stats != null) { - StringBuffer buffer = new StringBuffer(); + StringBuilder buffer = new StringBuilder(); String compressionStatus = stats.isDataCompressed() ? 
"Compressed" : "Uncompressed"; diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java index a6983e1c6c..cfa6e77b0b 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java @@ -128,7 +128,7 @@ static URL qualifyHost(URL url) { static final String regexpSpecials = "[]()?*+|.!^-\\~@"; public static String regexpEscape(String plain) { - StringBuffer buf = new StringBuffer(); + StringBuilder buf = new StringBuilder(); char[] ch = plain.toCharArray(); int csup = ch.length; for (int c = 0; c < csup; c++) { diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java index 14f0f9607e..23bb36ba0e 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamDataProtocol.java @@ -19,11 +19,8 @@ package org.apache.hadoop.streaming; import java.io.*; -import java.util.*; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; +import java.nio.charset.StandardCharsets; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner; import org.junit.Test; @@ -59,7 +56,7 @@ protected void createInput() throws IOException { DataOutputStream out = new DataOutputStream( new FileOutputStream(INPUT_FILE.getAbsoluteFile())); - out.write(input.getBytes("UTF-8")); + out.write(input.getBytes(StandardCharsets.UTF_8)); out.close(); } diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java index 766402184c..4bb20c7621 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamReduceNone.java @@ -19,11 +19,8 @@ package org.apache.hadoop.streaming; import java.io.*; -import java.util.*; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; +import java.nio.charset.StandardCharsets; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.Path; import static org.junit.Assert.*; import org.junit.Test; @@ -56,7 +53,7 @@ protected void createInput() throws IOException { DataOutputStream out = new DataOutputStream( new FileOutputStream(INPUT_FILE.getAbsoluteFile())); - out.write(input.getBytes("UTF-8")); + out.write(input.getBytes(StandardCharsets.UTF_8)); out.close(); } diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java index 53009dbbab..077b02c6cb 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamXmlRecordReader.java @@ -21,6 +21,7 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; 
+import java.nio.charset.StandardCharsets; /** * This class tests StreamXmlRecordReader @@ -44,9 +45,9 @@ protected void createInput() throws IOException FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile()); String dummyXmlStartTag = "\n"; String dummyXmlEndTag = "\n"; - out.write(dummyXmlStartTag.getBytes("UTF-8")); - out.write(input.getBytes("UTF-8")); - out.write(dummyXmlEndTag.getBytes("UTF-8")); + out.write(dummyXmlStartTag.getBytes(StandardCharsets.UTF_8)); + out.write(input.getBytes(StandardCharsets.UTF_8)); + out.write(dummyXmlEndTag.getBytes(StandardCharsets.UTF_8)); out.close(); } diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java index 4f39120a16..5139cf617d 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreaming.java @@ -19,6 +19,7 @@ package org.apache.hadoop.streaming; import java.io.*; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -120,7 +121,7 @@ protected void createInput() throws IOException { DataOutputStream out = getFileSystem().create(new Path( INPUT_FILE.getPath())); - out.write(getInputData().getBytes("UTF-8")); + out.write(getInputData().getBytes(StandardCharsets.UTF_8)); out.close(); } diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java index c21cb159f4..ff95bd49ce 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingKeyValue.java @@ -22,6 +22,7 @@ import static org.junit.Assert.*; import java.io.*; +import java.nio.charset.StandardCharsets; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.mapreduce.MRJobConfig; @@ -68,7 +69,7 @@ protected void createInput() throws IOException { DataOutputStream out = new DataOutputStream( new FileOutputStream(INPUT_FILE.getAbsoluteFile())); - out.write(input.getBytes("UTF-8")); + out.write(input.getBytes(StandardCharsets.UTF_8)); out.close(); } diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java index f8167bbdd7..66ee174be7 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingSeparator.java @@ -22,11 +22,8 @@ import static org.junit.Assert.*; import java.io.*; -import java.util.*; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; +import java.nio.charset.StandardCharsets; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.fs.Path; /** * This class tests hadoopStreaming with customized separator in MapReduce local mode. 
@@ -64,7 +61,7 @@ protected void createInput() throws IOException { DataOutputStream out = new DataOutputStream( new FileOutputStream(INPUT_FILE.getAbsoluteFile())); - out.write(input.getBytes("UTF-8")); + out.write(input.getBytes(StandardCharsets.UTF_8)); out.close(); } diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java index 05a050cac8..02daa1894c 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestTypedBytesStreaming.java @@ -22,6 +22,7 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; +import java.nio.charset.StandardCharsets; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; @@ -48,7 +49,7 @@ public TestTypedBytesStreaming() throws IOException { protected void createInput() throws IOException { DataOutputStream out = new DataOutputStream(new FileOutputStream(INPUT_FILE.getAbsoluteFile())); - out.write(input.getBytes("UTF-8")); + out.write(input.getBytes(StandardCharsets.UTF_8)); out.close(); } diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java index 5bf2fe52d4..270a1f7fd9 100644 --- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java +++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java @@ -101,9 +101,9 @@ public void createInput() throws IOException { FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile()); String dummyXmlStartTag = "\n"; String dummyXmlEndTag = "\n"; - out.write(dummyXmlStartTag.getBytes("UTF-8")); - out.write(input.getBytes("UTF-8")); - out.write(dummyXmlEndTag.getBytes("UTF-8")); + out.write(dummyXmlStartTag.getBytes(StandardCharsets.UTF_8)); + out.write(input.getBytes(StandardCharsets.UTF_8)); + out.write(dummyXmlEndTag.getBytes(StandardCharsets.UTF_8)); out.close(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestTopCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestTopCLI.java index 63ebffaca4..6aaab2f18f 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestTopCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestTopCLI.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.io.PrintStream; import java.net.URL; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -157,7 +158,7 @@ public void testHeaderNodeManagers() throws Exception { System.setErr(out); topcli.showTopScreen(); out.flush(); - actual = outStream.toString("UTF-8"); + actual = outStream.toString(StandardCharsets.UTF_8.name()); } String expected = "NodeManager(s)" diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java index 57ec83fc61..25f7747df8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java @@ -38,6 +38,7 @@ import java.io.PrintStream; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Arrays; @@ -364,7 +365,7 @@ public void testGetContainers() throws Exception { verify(client).getContainers(attemptId); ByteArrayOutputStream baos = new ByteArrayOutputStream(); OutputStreamWriter stream = - new OutputStreamWriter(baos, "UTF-8"); + new OutputStreamWriter(baos, StandardCharsets.UTF_8); PrintWriter pw = new PrintWriter(stream); pw.println("Total number of containers :3"); pw.printf(ApplicationCLI.CONTAINER_PATTERN, "Container-Id", "Start Time", diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java index 5a4beca990..007721f2ec 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/TestAggregatedLogFormat.java @@ -30,6 +30,7 @@ import java.io.UnsupportedEncodingException; import java.io.Writer; import java.net.URI; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.nio.file.StandardOpenOption; @@ -159,7 +160,7 @@ private void writeSrcFileAndALog(Path srcFilePath, String fileName, final long l File outputFile = new File(new File(srcFilePath.toString()), fileName); FileOutputStream os = new FileOutputStream(outputFile); - final OutputStreamWriter osw = new OutputStreamWriter(os, "UTF8"); + final OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8); final int ch = filler; UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); @@ -473,7 +474,7 @@ private OutputStreamWriter getOutputStreamWriter(Path srcFilePath, } File outputFile = new File(new File(srcFilePath.toString()), fileName); FileOutputStream os = new FileOutputStream(outputFile); - OutputStreamWriter osw = new OutputStreamWriter(os, "UTF8"); + OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8); return osw; } } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java index eb3db5eee1..355398e9f7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java +++ 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
index eb3db5eee1..355398e9f7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/TestApplicationHistoryServer.java
@@ -24,6 +24,7 @@
 import java.io.InputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -227,6 +228,6 @@ private String readInputStream(InputStream input) throws Exception {
     while ((read = input.read(buffer)) >= 0) {
       data.write(buffer, 0, read);
     }
-    return new String(data.toByteArray(), "UTF-8");
+    return new String(data.toByteArray(), StandardCharsets.UTF_8);
   }
 }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java
index 9c4c2c72e5..86d78f2fc3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java
@@ -21,6 +21,7 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -585,7 +586,7 @@ private Map<String, Token<AMRMTokenIdentifier>> recoverSubClusterAMRMTokenIdenti
           // entry for subClusterId -> UAM AMRMTokenIdentifier
           String scId = key.substring(NMSS_SECONDARY_SC_PREFIX.length());
           Token<AMRMTokenIdentifier> aMRMTokenIdentifier = new Token<>();
-          aMRMTokenIdentifier.decodeFromUrlString(new String(value, STRING_TO_BYTE_FORMAT));
+          aMRMTokenIdentifier.decodeFromUrlString(new String(value, StandardCharsets.UTF_8));
           uamMap.put(scId, aMRMTokenIdentifier);
           LOG.debug("Recovered UAM in {} from NMSS.", scId);
         }
@@ -1345,7 +1346,7 @@ private List<SubClusterId> registerAndAllocateWithNewSubClusters(
         } else if (getNMStateStore() != null) {
           getNMStateStore().storeAMRMProxyAppContextEntry(attemptId,
               NMSS_SECONDARY_SC_PREFIX + subClusterId,
-              token.encodeToUrlString().getBytes(STRING_TO_BYTE_FORMAT));
+              token.encodeToUrlString().getBytes(StandardCharsets.UTF_8));
         }
       } catch (Throwable e) {
         LOG.error("Failed to persist UAM token from {} Application {}",
@@ -1884,7 +1885,7 @@ public void callback(AllocateResponse response) {
             try {
               getNMStateStore().storeAMRMProxyAppContextEntry(attemptId,
                   NMSS_SECONDARY_SC_PREFIX + subClusterId.getId(),
-                  newToken.encodeToUrlString().getBytes(STRING_TO_BYTE_FORMAT));
+                  newToken.encodeToUrlString().getBytes(StandardCharsets.UTF_8));
             } catch (IOException e) {
               LOG.error("Error storing UAM token as AMRMProxy "
                   + "context entry in NMSS for {}.", attemptId, e);
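Side note on FederationInterceptor: the class-local charset-name constant STRING_TO_BYTE_FORMAT is replaced by the shared StandardCharsets.UTF_8, and the Charset-based getBytes/new String overloads do not declare the checked UnsupportedEncodingException that the String-named overloads carry. A minimal sketch of the byte round-trip involved (the encoded string is a hypothetical stand-in for Token#encodeToUrlString() output, not real data):

    import java.nio.charset.StandardCharsets;

    public class UrlStringRoundTrip {
      public static void main(String[] args) {
        String encoded = "CAESDHNvbWUtcGF5bG9hZA"; // hypothetical URL-safe token string
        byte[] stored = encoded.getBytes(StandardCharsets.UTF_8);   // no checked exception
        String recovered = new String(stored, StandardCharsets.UTF_8);
        System.out.println(encoded.equals(recovered));              // true
      }
    }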
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsBlkioResourceHandlerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsBlkioResourceHandlerImpl.java
index 20d9d5ccf9..865d2b19fd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsBlkioResourceHandlerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsBlkioResourceHandlerImpl.java
@@ -31,6 +31,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.ArrayList;
@@ -73,7 +74,7 @@ private void checkDiskScheduler() {
     // are using the CFQ scheduler. If they aren't print a warning
     try {
       byte[] contents = Files.readAllBytes(Paths.get(PARTITIONS_FILE));
-      data = new String(contents, "UTF-8").trim();
+      data = new String(contents, StandardCharsets.UTF_8).trim();
     } catch (IOException e) {
       String msg = "Couldn't read " + PARTITIONS_FILE
           + "; can't determine disk scheduler type";
@@ -96,7 +97,7 @@ private void checkDiskScheduler() {
     if (schedulerFile.exists()) {
       try {
         byte[] contents = Files.readAllBytes(Paths.get(schedulerPath));
-        String schedulerString = new String(contents, "UTF-8").trim();
+        String schedulerString = new String(contents, StandardCharsets.UTF_8).trim();
         if (!schedulerString.contains("[cfq]")) {
           LOG.warn("Device " + partition + " does not use the CFQ"
               + " scheduler; disk isolation using "
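Side note: Files.readAllBytes plus new String(bytes, UTF_8) is the pre-Java-11 idiom for slurping a small file; on Java 11+ the one-liner Files.readString(path) does the same and defaults to UTF-8. A runnable sketch (the temp file stands in for /proc/partitions, which only exists on Linux):

    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class ReadWholeFile {
      public static void main(String[] args) throws Exception {
        Path p = Files.createTempFile("partitions", ".txt"); // stand-in for /proc/partitions
        Files.write(p, "major minor  #blocks  name\n".getBytes(StandardCharsets.UTF_8));
        // Idiom used in the patch:
        String data = new String(Files.readAllBytes(p), StandardCharsets.UTF_8).trim();
        System.out.println(data);
        // Java 11+ equivalent, UTF-8 by default: Files.readString(p)
      }
    }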
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsCpuResourceHandlerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsCpuResourceHandlerImpl.java
index 4ce1375f8e..f724b8803d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsCpuResourceHandlerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsCpuResourceHandlerImpl.java
@@ -37,6 +37,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -132,7 +133,7 @@ public static boolean cpuLimitsExist(String path)
     File quotaFile = new File(path, CPU.getName() + "."
         + CGroupsHandler.CGROUP_CPU_QUOTA_US);
     if (quotaFile.exists()) {
-      String contents = FileUtils.readFileToString(quotaFile, "UTF-8");
+      String contents = FileUtils.readFileToString(quotaFile, StandardCharsets.UTF_8);
       int quotaUS = Integer.parseInt(contents.trim());
       if (quotaUS != -1) {
         return true;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsHandlerImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsHandlerImpl.java
index 03038b86fc..40149abd40 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsHandlerImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsHandlerImpl.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.yarn.util.SystemClock;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.Arrays;
@@ -216,7 +217,7 @@ static Map<String, Set<String>> parseMtab(String mtab)
 
     try {
       FileInputStream fis = new FileInputStream(new File(mtab));
-      in = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+      in = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 
       for (String str = in.readLine(); str != null;
           str = in.readLine()) {
@@ -474,7 +475,7 @@ private void logLineFromTasksFile(File cgf) {
     if (LOG.isDebugEnabled()) {
       try (BufferedReader inl =
           new BufferedReader(new InputStreamReader(new FileInputStream(cgf
-              + "/tasks"), "UTF-8"))) {
+              + "/tasks"), StandardCharsets.UTF_8))) {
         str = inl.readLine();
         if (str != null) {
           LOG.debug("First line in cgroup tasks file: {} {}", cgf, str);
@@ -559,7 +560,7 @@ public void updateCGroupParam(CGroupController controller, String cGroupId,
 
     try {
       File file = new File(cGroupParamPath);
-      Writer w = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
+      Writer w = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8);
       pw = new PrintWriter(w);
       pw.write(value);
     } catch (IOException e) {
@@ -595,7 +596,7 @@ public String getCGroupParam(CGroupController controller, String cGroupId,
 
     try {
       byte[] contents = Files.readAllBytes(Paths.get(cGroupParamPath));
-      return new String(contents, "UTF-8").trim();
+      return new String(contents, StandardCharsets.UTF_8).trim();
     } catch (IOException e) {
       throw new ResourceHandlerException(
           "Unable to read from " + cGroupParamPath);
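Side note on the CGroupsCpuResourceHandlerImpl hunk above: commons-io deprecated FileUtils.readFileToString(File, String) in 2.5 in favor of the (File, Charset) overload, so this change also silences a deprecation warning. A sketch of the pattern (the file name echoes the cgroup cpu quota file but the path here is illustrative):

    import java.io.File;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.io.FileUtils;

    public class ReadQuotaFile {
      public static void main(String[] args) throws Exception {
        File quotaFile = File.createTempFile("cpu.cfs_quota_us", null); // illustrative
        FileUtils.writeStringToFile(quotaFile, "-1\n", StandardCharsets.UTF_8);
        String contents = FileUtils.readFileToString(quotaFile, StandardCharsets.UTF_8);
        System.out.println(Integer.parseInt(contents.trim()) != -1); // false: no limit set
      }
    }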
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java
index b171ed00e3..e5abca2826 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TrafficController.java
@@ -31,6 +31,7 @@ import org.slf4j.LoggerFactory;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.HashMap;
@@ -619,9 +620,9 @@ public PrivilegedOperation commitBatchToTempFile()
           File(tmpDirPath));
 
       try (
-          Writer writer = new OutputStreamWriter(new FileOutputStream(tcCmds),
-              "UTF-8");
-          PrintWriter printWriter = new PrintWriter(writer)) {
+          Writer writer = new OutputStreamWriter(new FileOutputStream(tcCmds),
+              StandardCharsets.UTF_8);
+          PrintWriter printWriter = new PrintWriter(writer)) {
         for (String command : commands) {
           printWriter.println(command);
         }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DefaultLinuxContainerRuntime.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DefaultLinuxContainerRuntime.java
index 20e281b121..168f033c24 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DefaultLinuxContainerRuntime.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/DefaultLinuxContainerRuntime.java
@@ -48,6 +48,7 @@
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -257,9 +258,9 @@ private String writeCommandToTempFile(ContainerExecContext ctx)
     File commandFile = File.createTempFile(TMP_FILE_PREFIX + filePrefix,
         TMP_FILE_SUFFIX, cmdDir);
     try (
-        Writer writer = new OutputStreamWriter(
-            new FileOutputStream(commandFile.toString()), "UTF-8");
-        PrintWriter printWriter = new PrintWriter(writer);
+        Writer writer = new OutputStreamWriter(
+            new FileOutputStream(commandFile.toString()), StandardCharsets.UTF_8);
+        PrintWriter printWriter = new PrintWriter(writer);
     ) {
       Map<String, List<String>> cmd = new HashMap<String, List<String>>();
       // command = exec
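Side note: the OutputStreamWriter-over-FileOutputStream chain with an explicit Charset, wrapped in a PrintWriter inside try-with-resources, recurs throughout these files. The NIO equivalent is shorter and also UTF-8-explicit; a sketch under assumed names (the temp file and sample command line are illustrative only):

    import java.io.PrintWriter;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class WriteCommands {
      public static void main(String[] args) throws Exception {
        Path cmds = Files.createTempFile("tc-cmds-", ".tmp"); // illustrative temp file
        // Same effect as new OutputStreamWriter(new FileOutputStream(...), UTF_8):
        try (PrintWriter pw = new PrintWriter(
            Files.newBufferedWriter(cmds, StandardCharsets.UTF_8))) {
          pw.println("qdisc add dev eth0 root handle 42:0 htb"); // sample command line
        }
        System.out.println(Files.readAllLines(cmds, StandardCharsets.UTF_8));
      }
    }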
@@ -55,9 +56,9 @@ private String writeEnvFile(DockerRunCommand cmd, String filePrefix,
     File dockerEnvFile = File.createTempFile(TMP_FILE_PREFIX + filePrefix,
         TMP_ENV_FILE_SUFFIX, cmdDir);
     try (
-        Writer envWriter = new OutputStreamWriter(
-            new FileOutputStream(dockerEnvFile), "UTF-8");
-        PrintWriter envPrintWriter = new PrintWriter(envWriter);
+        Writer envWriter = new OutputStreamWriter(
+            new FileOutputStream(dockerEnvFile), StandardCharsets.UTF_8);
+        PrintWriter envPrintWriter = new PrintWriter(envWriter);
     ) {
       for (Map.Entry<String, String> entry : cmd.getEnv()
           .entrySet()) {
@@ -94,7 +95,7 @@ public String writeCommandToTempFile(DockerCommand cmd,
         TMP_FILE_SUFFIX, cmdDir);
     try (
       Writer writer = new OutputStreamWriter(
-          new FileOutputStream(dockerCommandFile.toString()), "UTF-8");
+          new FileOutputStream(dockerCommandFile.toString()), StandardCharsets.UTF_8);
       PrintWriter printWriter = new PrintWriter(writer);
     ) {
       printWriter.println("[docker-command-execution]");
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/NvidiaDockerV1CommandPlugin.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/NvidiaDockerV1CommandPlugin.java
index 252e9b29b8..60f5313a90 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/NvidiaDockerV1CommandPlugin.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/gpu/NvidiaDockerV1CommandPlugin.java
@@ -38,6 +38,7 @@
 import java.io.StringWriter;
 import java.net.URL;
 import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -104,7 +105,7 @@ private void init() throws ContainerExecutionException {
       uc.setRequestProperty("X-Requested-With", "Curl");
 
       StringWriter writer = new StringWriter();
-      IOUtils.copy(uc.getInputStream(), writer, "utf-8");
+      IOUtils.copy(uc.getInputStream(), writer, StandardCharsets.UTF_8);
       cliOptions = writer.toString();
 
       LOG.info("Additional docker CLI options from plugin to run GPU "
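Side note on the NvidiaDockerV1CommandPlugin hunk: commons-io mirrors the JDK here; IOUtils.copy(InputStream, Writer, Charset) supersedes the String-named overload, so the lowercase "utf-8" literal goes away as well. A self-contained sketch (the docker option string is made up for illustration):

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.io.StringWriter;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.io.IOUtils;

    public class CopyToWriter {
      public static void main(String[] args) throws Exception {
        InputStream in = new ByteArrayInputStream(
            "--device=/dev/nvidia0".getBytes(StandardCharsets.UTF_8)); // illustrative payload
        StringWriter writer = new StringWriter();
        IOUtils.copy(in, writer, StandardCharsets.UTF_8); // Charset overload
        System.out.println(writer);
      }
    }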
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java
index b97549305a..0dde3b1576 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/CgroupsLCEResourcesHandler.java
@@ -27,6 +27,7 @@
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -220,7 +221,7 @@ private void updateCgroup(String controller, String groupName, String param,
     PrintWriter pw = null;
     try {
       File file = new File(path + "/" + param);
-      Writer w = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
+      Writer w = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8);
       pw = new PrintWriter(w);
       pw.write(value);
     } catch (IOException e) {
@@ -249,7 +250,7 @@ private void logLineFromTasksFile(File cgf) {
     if (LOG.isDebugEnabled()) {
       try (BufferedReader inl =
           new BufferedReader(new InputStreamReader(new FileInputStream(cgf
-              + "/tasks"), "UTF-8"))) {
+              + "/tasks"), StandardCharsets.UTF_8))) {
         str = inl.readLine();
         if (str != null) {
           LOG.debug("First line in cgroup tasks file: {} {}", cgf, str);
@@ -403,7 +404,7 @@ private Map<String, Set<String>> parseMtab() throws IOException {
 
     try {
       FileInputStream fis = new FileInputStream(new File(getMtabFileName()));
-      in = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+      in = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 
       for (String str = in.readLine(); str != null;
           str = in.readLine()) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java
index c492ee4ae7..318d774072 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/util/ProcessIdFileReader.java
@@ -22,13 +22,14 @@
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 
 /**
  * Helper functionality to read the pid from a file.
@@ -57,7 +58,7 @@ public static String getProcessId(Path path) throws IOException {
       File file = new File(path.toString());
       if (file.exists()) {
         FileInputStream fis = new FileInputStream(file);
-        bufReader = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+        bufReader = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 
         while (true) {
           String line = bufReader.readLine();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
index ff47b606c7..d801652377 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/FSConfigToCSConfigConverter.java
@@ -346,7 +346,7 @@ private void performRuleConversion(FairScheduler fs)
 
       if (!rulesToFile) {
         String json = ((ByteArrayOutputStream)mappingRulesOutputStream)
-            .toString(StandardCharsets.UTF_8.displayName());
+            .toString(StandardCharsets.UTF_8.name());
         capacitySchedulerConfig.setMappingRuleJson(json);
       }
     } else {
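Side note on the FSConfigToCSConfigConverter hunk: Charset.name() is the canonical registry name, while displayName() is documented as a human-readable and potentially locale-dependent name. For the JDK's UTF-8 both currently return "UTF-8", so behavior is unchanged, but name() is the right value to pass back into an API that expects a charset name. Illustrative sketch:

    import java.nio.charset.StandardCharsets;

    public class CharsetNames {
      public static void main(String[] args) {
        System.out.println(StandardCharsets.UTF_8.name());        // UTF-8, canonical
        System.out.println(StandardCharsets.UTF_8.displayName()); // UTF-8 here; may differ for other charsets/locales
      }
    }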
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
index a51bd2afec..2cf5c9b073 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
@@ -817,7 +817,7 @@ public void testAppSubmit(String acceptMedia, String contentMedia)
     HashMap<String, String> tokens = new HashMap<>();
     HashMap<String, String> secrets = new HashMap<>();
     secrets.put("secret1", Base64.encodeBase64String(
-        "mysecret".getBytes("UTF8")));
+        "mysecret".getBytes(StandardCharsets.UTF_8)));
     credentials.setSecrets(secrets);
     credentials.setTokens(tokens);
     ApplicationSubmissionContextInfo appInfo = new ApplicationSubmissionContextInfo();
@@ -840,7 +840,7 @@ public void testAppSubmit(String acceptMedia, String contentMedia)
     appInfo.getContainerLaunchContextInfo().setEnvironment(environment);
     appInfo.getContainerLaunchContextInfo().setAcls(acls);
     appInfo.getContainerLaunchContextInfo().getAuxillaryServiceData()
-        .put("test", Base64.encodeBase64URLSafeString("value12".getBytes("UTF8")));
+        .put("test", Base64.encodeBase64URLSafeString("value12".getBytes(StandardCharsets.UTF_8)));
     appInfo.getContainerLaunchContextInfo().setCredentials(credentials);
     appInfo.getResource().setMemory(1024);
     appInfo.getResource().setvCores(1);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java
index ea28656834..678e7a7d2d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokenAuthentication.java
@@ -31,6 +31,7 @@
 import java.io.StringWriter;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.concurrent.Callable;
@@ -250,7 +251,7 @@ public void testDelegationTokenAuth() throws Exception {
     InputStream errorStream = conn.getErrorStream();
     String error = "";
     BufferedReader reader = null;
-    reader = new BufferedReader(new InputStreamReader(errorStream, "UTF8"));
+    reader = new BufferedReader(new InputStreamReader(errorStream, StandardCharsets.UTF_8));
     for (String line; (line = reader.readLine()) != null;) {
       error += line;
     }
@@ -356,7 +357,7 @@ public Void call() throws Exception {
         assertEquals(Status.OK.getStatusCode(), conn.getResponseCode());
         BufferedReader reader = null;
         try {
-          reader = new BufferedReader(new InputStreamReader(response, "UTF8"));
+          reader = new BufferedReader(new InputStreamReader(response, StandardCharsets.UTF_8));
           for (String line; (line = reader.readLine()) != null;) {
             JSONObject obj = new JSONObject(line);
             if (obj.has("token")) {
@@ -432,7 +433,7 @@ public String call() throws Exception {
         InputStream response = conn.getInputStream();
         assertEquals(Status.OK.getStatusCode(), conn.getResponseCode());
         try (BufferedReader reader = new BufferedReader(new InputStreamReader(
-            response, "UTF8"))) {
+            response, StandardCharsets.UTF_8))) {
           String line;
           while ((line = reader.readLine()) != null) {
             JSONObject obj = new JSONObject(line);
@@ -490,7 +491,7 @@ static void setupConn(HttpURLConnection conn, String method,
       conn.setRequestProperty("Content-Type", contentType + ";charset=UTF8");
       if (body != null && !body.isEmpty()) {
         OutputStream stream = conn.getOutputStream();
-        stream.write(body.getBytes("UTF8"));
+        stream.write(body.getBytes(StandardCharsets.UTF_8));
         stream.close();
       }
     }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesHttpStaticUserPermissions.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesHttpStaticUserPermissions.java
index cef32f4c2b..dbd0e425b3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesHttpStaticUserPermissions.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesHttpStaticUserPermissions.java
@@ -28,6 +28,7 @@
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.HashMap;
 
@@ -181,7 +182,7 @@ public void testWebServiceAccess() throws Exception {
         InputStream errorStream = conn.getErrorStream();
         String error = "";
         BufferedReader reader = new BufferedReader(
-            new InputStreamReader(errorStream, "UTF8"));
+            new InputStreamReader(errorStream, StandardCharsets.UTF_8));
         for (String line; (line = reader.readLine()) != null;) {
           error += line;
         }
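Closing side note: beyond removing checked UnsupportedEncodingException handling and magic strings, pinning UTF-8 explicitly matters because the no-charset Reader/Writer constructors use the platform default charset, which only became UTF-8 everywhere in JDK 18 (JEP 400); on older JVMs it varies with OS and locale. A self-contained illustration of the pitfall these changes avoid:

    import java.io.ByteArrayInputStream;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    public class DefaultCharsetPitfall {
      public static void main(String[] args) throws Exception {
        byte[] utf8 = "caf\u00e9".getBytes(StandardCharsets.UTF_8);
        // No-charset constructor picks up the platform default, so results vary pre-JDK 18:
        try (InputStreamReader r = new InputStreamReader(new ByteArrayInputStream(utf8))) {
          System.out.println("default charset in use: " + r.getEncoding());
        }
        // Explicit charset is deterministic on every JVM and locale:
        System.out.println(new String(utf8, StandardCharsets.UTF_8)); // café
      }
    }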