diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
index ddadfbf217..b279787133 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/KDiag.java
@@ -46,7 +46,7 @@
 import java.io.PrintWriter;
 import java.lang.reflect.InvocationTargetException;
 import java.net.InetAddress;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
@@ -924,7 +924,7 @@ private void printEnv(String variable) {
    */
   private void dump(File file) throws IOException {
     try (InputStream in = Files.newInputStream(file.toPath())) {
-      for (String line : IOUtils.readLines(in, Charset.defaultCharset())) {
+      for (String line : IOUtils.readLines(in, StandardCharsets.UTF_8)) {
         println("%s", line);
       }
     }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
index 7eda7f5fa1..9c4fb64d14 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ProviderUtils.java
@@ -23,7 +23,7 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.hadoop.classification.VisibleForTesting;
 import org.apache.commons.io.IOUtils;
@@ -225,8 +225,7 @@ public static char[] locatePassword(String envWithPass, String fileWithPass)
         throw new IOException("Password file does not exist");
       }
       try (InputStream is = pwdFile.openStream()) {
-        pass = IOUtils.toString(is, Charset.defaultCharset()).trim()
-            .toCharArray();
+        pass = IOUtils.toString(is, StandardCharsets.UTF_8).trim().toCharArray();
       }
     }
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
index 28aa21376a..d1eab8f4e1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java
@@ -21,7 +21,6 @@
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.Collections;
@@ -223,8 +222,7 @@ public static boolean updateMapInternal(BiMap<Integer, String> map,
     Process process = Runtime.getRuntime().exec(
         new String[] { "bash", "-c", command });
     br = new BufferedReader(
-        new InputStreamReader(process.getInputStream(),
-            Charset.defaultCharset()));
+        new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8));
     String line = null;
     while ((line = br.readLine()) != null) {
       String[] nameId = line.split(regex);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
index 31b0546d36..47e44c9e09 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReflectionUtils.java
@@ -29,7 +29,7 @@
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -244,7 +244,7 @@ public static void logThreadInfo(Log log,
     try {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
       printThreadInfo(new PrintStream(buffer, false, "UTF-8"), title);
-      log.info(buffer.toString(Charset.defaultCharset().name()));
+      log.info(buffer.toString(StandardCharsets.UTF_8.name()));
     } catch (UnsupportedEncodingException ignored) {
     }
   }
@@ -273,7 +273,7 @@ public static void logThreadInfo(Logger log,
     try {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
       printThreadInfo(new PrintStream(buffer, false, "UTF-8"), title);
-      log.info(buffer.toString(Charset.defaultCharset().name()));
+      log.info(buffer.toString(StandardCharsets.UTF_8.name()));
     } catch (UnsupportedEncodingException ignored) {
     }
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
index 7ecbc056e6..49c5a38765 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
@@ -23,7 +23,7 @@
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.InterruptedIOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
@@ -949,11 +949,11 @@ private void runCommand() throws IOException {
       timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
     }
     final BufferedReader errReader =
-        new BufferedReader(new InputStreamReader(
-            process.getErrorStream(), Charset.defaultCharset()));
+        new BufferedReader(new InputStreamReader(process.getErrorStream(),
+            StandardCharsets.UTF_8));
     BufferedReader inReader =
-        new BufferedReader(new InputStreamReader(
-            process.getInputStream(), Charset.defaultCharset()));
+        new BufferedReader(new InputStreamReader(process.getInputStream(),
+            StandardCharsets.UTF_8));
     final StringBuffer errMsg = new StringBuffer();
 
     // read error and input streams as this would free up the buffers
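[Reviewer note] The ReflectionUtils hunks above also close a latent mismatch: the buffer is
written through a PrintStream created with "UTF-8", but was decoded back with the platform
default charset. A minimal sketch of that failure mode, not Hadoop code, just an
illustration of why the read side must name the same charset as the write side:

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;
    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    public class CharsetRoundTrip {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        // Encode as UTF-8, as printThreadInfo() does via new PrintStream(buffer, false, "UTF-8").
        try (PrintStream out = new PrintStream(buffer, false, "UTF-8")) {
          out.print("thread \u00FC");  // non-ASCII thread name
        }
        // Decoding with the platform default corrupts the non-ASCII bytes whenever the
        // JVM runs with a non-UTF-8 locale (e.g. windows-1252, or POSIX in a container).
        String maybeWrong = buffer.toString(Charset.defaultCharset().name());
        String right = buffer.toString(StandardCharsets.UTF_8.name());
        System.out.println(maybeWrong.equals(right));  // false on non-UTF-8 platforms
      }
    }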
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
index a9e7f57a29..c99b97e6e4 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
@@ -27,7 +27,7 @@
 import java.io.StringWriter;
 import java.lang.reflect.Method;
 import java.net.URI;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 
@@ -125,7 +125,7 @@ public InputStream getInputStream(PathData item) throws IOException {
 
   private String inputStreamToString(InputStream stream) throws IOException {
     StringWriter writer = new StringWriter();
-    IOUtils.copy(stream, writer, Charset.defaultCharset());
+    IOUtils.copy(stream, writer, StandardCharsets.UTF_8);
     return writer.toString();
   }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestKDiag.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestKDiag.java
index 3a94bd99b8..79dcd1afc5 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestKDiag.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestKDiag.java
@@ -36,7 +36,7 @@
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Properties;
 import java.util.concurrent.TimeUnit;
 
@@ -236,7 +236,7 @@ public void testKeytabUnknownPrincipal() throws Throwable {
    */
   private void dump(File file) throws IOException {
     try (FileInputStream in = new FileInputStream(file)) {
-      for (String line : IOUtils.readLines(in, Charset.defaultCharset())) {
+      for (String line : IOUtils.readLines(in, StandardCharsets.UTF_8)) {
         LOG.info(line);
       }
     }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java
index 366c114fc1..69e252222b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/web/TestWebDelegationToken.java
@@ -64,7 +64,7 @@
 import java.io.Writer;
 import java.net.HttpURLConnection;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.Principal;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
@@ -555,8 +555,7 @@ public Void run() throws Exception {
           HttpURLConnection conn = aUrl.openConnection(url, token);
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
               conn.getResponseCode());
-          List<String> ret = IOUtils.readLines(conn.getInputStream(),
-              Charset.defaultCharset());
+          List<String> ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals(FOO_USER, ret.get(0));
 
@@ -626,8 +625,7 @@ public Void run() throws Exception {
           HttpURLConnection conn = aUrl.openConnection(url, token);
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
               conn.getResponseCode());
-          List<String> ret = IOUtils
-              .readLines(conn.getInputStream(), Charset.defaultCharset());
+          List<String> ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals(FOO_USER, ret.get(0));
 
@@ -851,15 +849,14 @@ public void testProxyUser() throws Exception {
     HttpURLConnection conn =
         (HttpURLConnection) new URL(strUrl).openConnection();
     Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    List<String> ret =
-        IOUtils.readLines(conn.getInputStream(), Charset.defaultCharset());
+    List<String> ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
     Assert.assertEquals(1, ret.size());
     Assert.assertEquals(OK_USER, ret.get(0));
 
     strUrl = String.format("%s?user.name=%s&DOAS=%s", url.toExternalForm(),
         FOO_USER, OK_USER);
     conn = (HttpURLConnection) new URL(strUrl).openConnection();
     Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    ret = IOUtils.readLines(conn.getInputStream(), Charset.defaultCharset());
+    ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
     Assert.assertEquals(1, ret.size());
     Assert.assertEquals(OK_USER, ret.get(0));
 
@@ -877,7 +874,7 @@ public Void run() throws Exception {
         Assert.assertEquals(HttpURLConnection.HTTP_OK,
             conn.getResponseCode());
         List<String> ret = IOUtils
-            .readLines(conn.getInputStream(), Charset.defaultCharset());
+            .readLines(conn.getInputStream(), StandardCharsets.UTF_8);
         Assert.assertEquals(1, ret.size());
         Assert.assertEquals(OK_USER, ret.get(0));
 
@@ -898,7 +895,7 @@ public Void run() throws Exception {
        Assert.assertEquals(HttpURLConnection.HTTP_OK,
            conn.getResponseCode());
        ret = IOUtils
-           .readLines(conn.getInputStream(), Charset.defaultCharset());
+           .readLines(conn.getInputStream(), StandardCharsets.UTF_8);
        Assert.assertEquals(1, ret.size());
        Assert.assertEquals(FOO_USER, ret.get(0));
 
@@ -960,7 +957,7 @@ public Void run() throws Exception {
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
               conn.getResponseCode());
           List<String> ret = IOUtils
-              .readLines(conn.getInputStream(), Charset.defaultCharset());
+              .readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals("remoteuser=" + FOO_USER+ ":ugi=" + FOO_USER,
               ret.get(0));
@@ -969,8 +966,7 @@ public Void run() throws Exception {
           conn = aUrl.openConnection(url, token, OK_USER);
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
               conn.getResponseCode());
-          ret = IOUtils
-              .readLines(conn.getInputStream(), Charset.defaultCharset());
+          ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals("realugi=" + FOO_USER +":remoteuser=" + OK_USER +
               ":ugi=" + OK_USER, ret.get(0));
@@ -1022,8 +1018,7 @@ public Void run() throws Exception {
           HttpURLConnection conn = aUrl.openConnection(url, token, OK_USER);
           Assert.assertEquals(HttpURLConnection.HTTP_OK,
               conn.getResponseCode());
-          List<String> ret = IOUtils
-              .readLines(conn.getInputStream(), Charset.defaultCharset());
+          List<String> ret = IOUtils.readLines(conn.getInputStream(), StandardCharsets.UTF_8);
           Assert.assertEquals(1, ret.size());
           Assert.assertEquals("realugi=" + FOO_USER +":remoteuser=" + OK_USER +
               ":ugi=" + OK_USER, ret.get(0));
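[Reviewer note] Same pattern across all of these test hunks: every commons-io call that
decodes bytes now names UTF-8 explicitly instead of inheriting the JVM's file.encoding. A
sketch of the shape of the change, assuming a plain HTTP response body (helper name
hypothetical):

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.List;
    import org.apache.commons.io.IOUtils;

    class ReadLinesExample {
      static List<String> readBody(InputStream in) throws IOException {
        // Before: IOUtils.readLines(in, Charset.defaultCharset()) - the decode depended
        // on whatever -Dfile.encoding the test JVM happened to start with.
        // After: deterministic regardless of platform locale.
        return IOUtils.readLines(in, StandardCharsets.UTF_8);
      }
    }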
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/StatUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/StatUtils.java
index fef35d0561..8da6df88c2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/StatUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/StatUtils.java
@@ -22,7 +22,7 @@
 
 import java.io.BufferedReader;
 import java.io.InputStreamReader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -113,10 +113,9 @@ private static String getPermissionStringFromProcess(String[] shellCommand,
     ExecutorService executorService = Executors.newSingleThreadExecutor();
     executorService.awaitTermination(2000, TimeUnit.MILLISECONDS);
     try {
-      Future<String> future =
-          executorService.submit(() -> new BufferedReader(
-              new InputStreamReader(process.getInputStream(),
-                  Charset.defaultCharset())).lines().findFirst().orElse(""));
+      Future<String> future = executorService.submit(() -> new BufferedReader(
+          new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8)).lines()
+          .findFirst().orElse(""));
       return future.get();
     } finally {
       process.destroy();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java
index ae656742e9..803e428373 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestApplicationClassLoader.java
@@ -31,7 +31,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.jar.JarOutputStream;
 import java.util.zip.ZipEntry;
@@ -135,7 +135,7 @@ public void testGetResource() throws IOException {
 
     InputStream in = appClassloader.getResourceAsStream("resource.txt");
     assertNotNull("Resource should not be null for app classloader", in);
-    assertEquals("hello", IOUtils.toString(in, Charset.defaultCharset()));
+    assertEquals("hello", IOUtils.toString(in, StandardCharsets.UTF_8));
   }
 
   private File makeTestJar() throws IOException {
diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/AbstractSecureRegistryTest.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/AbstractSecureRegistryTest.java
index 75b6fb287d..a510f84bd9 100644
--- a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/AbstractSecureRegistryTest.java
+++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/AbstractSecureRegistryTest.java
@@ -49,7 +49,7 @@
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.Principal;
 import java.util.HashSet;
 import java.util.Properties;
@@ -220,7 +220,7 @@ public static void setupKDCAndPrincipals() throws Exception {
         BOB_LOCALHOST, keytab_bob));
 
     jaasFile = new File(kdcWorkDir, "jaas.txt");
-    FileUtils.write(jaasFile, jaas.toString(), Charset.defaultCharset());
+    FileUtils.write(jaasFile, jaas.toString(), StandardCharsets.UTF_8);
     LOG.info("\n"+ jaas);
     RegistrySecurity.bindJVMtoJAASFile(jaasFile);
   }
diff --git a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureLogins.java b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureLogins.java
index 1cdc47d562..52d677e00a 100644
--- a/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureLogins.java
+++ b/hadoop-common-project/hadoop-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureLogins.java
@@ -20,7 +20,7 @@
 import java.io.File;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Method;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.security.Principal;
 import java.security.PrivilegedExceptionAction;
 import java.util.HashMap;
@@ -93,8 +93,7 @@ public void testClientLogin() throws Throwable {
       logLoginDetails(ALICE_LOCALHOST, client);
       String confFilename = System.getProperty(Environment.JAAS_CONF_KEY);
       assertNotNull("Unset: "+ Environment.JAAS_CONF_KEY, confFilename);
-      String config = FileUtils.readFileToString(new File(confFilename),
-          Charset.defaultCharset());
+      String config = FileUtils.readFileToString(new File(confFilename), StandardCharsets.UTF_8);
       LOG.info("{}=\n{}", confFilename, config);
       RegistrySecurity.setZKSaslClientProperties(ALICE, ALICE_CLIENT_CONTEXT);
     } finally {
@@ -133,8 +132,7 @@ public LoginContext createLoginContextZookeeperLocalhost() throws
   @Test
   public void testKerberosAuth() throws Throwable {
     File krb5conf = getKdc().getKrb5conf();
-    String krbConfig = FileUtils.readFileToString(krb5conf,
-        Charset.defaultCharset());
+    String krbConfig = FileUtils.readFileToString(krb5conf, StandardCharsets.UTF_8);
     LOG.info("krb5.conf at {}:\n{}", krb5conf, krbConfig);
     Subject subject = new Subject();
     Class<?> kerb5LoginClass =
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
index cdb4a537b1..f584b56ebb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
@@ -57,7 +57,7 @@
 import java.net.HttpURLConnection;
 import java.net.URI;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -1711,8 +1711,7 @@ public void testNoRedirect() throws Exception {
     conn.connect();
     // Verify that we read what we wrote
     Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    String content = IOUtils.toString(
-        conn.getInputStream(), Charset.defaultCharset());
+    String content = IOUtils.toString(conn.getInputStream(), StandardCharsets.UTF_8);
     Assert.assertEquals(testContent, content);
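[Reviewer note] In the registry tests the write and read sides now agree: the JAAS file is
written with FileUtils.write(..., StandardCharsets.UTF_8) in AbstractSecureRegistryTest and
read back with the same charset in TestSecureLogins. A sketch of the symmetric pattern
(method and file names hypothetical):

    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.io.FileUtils;

    class JaasFileRoundTrip {
      static String roundTrip(File jaasFile, String conf) throws IOException {
        FileUtils.write(jaasFile, conf, StandardCharsets.UTF_8);
        // Reading with the same explicit charset guarantees byte-for-byte fidelity
        // even when the surrounding JVM runs with a non-UTF-8 default locale.
        return FileUtils.readFileToString(jaasFile, StandardCharsets.UTF_8);
      }
    }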
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/CancelCommand.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/CancelCommand.java
index 0789430a38..6889f5f92e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/CancelCommand.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/CancelCommand.java
@@ -32,7 +32,7 @@
 import org.apache.hadoop.hdfs.tools.DiskBalancerCLI;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 /**
  * Cancels a running plan.
@@ -77,7 +77,7 @@ public void execute(CommandLine cmd) throws Exception {
         "Invalid plan file specified.");
     String planData = null;
     try (FSDataInputStream plan = open(planFile)) {
-      planData = IOUtils.toString(plan, Charset.defaultCharset());
+      planData = IOUtils.toString(plan, StandardCharsets.UTF_8);
     }
     cancelPlan(planData);
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/ExecuteCommand.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/ExecuteCommand.java
index 4cc1ade5d0..78b16e999f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/ExecuteCommand.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/diskbalancer/command/ExecuteCommand.java
@@ -32,7 +32,7 @@
 import org.apache.hadoop.hdfs.tools.DiskBalancerCLI;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 /**
  * executes a given plan.
@@ -69,7 +69,7 @@ public void execute(CommandLine cmd) throws Exception {
 
     String planData = null;
     try (FSDataInputStream plan = open(planFile)) {
-      planData = IOUtils.toString(plan, Charset.defaultCharset());
+      planData = IOUtils.toString(plan, StandardCharsets.UTF_8);
     }
 
     boolean skipDateCheck = false;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/shell/TestHdfsTextCommand.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/shell/TestHdfsTextCommand.java
index cb7773c838..57e7cf415f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/shell/TestHdfsTextCommand.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/shell/TestHdfsTextCommand.java
@@ -25,7 +25,7 @@
 import java.io.InputStream;
 import java.io.StringWriter;
 import java.lang.reflect.Method;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -107,7 +107,7 @@ public void testDisplayForAvroFiles() throws Exception {
 
   private String inputStreamToString(InputStream stream) throws IOException {
     StringWriter writer = new StringWriter();
-    IOUtils.copy(stream, writer, Charset.defaultCharset());
+    IOUtils.copy(stream, writer, StandardCharsets.UTF_8);
     return writer.toString();
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewerForXAttr.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewerForXAttr.java
index f402017b9c..6c7d7b7896 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewerForXAttr.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewerForXAttr.java
@@ -25,7 +25,7 @@
 import java.net.HttpURLConnection;
 import java.net.URI;
 import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -126,8 +126,7 @@ public void testWebImageViewerForListXAttrs() throws Exception {
 
       assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
 
-      String content = IOUtils
-          .toString(connection.getInputStream(), Charset.defaultCharset());
+      String content = IOUtils.toString(connection.getInputStream(), StandardCharsets.UTF_8);
 
       assertTrue("Missing user.attr1 in response ",
           content.contains("user.attr1"));
@@ -152,8 +151,7 @@ public void testWebImageViewerForGetXAttrsWithOutParameters()
       connection.connect();
 
       assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
-      String content = IOUtils
-          .toString(connection.getInputStream(), Charset.defaultCharset());
+      String content = IOUtils.toString(connection.getInputStream(), StandardCharsets.UTF_8);
 
       assertTrue("Missing user.attr1 in response ",
           content.contains("user.attr1"));
@@ -186,8 +184,7 @@ public void testWebImageViewerForGetXAttrsWithParameters() throws Exception {
       connection.connect();
 
       assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
-      String content = IOUtils
-          .toString(connection.getInputStream(), Charset.defaultCharset());
+      String content = IOUtils.toString(connection.getInputStream(), StandardCharsets.UTF_8);
       assertEquals(attr1JSon, content);
     }
   }
@@ -209,8 +206,7 @@ public void testWebImageViewerForGetXAttrsWithCodecParameters()
       connection.connect();
 
       assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode());
-      String content = IOUtils
-          .toString(connection.getInputStream(), Charset.defaultCharset());
+      String content = IOUtils.toString(connection.getInputStream(), StandardCharsets.UTF_8);
       assertEquals(attr1JSon, content);
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
index 5322960a3c..f43e541aca 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
@@ -48,7 +48,6 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
-import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
@@ -1554,7 +1553,7 @@ private void checkResponseContainsLocation(URL url, String TYPE)
     conn.setRequestMethod(TYPE);
     conn.setInstanceFollowRedirects(false);
     String response =
-        IOUtils.toString(conn.getInputStream(), Charset.defaultCharset());
+        IOUtils.toString(conn.getInputStream(), StandardCharsets.UTF_8);
     LOG.info("Response was : " + response);
     Assert.assertEquals(
         "Response wasn't " + HttpURLConnection.HTTP_OK,
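[Reviewer note] The disk balancer commands above read a JSON plan file via
IOUtils.toString(FSDataInputStream, ...). JSON is UTF-8 by specification (RFC 8259), so
decoding it with the platform default charset was only accidentally correct. A minimal
sketch of the read path under that assumption (class and method names hypothetical):

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    class PlanReader {
      static String readPlan(FileSystem fs, Path planFile) throws IOException {
        try (FSDataInputStream plan = fs.open(planFile)) {
          // Pin the decode charset to match the charset the plan was serialized with.
          return IOUtils.toString(plan, StandardCharsets.UTF_8);
        }
      }
    }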
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
index 414b3ca30d..371a07c17c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJavaSerialization.java
@@ -23,7 +23,7 @@
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 import java.util.StringTokenizer;
 
@@ -135,8 +135,7 @@ public void testMapReduceJob() throws Exception {
         new Utils.OutputFileUtils.OutputFilesFilter()));
     assertEquals(1, outputFiles.length);
     try (InputStream is = fs.open(outputFiles[0])) {
-      String reduceOutput =
-          org.apache.commons.io.IOUtils.toString(is, Charset.defaultCharset());
+      String reduceOutput = org.apache.commons.io.IOUtils.toString(is, StandardCharsets.UTF_8);
       String[] lines = reduceOutput.split("\n");
       assertEquals("Unexpected output; received output '" + reduceOutput + "'",
           "a\t1", lines[0]);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java
index 1b65d17397..d5d59e66ee 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-uploader/src/test/java/org/apache/hadoop/mapred/uploader/TestFrameworkUploader.java
@@ -44,7 +44,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.HashMap;
@@ -437,8 +437,7 @@ public void testNativeIO() throws IOException {
     // Create a target file
     File targetFile = new File(parent, "a.txt");
     try(FileOutputStream os = new FileOutputStream(targetFile)) {
-      IOUtils.writeLines(Lists.newArrayList("a", "b"), null, os,
-          Charset.defaultCharset());
+      IOUtils.writeLines(Lists.newArrayList("a", "b"), null, os, StandardCharsets.UTF_8);
     }
     Assert.assertFalse(uploader.checkSymlink(targetFile));
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/resourcegz/ResourceGzMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/resourcegz/ResourceGzMojo.java
index 359389fddb..692b92b5e3 100644
--- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/resourcegz/ResourceGzMojo.java
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/resourcegz/ResourceGzMojo.java
@@ -24,7 +24,7 @@
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Arrays;
@@ -114,7 +114,7 @@ public void accept(Path path) {
             BufferedReader is = Files.newBufferedReader(path)
         ) {
           getLog().info("Compressing " + path + " to " + outFile);
-          IOUtils.copy(is, os, Charset.defaultCharset());
+          IOUtils.copy(is, os, StandardCharsets.UTF_8);
         }
       } else {
         throw new IOException("Directory " + outFile.getParent()
diff --git a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
index e324f1dedd..b475cc25e8 100644
--- a/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
+++ b/hadoop-tools/hadoop-archive-logs/src/test/java/org/apache/hadoop/tools/TestHadoopArchiveLogs.java
@@ -42,7 +42,7 @@
 
 import java.io.File;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Random;
 
 public class TestHadoopArchiveLogs {
@@ -279,8 +279,7 @@ private void _testGenerateScript(boolean proxy) throws Exception {
     Assert.assertFalse(localScript.exists());
     hal.generateScript(localScript);
    Assert.assertTrue(localScript.exists());
-    String script =
-        IOUtils.toString(localScript.toURI(), Charset.defaultCharset());
+    String script = IOUtils.toString(localScript.toURI(), StandardCharsets.UTF_8);
     String[] lines = script.split("\n");
     Assert.assertEquals(22, lines.length);
     Assert.assertEquals("#!/bin/bash", lines[0]);
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java
index a1e56c3ce4..db5b5b5685 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java
@@ -21,7 +21,7 @@
 import javax.net.ssl.SSLException;
 import java.io.IOException;
 import java.net.SocketException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 
 import com.amazonaws.SdkClientException;
 import com.amazonaws.services.s3.model.GetObjectRequest;
@@ -200,8 +200,7 @@ public ObjectMetadata getObjectMetadata() {
    * @return mocked object.
    */
  private S3ObjectInputStream getMockedInputStream(boolean triggerFailure) {
-    return new S3ObjectInputStream(
-        IOUtils.toInputStream(INPUT, Charset.defaultCharset()), null) {
+    return new S3ObjectInputStream(IOUtils.toInputStream(INPUT, StandardCharsets.UTF_8), null) {
 
       private final IOException exception =
           new SSLException(new SocketException("Connection reset"));
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/ITestS3SelectCLI.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/ITestS3SelectCLI.java
index a29abfdf63..b22d2f53b4 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/ITestS3SelectCLI.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/select/ITestS3SelectCLI.java
@@ -22,7 +22,7 @@
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 import org.junit.Assume;
@@ -168,8 +168,7 @@ D, v(CSV_OUTPUT_QUOTE_FIELDS, CSV_OUTPUT_QUOTE_FIELDS_AS_NEEEDED),
         o(OPT_OUTPUT), localFile.toString(),
         landsatSrc,
         SELECT_SUNNY_ROWS_NO_LIMIT);
-    List<String> lines = IOUtils.readLines(new FileInputStream(localFile),
-        Charset.defaultCharset());
+    List<String> lines = IOUtils.readLines(new FileInputStream(localFile), StandardCharsets.UTF_8);
     LOG.info("Result from select:\n{}", lines.get(0));
     assertEquals(lineCount, lines.size());
     selectCount.assertDiffEquals("select count", 1);
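[Reviewer note] The ResourceGzMojo hunk deserves a call-out: Files.newBufferedReader(path)
with no charset argument already decodes as UTF-8 (unlike FileReader, which uses the
platform default), so re-encoding with Charset.defaultCharset() on the way out could
silently transcode resources on non-UTF-8 JVMs. Pinning both sides to UTF-8 makes the copy
a true pass-through. A minimal sketch of the aligned pair, not the mojo itself:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import org.apache.commons.io.IOUtils;

    class GzCopySketch {
      static void copy(Path in, OutputStream os) throws IOException {
        try (BufferedReader reader = Files.newBufferedReader(in)) { // decodes UTF-8 by default
          IOUtils.copy(reader, os, StandardCharsets.UTF_8);         // encodes UTF-8 on the way out
        }
      }
    }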
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
index 1bbf0f2c15..6d143a3bd4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
@@ -97,7 +97,7 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
 import java.util.Collection;
 import java.util.HashMap;
@@ -549,7 +549,7 @@ private void createConfigFileCache(final FileSystem fileSystem) {
         case TEMPLATE:
           try (FSDataInputStream fileInput = fileSystem
               .open(new Path(key.getSrcFile()))) {
-            return IOUtils.toString(fileInput, Charset.defaultCharset());
+            return IOUtils.toString(fileInput, StandardCharsets.UTF_8);
           }
         default:
           return null;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
index f7c0a7b374..57d76dfeec 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
@@ -48,7 +48,7 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.OutputStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -466,8 +466,7 @@ private static void resolvePlainTemplateAndSaveOnHdfs(FileSystem fs,
     content = substituteStrWithTokens(content, tokensForSubstitution);
 
     try (OutputStream output = fs.create(remoteFile)) {
-      org.apache.commons.io.IOUtils
-          .write(content, output, Charset.defaultCharset());
+      org.apache.commons.io.IOUtils.write(content, output, StandardCharsets.UTF_8);
     } catch (IOException e) {
       log.info("Failed to create " + remoteFile);
     }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
index bb6660e074..c996225c9a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
@@ -87,7 +87,7 @@ public static Credentials readCredentialsFromConfigFile(Path configFile,
     if (fs != null) {
       FSDataInputStream fileHandle = fs.open(configFile);
       if (fileHandle != null) {
-        contents = IOUtils.toString(fileHandle, Charset.defaultCharset());
+        contents = IOUtils.toString(fileHandle, StandardCharsets.UTF_8);
       }
     }
     if (contents == null) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/resolver/DefaultSubClusterResolverImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/resolver/DefaultSubClusterResolverImpl.java
index d3c5c269ab..7bb04ffde4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/resolver/DefaultSubClusterResolverImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/resolver/DefaultSubClusterResolverImpl.java
@@ -19,7 +19,7 @@
 package org.apache.hadoop.yarn.server.federation.resolver;
 
 import java.io.BufferedReader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.InvalidPathException;
 import java.nio.file.Path;
@@ -107,7 +107,7 @@ public void load() {
     }
 
     try {
-      reader = Files.newBufferedReader(file, Charset.defaultCharset());
+      reader = Files.newBufferedReader(file, StandardCharsets.UTF_8);
       String line = null;
       while ((line = reader.readLine()) != null) {
         String[] tokens = line.split(",");
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogServlet.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogServlet.java
index c61391b2bc..7c48687bd2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogServlet.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogServlet.java
@@ -54,7 +54,7 @@
 import javax.ws.rs.core.Response.Status;
 import javax.ws.rs.core.StreamingOutput;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -420,7 +420,7 @@ private static GenericEntity<List<ContainerLogsInfo>> createEmptyLogsInfo() {
 
   private static StreamingOutput createEmptyStream() {
     return outputStream -> outputStream.write(
-        "".getBytes(Charset.defaultCharset()));
+        "".getBytes(StandardCharsets.UTF_8));
   }
 
   /**
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
index 2cb84514f2..0ef7aad8cd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
@@ -24,7 +24,7 @@
 import java.io.PrintStream;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -350,8 +350,7 @@ public int reacquireContainer(ContainerReacquisitionContext ctx)
     }
 
     try {
-      return Integer.parseInt(
-          FileUtils.readFileToString(file, Charset.defaultCharset()).trim());
+      return Integer.parseInt(FileUtils.readFileToString(file, StandardCharsets.UTF_8).trim());
     } catch (NumberFormatException e) {
       throw new IOException("Error parsing exit code from pid " + pid, e);
    }
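[Reviewer note] For the ContainerExecutor hunk, the exit-code file holds ASCII digits (it is
written by the container launch machinery as a plain integer, as I understand it), so every
ASCII-compatible charset decodes it identically; the switch to StandardCharsets.UTF_8 here
is about consistency rather than behaviour. Sketch of the reacquisition read path (class
and method names hypothetical):

    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.io.FileUtils;

    class ExitCodeReader {
      static int readExitCode(File exitCodeFile) throws IOException {
        // trim() guards against a trailing newline left by the writer.
        String raw = FileUtils.readFileToString(exitCodeFile, StandardCharsets.UTF_8).trim();
        try {
          return Integer.parseInt(raw);
        } catch (NumberFormatException e) {
          throw new IOException("Error parsing exit code '" + raw + "'", e);
        }
      }
    }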
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupElasticMemoryController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupElasticMemoryController.java
index dc7f354d1a..0e7e74d02a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupElasticMemoryController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupElasticMemoryController.java
@@ -34,7 +34,7 @@
 import java.io.File;
 import java.io.InputStream;
 import java.lang.reflect.Constructor;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
@@ -269,9 +269,8 @@ public void run() {
 
       // Listen to any errors in the background. We do not expect this to
       // be large in size, so it will fit into a string.
-      Future<String> errorListener = executor.submit(
-          () -> IOUtils.toString(process.getErrorStream(),
-              Charset.defaultCharset()));
+      Future<String> errorListener = executor.submit(
+          () -> IOUtils.toString(process.getErrorStream(), StandardCharsets.UTF_8));
 
       // We get Linux event increments (8 bytes) forwarded from the event stream
       // The events cannot be split, so it is safe to read them as a whole
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
index ab142a1db0..8248b330d4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
@@ -20,7 +20,7 @@
 
 import java.io.File;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.Arrays;
@@ -270,16 +270,14 @@ public void testAcquireWithExitCodeTimeout() throws Exception {
     try {
       int writtenExitCode = 10;
 
-      FileUtils.writeStringToFile(pidFile, "2992",
-          Charset.defaultCharset(), false);
+      FileUtils.writeStringToFile(pidFile, "2992", StandardCharsets.UTF_8, false);
 
       TimerTask task = new java.util.TimerTask() {
         @Override
         public void run() {
           try {
-            FileUtils.writeStringToFile(exitCodeFile,
-                Integer.toString(writtenExitCode),
-                Charset.defaultCharset(), false);
+            FileUtils.writeStringToFile(exitCodeFile, Integer.toString(writtenExitCode),
+                StandardCharsets.UTF_8, false);
           } catch (IOException ioe) {
             LOG.warn("Could not write pid file");
           }
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestCGroupElasticMemoryController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestCGroupElasticMemoryController.java
index f10ec50f3f..71b392f2d3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestCGroupElasticMemoryController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestCGroupElasticMemoryController.java
@@ -26,7 +26,7 @@
 import org.junit.Test;
 
 import java.io.File;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
@@ -97,8 +97,7 @@ public void testMultipleOOMEvents() throws Exception {
         script.getAbsolutePath());
     try {
       FileUtils.writeStringToFile(script,
-          "#!/bin/bash\nprintf oomevent;printf oomevent;\n",
-          Charset.defaultCharset(), false);
+          "#!/bin/bash\nprintf oomevent;printf oomevent;\n", StandardCharsets.UTF_8, false);
       assertTrue("Could not set executable",
           script.setExecutable(true));
 
@@ -138,9 +137,8 @@ public void testStopBeforeStart() throws Exception {
     conf.set(YarnConfiguration.NM_ELASTIC_MEMORY_CONTROL_OOM_LISTENER_PATH,
         script.getAbsolutePath());
     try {
-      FileUtils.writeStringToFile(script,
-          "#!/bin/bash\nprintf oomevent;printf oomevent;\n",
-          Charset.defaultCharset(), false);
+      FileUtils.writeStringToFile(script, "#!/bin/bash\nprintf oomevent;printf oomevent;\n",
+          StandardCharsets.UTF_8, false);
       assertTrue("Could not set executable",
           script.setExecutable(true));
 
@@ -181,9 +179,8 @@ public void testInfiniteOOM() throws Exception {
         script.getAbsolutePath());
     Runnable handler = mock(Runnable.class);
     try {
-      FileUtils.writeStringToFile(script,
-          "#!/bin/bash\nprintf oomevent;sleep 1000;\n",
-          Charset.defaultCharset(), false);
+      FileUtils.writeStringToFile(script, "#!/bin/bash\nprintf oomevent;sleep 1000;\n",
+          StandardCharsets.UTF_8, false);
       assertTrue("Could not set executable",
           script.setExecutable(true));
 
@@ -223,9 +220,8 @@ public void testNothingToKill() throws Exception {
         script.getAbsolutePath());
     Runnable handler = mock(Runnable.class);
     try {
-      FileUtils.writeStringToFile(script,
-          "#!/bin/bash\nprintf oomevent;sleep 1000;\n",
-          Charset.defaultCharset(), false);
+      FileUtils.writeStringToFile(script, "#!/bin/bash\nprintf oomevent;sleep 1000;\n",
+          StandardCharsets.UTF_8, false);
       assertTrue("Could not set executable",
           script.setExecutable(true));
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/placement/TestPlacementRuleFS.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/placement/TestPlacementRuleFS.java
index 741304eafb..116c0ace70 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/placement/TestPlacementRuleFS.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/placement/TestPlacementRuleFS.java
@@ -31,7 +31,7 @@
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -194,8 +194,7 @@ private Element createConf(String str) {
     Document doc = null;
     try {
       DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
-      doc = builder.parse(IOUtils.toInputStream(str,
-          Charset.defaultCharset()));
+      doc = builder.parse(IOUtils.toInputStream(str, StandardCharsets.UTF_8));
     } catch (Exception ex) {
       fail("Element creation failed, failing test");
     }
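[Reviewer note] The last hunk follows the same reasoning: DocumentBuilder.parse(InputStream)
sniffs the XML prolog and otherwise assumes UTF-8, so encoding the in-memory string as UTF-8
keeps the produced bytes and the parser's expectation aligned. Minimal sketch of the pattern
(class name hypothetical):

    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import javax.xml.parsers.DocumentBuilder;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.apache.commons.io.IOUtils;
    import org.w3c.dom.Document;

    class XmlFromString {
      static Document parse(String xml) throws Exception {
        DocumentBuilder builder =
            DocumentBuilderFactory.newInstance().newDocumentBuilder();
        // Encode the string with the charset the XML parser will assume.
        try (InputStream in = IOUtils.toInputStream(xml, StandardCharsets.UTF_8)) {
          return builder.parse(in);
        }
      }
    }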