diff --git a/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java b/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java
index f5cff2b529..72f0f16309 100644
--- a/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java
+++ b/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java
@@ -19,7 +19,7 @@
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
/**
* Example that uses AuthenticatedURL.
@@ -42,7 +42,7 @@ public static void main(String[] args) {
if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
BufferedReader reader = new BufferedReader(
new InputStreamReader(
- conn.getInputStream(), Charset.forName("UTF-8")));
+ conn.getInputStream(), StandardCharsets.UTF_8));
String line = reader.readLine();
while (line != null) {
System.out.println(line);
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
index 19947aafad..7bf3398a21 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/PseudoAuthenticationHandler.java
@@ -23,7 +23,7 @@
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Properties;
@@ -53,8 +53,6 @@ public class PseudoAuthenticationHandler implements AuthenticationHandler {
*/
public static final String ANONYMOUS_ALLOWED = TYPE + ".anonymous.allowed";
- private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
-
private static final String PSEUDO_AUTH = "PseudoAuth";
private boolean acceptAnonymous;
@@ -146,7 +144,7 @@ private String getUserName(HttpServletRequest request) {
if(queryString == null || queryString.length() == 0) {
return null;
}
- List<NameValuePair> list = URLEncodedUtils.parse(queryString, UTF8_CHARSET);
+ List<NameValuePair> list = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
if (list != null) {
for (NameValuePair nv : list) {
if (PseudoAuthenticator.USER_NAME.equals(nv.getName())) {
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/StringSignerSecretProvider.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/StringSignerSecretProvider.java
index ed6b1aeccc..9d857640bc 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/StringSignerSecretProvider.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/StringSignerSecretProvider.java
@@ -13,7 +13,7 @@
*/
package org.apache.hadoop.security.authentication.util;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Properties;
import javax.servlet.ServletContext;
@@ -38,7 +38,7 @@ public void init(Properties config, ServletContext servletContext,
long tokenValidity) throws Exception {
String signatureSecret = config.getProperty(
AuthenticationFilter.SIGNATURE_SECRET, null);
- secret = signatureSecret.getBytes(Charset.forName("UTF-8"));
+ secret = signatureSecret.getBytes(StandardCharsets.UTF_8);
secrets = new byte[][]{secret};
}
diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestZKSignerSecretProvider.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestZKSignerSecretProvider.java
index 628342e40d..4f090c234e 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestZKSignerSecretProvider.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestZKSignerSecretProvider.java
@@ -13,7 +13,7 @@
*/
package org.apache.hadoop.security.authentication.util;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Properties;
import java.util.Random;
import javax.servlet.ServletContext;
@@ -140,11 +140,11 @@ public void testUpgradeChangeSecretLength() throws Exception {
long seed = System.currentTimeMillis();
Random rand = new Random(seed);
byte[] secret2 = Long.toString(rand.nextLong())
- .getBytes(Charset.forName("UTF-8"));
+ .getBytes(StandardCharsets.UTF_8);
byte[] secret1 = Long.toString(rand.nextLong())
- .getBytes(Charset.forName("UTF-8"));
+ .getBytes(StandardCharsets.UTF_8);
byte[] secret3 = Long.toString(rand.nextLong())
- .getBytes(Charset.forName("UTF-8"));
+ .getBytes(StandardCharsets.UTF_8);
rand = new Random(seed);
// Secrets 4 and 5 get thrown away by ZK when the new secret provider tries
// to init
@@ -238,7 +238,7 @@ private class OldMockZKSignerSecretProvider
@Override
protected byte[] generateRandomSecret() {
- return Long.toString(rand.nextLong()).getBytes(Charset.forName("UTF-8"));
+ return Long.toString(rand.nextLong()).getBytes(StandardCharsets.UTF_8);
}
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index ab7ff0bd40..7c4f617b17 100755
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -43,6 +43,7 @@
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
@@ -82,7 +83,6 @@
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.commons.collections.map.UnmodifiableMap;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -2903,7 +2903,7 @@ public Reader getConfResourceAsReader(String name) {
LOG.info("found resource " + name + " at " + url);
}
- return new InputStreamReader(url.openStream(), Charsets.UTF_8);
+ return new InputStreamReader(url.openStream(), StandardCharsets.UTF_8);
} catch (Exception e) {
return null;
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index a1a325b925..28799349a8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -899,7 +899,7 @@ private static void runCommandOnStream(
try (BufferedReader reader =
new BufferedReader(
new InputStreamReader(process.getInputStream(),
- Charset.forName("UTF-8")))) {
+ StandardCharsets.UTF_8))) {
String line;
while((line = reader.readLine()) != null) {
LOG.debug(line);
@@ -922,7 +922,7 @@ private static void runCommandOnStream(
try (BufferedReader reader =
new BufferedReader(
new InputStreamReader(process.getErrorStream(),
- Charset.forName("UTF-8")))) {
+ StandardCharsets.UTF_8))) {
String line;
while((line = reader.readLine()) != null) {
LOG.debug(line);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FileSystemMultipartUploader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FileSystemMultipartUploader.java
index 481d927672..28a4bce048 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FileSystemMultipartUploader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FileSystemMultipartUploader.java
@@ -20,6 +20,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
@@ -30,7 +31,6 @@
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -104,7 +104,7 @@ public CompletableFuture<UploadHandle> startUpload(Path filePath)
fs.mkdirs(collectorPath, FsPermission.getDirDefault());
ByteBuffer byteBuffer = ByteBuffer.wrap(
- collectorPath.toString().getBytes(Charsets.UTF_8));
+ collectorPath.toString().getBytes(StandardCharsets.UTF_8));
return BBUploadHandle.from(byteBuffer);
});
}
@@ -130,7 +130,7 @@ private PartHandle innerPutPart(Path filePath,
byte[] uploadIdByteArray = uploadId.toByteArray();
checkUploadId(uploadIdByteArray);
Path collectorPath = new Path(new String(uploadIdByteArray, 0,
- uploadIdByteArray.length, Charsets.UTF_8));
+ uploadIdByteArray.length, StandardCharsets.UTF_8));
Path partPath =
mergePaths(collectorPath, mergePaths(new Path(Path.SEPARATOR),
new Path(partNumber + ".part")));
@@ -149,7 +149,7 @@ private PartHandle innerPutPart(Path filePath,
cleanupWithLogger(LOG, inputStream);
}
return BBPartHandle.from(ByteBuffer.wrap(
- partPath.toString().getBytes(Charsets.UTF_8)));
+ partPath.toString().getBytes(StandardCharsets.UTF_8)));
}
private Path createCollectorPath(Path filePath) {
@@ -210,7 +210,7 @@ private PathHandle innerComplete(
.map(pair -> {
byte[] byteArray = pair.getValue().toByteArray();
return new Path(new String(byteArray, 0, byteArray.length,
- Charsets.UTF_8));
+ StandardCharsets.UTF_8));
})
.collect(Collectors.toList());
@@ -223,7 +223,7 @@ private PathHandle innerComplete(
"Duplicate PartHandles");
byte[] uploadIdByteArray = multipartUploadId.toByteArray();
Path collectorPath = new Path(new String(uploadIdByteArray, 0,
- uploadIdByteArray.length, Charsets.UTF_8));
+ uploadIdByteArray.length, StandardCharsets.UTF_8));
boolean emptyFile = totalPartsLen(partHandles) == 0;
if (emptyFile) {
@@ -250,7 +250,7 @@ public CompletableFuture<Void> abort(UploadHandle uploadId,
byte[] uploadIdByteArray = uploadId.toByteArray();
checkUploadId(uploadIdByteArray);
Path collectorPath = new Path(new String(uploadIdByteArray, 0,
- uploadIdByteArray.length, Charsets.UTF_8));
+ uploadIdByteArray.length, StandardCharsets.UTF_8));
return FutureIO.eval(() -> {
// force a check for a file existing; raises FNFE if not found
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
index 1ac204f5f8..70c7fe3812 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
@@ -22,6 +22,7 @@
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Iterator;
import java.util.LinkedList;
@@ -114,7 +115,7 @@ protected void processArguments(LinkedList<PathData> items)
private void writeDelimiter(FSDataOutputStream out) throws IOException {
if (delimiter != null) {
- out.write(delimiter.getBytes("UTF-8"));
+ out.write(delimiter.getBytes(StandardCharsets.UTF_8));
}
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
index 5f47ddb339..ca2687ce5f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
@@ -120,7 +120,7 @@ public static String quoteHtmlChars(String item) {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
try {
quoteHtmlChars(buffer, bytes, 0, bytes.length);
- return buffer.toString("UTF-8");
+ return new String(buffer.toByteArray(), StandardCharsets.UTF_8);
} catch (IOException ioe) {
// Won't happen, since it is a bytearrayoutputstream
return null;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
index 7be50b0c53..92eff36ced 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java
@@ -20,7 +20,6 @@
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import java.nio.charset.UnsupportedCharsetException;
import java.util.ArrayList;
import org.apache.commons.codec.binary.Base64;
@@ -75,14 +74,10 @@ public DefaultStringifier(Configuration conf, Class<T> c) {
@Override
public T fromString(String str) throws IOException {
- try {
- byte[] bytes = Base64.decodeBase64(str.getBytes("UTF-8"));
- inBuf.reset(bytes, bytes.length);
- T restored = deserializer.deserialize(null);
- return restored;
- } catch (UnsupportedCharsetException ex) {
- throw new IOException(ex.toString());
- }
+ byte[] bytes = Base64.decodeBase64(str.getBytes(StandardCharsets.UTF_8));
+ inBuf.reset(bytes, bytes.length);
+ T restored = deserializer.deserialize(null);
+ return restored;
}
@Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java
index 187398de0e..b46aed1208 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java
@@ -25,6 +25,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
+import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
@@ -86,12 +87,12 @@ public static int writeCompressedByteArray(DataOutput out,
public static String readCompressedString(DataInput in) throws IOException {
byte[] bytes = readCompressedByteArray(in);
if (bytes == null) return null;
- return new String(bytes, "UTF-8");
+ return new String(bytes, StandardCharsets.UTF_8);
}
public static int writeCompressedString(DataOutput out, String s) throws IOException {
- return writeCompressedByteArray(out, (s != null) ? s.getBytes("UTF-8") : null);
+ return writeCompressedByteArray(out, (s != null) ? s.getBytes(StandardCharsets.UTF_8) : null);
}
/*
@@ -103,7 +104,7 @@ public static int writeCompressedString(DataOutput out, String s) throws IOExce
*/
public static void writeString(DataOutput out, String s) throws IOException {
if (s != null) {
- byte[] buffer = s.getBytes("UTF-8");
+ byte[] buffer = s.getBytes(StandardCharsets.UTF_8);
int len = buffer.length;
out.writeInt(len);
out.write(buffer, 0, len);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
index 32879597a9..a765ab83ca 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
@@ -23,6 +23,7 @@
import java.io.PrintWriter;
import java.net.URL;
import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
import javax.net.ssl.HttpsURLConnection;
@@ -33,7 +34,6 @@
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -297,7 +297,7 @@ private void process(String urlString) throws Exception {
// read from the servlet
BufferedReader in = new BufferedReader(
- new InputStreamReader(connection.getInputStream(), Charsets.UTF_8));
+ new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8));
for (String line;;) {
line = in.readLine();
if (line == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
index 2d22b75841..f4848fed51 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
@@ -23,6 +23,8 @@
import java.net.URL;
import java.net.URLClassLoader;
import static java.security.AccessController.*;
+
+import java.nio.charset.StandardCharsets;
import java.security.PrivilegedAction;
import java.util.Iterator;
import java.util.Map;
@@ -289,7 +291,7 @@ static String toString(Configuration c) {
PropertiesConfiguration tmp = new PropertiesConfiguration();
tmp.copy(c);
tmp.write(pw);
- return buffer.toString("UTF-8");
+ return new String(buffer.toByteArray(), StandardCharsets.UTF_8);
} catch (Exception e) {
throw new MetricsConfigException(e);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPlainServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPlainServer.java
index 270b579324..8e3de21064 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPlainServer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslPlainServer.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.security;
+import java.nio.charset.StandardCharsets;
import java.security.Provider;
import java.util.Map;
@@ -82,7 +83,7 @@ public byte[] evaluateResponse(byte[] response) throws SaslException {
try {
String payload;
try {
- payload = new String(response, "UTF-8");
+ payload = new String(response, StandardCharsets.UTF_8);
} catch (Exception e) {
throw new IllegalArgumentException("Received corrupt response", e);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
index 260f1d2249..d594d26515 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
@@ -24,7 +24,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.ProviderUtils;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -33,6 +32,7 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.KeyStoreException;
@@ -199,7 +199,7 @@ public CredentialEntry getCredentialEntry(String alias)
public static char[] bytesToChars(byte[] bytes) throws IOException {
String pass;
- pass = new String(bytes, Charsets.UTF_8);
+ pass = new String(bytes, StandardCharsets.UTF_8);
return pass.toCharArray();
}
@@ -268,7 +268,7 @@ CredentialEntry innerSetCredential(String alias, char[] material)
writeLock.lock();
try {
keyStore.setKeyEntry(alias,
- new SecretKeySpec(new String(material).getBytes("UTF-8"),
+ new SecretKeySpec(new String(material).getBytes(StandardCharsets.UTF_8),
getAlgorithm()), password, null);
} catch (KeyStoreException e) {
throw new IOException("Can't store credential " + alias + " in " + this,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
index 0c960d891b..2ae98f0337 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
@@ -70,7 +70,7 @@ public synchronized CredentialEntry createCredentialEntry(String name, char[] cr
" already exists in " + this);
}
credentials.addSecretKey(new Text(name),
- new String(credential).getBytes("UTF-8"));
+ new String(credential).getBytes(StandardCharsets.UTF_8));
return new CredentialEntry(name, credential);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
index 571e54c5f9..1400e572d9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
@@ -51,7 +51,7 @@
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.security.Principal;
import java.util.Enumeration;
import java.util.List;
@@ -94,8 +94,6 @@ public class DelegationTokenAuthenticationFilter
public static final String DELEGATION_TOKEN_SECRET_MANAGER_ATTR =
"hadoop.http.delegation-token-secret-manager";
- private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
-
private static final ThreadLocal<UserGroupInformation> UGI_TL =
new ThreadLocal<UserGroupInformation>();
public static final String PROXYUSER_PREFIX = "proxyuser";
@@ -226,7 +224,7 @@ static String getDoAs(HttpServletRequest request) {
if (queryString == null) {
return null;
}
- List<NameValuePair> list = URLEncodedUtils.parse(queryString, UTF8_CHARSET);
+ List<NameValuePair> list = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
if (list != null) {
for (NameValuePair nv : list) {
if (DelegationTokenAuthenticatedURL.DO_AS.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/ServletUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/ServletUtils.java
index 078dfa44bd..e43668e2c6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/ServletUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/ServletUtils.java
@@ -23,7 +23,7 @@
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.List;
/**
@@ -31,7 +31,6 @@
*/
@InterfaceAudience.Private
class ServletUtils {
- private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
/**
* Extract a query string parameter without triggering http parameters
@@ -49,7 +48,7 @@ public static String getParameter(HttpServletRequest request, String name)
if (queryString == null) {
return null;
}
- List<NameValuePair> list = URLEncodedUtils.parse(queryString, UTF8_CHARSET);
+ List<NameValuePair> list = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
if (list != null) {
for (NameValuePair nv : list) {
if (name.equals(nv.getName())) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java
index 38777d8f66..574d9062c3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoLinux.java
@@ -22,7 +22,7 @@
import java.io.InputStreamReader;
import java.io.IOException;
import java.math.BigInteger;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
@@ -247,7 +247,7 @@ private void readProcMemInfoFile(boolean readAgain) {
try {
fReader = new InputStreamReader(
Files.newInputStream(Paths.get(procfsMemFile)),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
in = new BufferedReader(fReader);
} catch (IOException f) {
// shouldn't happen....
@@ -319,7 +319,7 @@ private void readProcCpuInfoFile() {
try {
fReader =
new InputStreamReader(Files.newInputStream(Paths.get(procfsCpuFile)),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
in = new BufferedReader(fReader);
} catch (IOException f) {
// shouldn't happen....
@@ -380,7 +380,7 @@ private void readProcStatFile() {
try {
fReader = new InputStreamReader(
Files.newInputStream(Paths.get(procfsStatFile)),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
in = new BufferedReader(fReader);
} catch (IOException f) {
// shouldn't happen....
@@ -435,7 +435,7 @@ private void readProcNetInfoFile() {
try {
fReader = new InputStreamReader(
Files.newInputStream(Paths.get(procfsNetFile)),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
in = new BufferedReader(fReader);
} catch (IOException f) {
return;
@@ -490,7 +490,7 @@ private void readProcDisksInfoFile() {
try {
in = new BufferedReader(new InputStreamReader(
Files.newInputStream(Paths.get(procfsDisksFile)),
- Charset.forName("UTF-8")));
+ StandardCharsets.UTF_8));
} catch (IOException f) {
return;
}
@@ -558,7 +558,7 @@ int readDiskBlockInformation(String diskName, int defSector) {
try {
in = new BufferedReader(new InputStreamReader(
Files.newInputStream(Paths.get(procfsDiskSectorFile)),
- Charset.forName("UTF-8")));
+ StandardCharsets.UTF_8));
} catch (IOException f) {
return defSector;
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
index 17d8233342..4c9f418daf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ZKUtil.java
@@ -19,6 +19,7 @@
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -27,7 +28,6 @@
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Id;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
import org.apache.hadoop.thirdparty.com.google.common.io.Files;
@@ -148,7 +148,7 @@ public static List<ZKAuthInfo> parseAuth(String authString) throws
"Auth '" + comp + "' not of expected form scheme:auth");
}
ret.add(new ZKAuthInfo(parts[0],
- parts[1].getBytes(Charsets.UTF_8)));
+ parts[1].getBytes(StandardCharsets.UTF_8)));
}
return ret;
}
@@ -172,7 +172,7 @@ public static String resolveConfIndirection(String valInConf)
return valInConf;
}
String path = valInConf.substring(1).trim();
- return Files.asCharSource(new File(path), Charsets.UTF_8).read().trim();
+ return Files.asCharSource(new File(path), StandardCharsets.UTF_8).read().trim();
}
/**
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
index 3055e7bf65..a0cbf86fb9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/curator/ZKCuratorManager.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.util.curator;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
@@ -260,7 +260,7 @@ public byte[] getData(final String path, Stat stat) throws Exception {
public String getStringData(final String path) throws Exception {
byte[] bytes = getData(path);
if (bytes != null) {
- return new String(bytes, Charset.forName("UTF-8"));
+ return new String(bytes, StandardCharsets.UTF_8);
}
return null;
}
@@ -275,7 +275,7 @@ public String getStringData(final String path) throws Exception {
public String getStringData(final String path, Stat stat) throws Exception {
byte[] bytes = getData(path, stat);
if (bytes != null) {
- return new String(bytes, Charset.forName("UTF-8"));
+ return new String(bytes, StandardCharsets.UTF_8);
}
return null;
}
@@ -299,7 +299,7 @@ public void setData(String path, byte[] data, int version) throws Exception {
* @throws Exception If it cannot contact Zookeeper.
*/
public void setData(String path, String data, int version) throws Exception {
- byte[] bytes = data.getBytes(Charset.forName("UTF-8"));
+ byte[] bytes = data.getBytes(StandardCharsets.UTF_8);
setData(path, bytes, version);
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
index b07ba76e8e..210b36f2db 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
@@ -60,7 +60,7 @@ public class TestCommonConfigurationFields extends TestConfigurationFieldsBase {
@SuppressWarnings("deprecation")
@Override
public void initializeMemberVariables() {
- xmlFilename = new String("core-default.xml");
+ xmlFilename = "core-default.xml";
configurationClasses = new Class[] {
CommonConfigurationKeys.class,
CommonConfigurationKeysPublic.class,
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
index 94d90b2eb9..177223dc08 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
@@ -698,7 +698,7 @@ public void testUnTar() throws Exception {
OutputStream os = new FileOutputStream(simpleTar);
try (TarOutputStream tos = new TarOutputStream(os)) {
TarEntry te = new TarEntry("/bar/foo");
- byte[] data = "some-content".getBytes("UTF-8");
+ byte[] data = "some-content".getBytes(StandardCharsets.UTF_8);
te.setSize(data.length);
tos.putNextEntry(te);
tos.write(data);
@@ -782,7 +782,7 @@ public void testUnZip() throws Exception {
ZipArchiveList.add(new ZipArchiveEntry("foo_" + i));
ZipArchiveEntry archiveEntry = ZipArchiveList.get(i);
archiveEntry.setUnixMode(count += 0100);
- byte[] data = "some-content".getBytes("UTF-8");
+ byte[] data = "some-content".getBytes(StandardCharsets.UTF_8);
archiveEntry.setSize(data.length);
tos.putArchiveEntry(archiveEntry);
tos.write(data);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
index 471d2458f4..eccf491cca 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
@@ -30,6 +30,7 @@
import java.io.File;
import java.io.IOException;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;
@@ -117,7 +118,7 @@ private void writeVersionToMasterIndexImpl(int version, Path masterIndexPath) th
final FSDataOutputStream fsdos = localFileSystem.create(masterIndexPath);
try {
String versionString = version + "\n";
- fsdos.write(versionString.getBytes("UTF-8"));
+ fsdos.write(versionString.getBytes(StandardCharsets.UTF_8));
fsdos.flush();
} finally {
fsdos.close();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractMultipartUploaderTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractMultipartUploaderTest.java
index c395afdb37..7420b47a98 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractMultipartUploaderTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractMultipartUploaderTest.java
@@ -22,13 +22,13 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.assertj.core.api.Assertions;
import org.junit.Assume;
import org.junit.Test;
@@ -596,8 +596,8 @@ public void testMultipartUploadAbort() throws Exception {
abortUpload(uploadHandle, file);
String contents = "ThisIsPart49\n";
- int len = contents.getBytes(Charsets.UTF_8).length;
- InputStream is = IOUtils.toInputStream(contents, "UTF-8");
+ int len = contents.getBytes(StandardCharsets.UTF_8).length;
+ InputStream is = IOUtils.toInputStream(contents, StandardCharsets.UTF_8);
intercept(IOException.class,
() -> awaitFuture(
@@ -624,7 +624,7 @@ public void testMultipartUploadAbort() throws Exception {
public void testAbortUnknownUpload() throws Exception {
Path file = methodPath();
ByteBuffer byteBuffer = ByteBuffer.wrap(
- "invalid-handle".getBytes(Charsets.UTF_8));
+ "invalid-handle".getBytes(StandardCharsets.UTF_8));
intercept(FileNotFoundException.class,
() -> abortUpload(BBUploadHandle.from(byteBuffer), file));
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
index b61abddd43..bbccbfbc16 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/ContractTestUtils.java
@@ -45,6 +45,7 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -806,7 +807,7 @@ public static String readUTF8(FileSystem fs,
try (FSDataInputStream in = fs.open(path)) {
byte[] buf = new byte[length];
in.readFully(0, buf);
- return new String(buf, "UTF-8");
+ return new String(buf, StandardCharsets.UTF_8);
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java
index 13f7eccd55..a027b4d682 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestHAAdmin.java
@@ -23,6 +23,7 @@
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
@@ -30,7 +31,6 @@
import org.junit.Before;
import org.junit.Test;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -97,8 +97,8 @@ private Object runTool(String ... args) throws Exception {
outBytes.reset();
LOG.info("Running: HAAdmin " + Joiner.on(" ").join(args));
int ret = tool.run(args);
- errOutput = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
- output = new String(outBytes.toByteArray(), Charsets.UTF_8);
+ errOutput = new String(errOutBytes.toByteArray(), StandardCharsets.UTF_8);
+ output = new String(outBytes.toByteArray(), StandardCharsets.UTF_8);
LOG.info("Err_output:\n" + errOutput + "\nOutput:\n" + output);
return ret;
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestIsActiveServlet.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestIsActiveServlet.java
index cfc7e359e9..22bea17a7c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestIsActiveServlet.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestIsActiveServlet.java
@@ -27,6 +27,7 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.anyInt;
@@ -90,6 +91,6 @@ protected boolean isActive() {
private String doGet() throws IOException {
servlet.doGet(req, resp);
- return new String(respOut.toByteArray(), "UTF-8");
+ return new String(respOut.toByteArray(), StandardCharsets.UTF_8);
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java
index 91c0f1b442..f9a5a30966 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java
@@ -23,6 +23,7 @@
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -57,7 +58,7 @@ public static void makeTestFile() throws Exception {
for (File f : new File[] { testFilePathIs, testFilePathRaf,
testFilePathFadis }) {
FileOutputStream fos = new FileOutputStream(f);
- fos.write("hello".getBytes("UTF-8"));
+ fos.write("hello".getBytes(StandardCharsets.UTF_8));
fos.close();
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
index 7ae5d7d7ca..4eb260c033 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
@@ -22,8 +22,8 @@
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
+import java.nio.charset.StandardCharsets;
import java.util.Random;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.primitives.Bytes;
import org.junit.Test;
@@ -105,7 +105,7 @@ public void testCoding() throws Exception {
ByteBuffer bb = Text.encode(before);
byte[] utf8Text = bb.array();
- byte[] utf8Java = before.getBytes("UTF-8");
+ byte[] utf8Java = before.getBytes(StandardCharsets.UTF_8);
assertEquals(0, WritableComparator.compareBytes(
utf8Text, 0, bb.limit(),
utf8Java, 0, utf8Java.length));
@@ -392,7 +392,7 @@ public void testReadWriteOperations() {
@Test
public void testReadWithKnownLength() throws IOException {
String line = "hello world";
- byte[] inputBytes = line.getBytes(Charsets.UTF_8);
+ byte[] inputBytes = line.getBytes(StandardCharsets.UTF_8);
DataInputBuffer in = new DataInputBuffer();
Text text = new Text();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java
index 2d60b5ecca..6899d1cdca 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java
@@ -23,6 +23,7 @@
import java.io.IOException;
import java.io.UTFDataFormatException;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.util.Random;
import org.apache.hadoop.test.GenericTestUtils;
@@ -110,7 +111,7 @@ public void testNullEncoding() throws Exception {
DataOutputBuffer dob = new DataOutputBuffer();
new UTF8(s).write(dob);
- assertEquals(s, new String(dob.getData(), 2, dob.getLength()-2, "UTF-8"));
+ assertEquals(s, new String(dob.getData(), 2, dob.getLength()-2, StandardCharsets.UTF_8));
}
/**
@@ -125,7 +126,7 @@ public void testNonBasicMultilingualPlane() throws Exception {
String catFace = "\uD83D\uDC31";
// This encodes to 4 bytes in UTF-8:
- byte[] encoded = catFace.getBytes("UTF-8");
+ byte[] encoded = catFace.getBytes(StandardCharsets.UTF_8);
assertEquals(4, encoded.length);
assertEquals("f09f90b1", StringUtils.byteToHexString(encoded));
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java
index 67889405c1..420c16bef5 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestFileSink.java
@@ -23,6 +23,7 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
import org.apache.hadoop.io.IOUtils;
@@ -113,7 +114,7 @@ public void testFileSink() throws IOException {
is = new FileInputStream(outFile);
baos = new ByteArrayOutputStream((int)outFile.length());
IOUtils.copyBytes(is, baos, 1024, true);
- outFileContent = new String(baos.toByteArray(), "UTF-8");
+ outFileContent = new String(baos.toByteArray(), StandardCharsets.UTF_8);
} finally {
IOUtils.cleanupWithLogger(null, baos, is);
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestStatsDMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestStatsDMetrics.java
index 99a75787ad..2ce02f74f1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestStatsDMetrics.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/sink/TestStatsDMetrics.java
@@ -25,7 +25,7 @@
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@@ -75,7 +75,7 @@ public void testPutMetrics() throws IOException, IllegalAccessException {
sock.receive(p);
String result =new String(p.getData(), 0, p.getLength(),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
assertTrue(
"Received data did not match data sent",
result.equals("host.process.jvm.Context.foo1:1.25|c") ||
@@ -109,7 +109,7 @@ public void testPutMetrics2() throws IOException, IllegalAccessException {
sink.putMetrics(record);
sock.receive(p);
String result =
- new String(p.getData(), 0, p.getLength(), Charset.forName("UTF-8"));
+ new String(p.getData(), 0, p.getLength(), StandardCharsets.UTF_8);
assertTrue("Received data did not match data sent",
result.equals("process.jvm.Context.foo1:1|c") ||
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestTableMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestTableMapping.java
index 697b0bad43..50fe0c098f 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestTableMapping.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestTableMapping.java
@@ -21,11 +21,11 @@
import static org.junit.Assert.assertEquals;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
@@ -41,7 +41,7 @@ public class TestTableMapping {
public void testResolve() throws IOException {
File mapFile = File.createTempFile(getClass().getSimpleName() +
".testResolve", ".txt");
- Files.asCharSink(mapFile, Charsets.UTF_8).write(
+ Files.asCharSink(mapFile, StandardCharsets.UTF_8).write(
hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
mapFile.deleteOnExit();
TableMapping mapping = new TableMapping();
@@ -64,7 +64,7 @@ public void testResolve() throws IOException {
public void testTableCaching() throws IOException {
File mapFile = File.createTempFile(getClass().getSimpleName() +
".testTableCaching", ".txt");
- Files.asCharSink(mapFile, Charsets.UTF_8).write(
+ Files.asCharSink(mapFile, StandardCharsets.UTF_8).write(
hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
mapFile.deleteOnExit();
TableMapping mapping = new TableMapping();
@@ -128,7 +128,7 @@ public void testFileDoesNotExist() {
public void testClearingCachedMappings() throws IOException {
File mapFile = File.createTempFile(getClass().getSimpleName() +
".testClearingCachedMappings", ".txt");
- Files.asCharSink(mapFile, Charsets.UTF_8).write(
+ Files.asCharSink(mapFile, StandardCharsets.UTF_8).write(
hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
mapFile.deleteOnExit();
@@ -147,7 +147,7 @@ public void testClearingCachedMappings() throws IOException {
assertEquals("/rack1", result.get(0));
assertEquals("/rack2", result.get(1));
- Files.asCharSink(mapFile, Charsets.UTF_8).write("");
+ Files.asCharSink(mapFile, StandardCharsets.UTF_8).write("");
mapping.reloadCachedMappings();
@@ -166,7 +166,7 @@ public void testClearingCachedMappings() throws IOException {
public void testBadFile() throws IOException {
File mapFile = File.createTempFile(getClass().getSimpleName() +
".testBadFile", ".txt");
- Files.asCharSink(mapFile, Charsets.UTF_8).write("bad contents");
+ Files.asCharSink(mapFile, StandardCharsets.UTF_8).write("bad contents");
mapFile.deleteOnExit();
TableMapping mapping = new TableMapping();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestZKDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestZKDelegationTokenSecretManager.java
index 2312af3c79..25dae7e4fd 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestZKDelegationTokenSecretManager.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/delegation/TestZKDelegationTokenSecretManager.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.security.token.delegation;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -400,7 +401,7 @@ public List<ACL> getDefaultAcl() {
.connectString(connectString)
.retryPolicy(retryPolicy)
.aclProvider(digestAclProvider)
- .authorization("digest", userPass.getBytes("UTF-8"))
+ .authorization("digest", userPass.getBytes(StandardCharsets.UTF_8))
.build();
curatorFramework.start();
ZKDelegationTokenSecretManager.setCurator(curatorFramework);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java
index 529887b297..716dfe0c36 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestClasspath.java
@@ -24,6 +24,7 @@
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
@@ -46,7 +47,7 @@ public class TestClasspath {
.class);
private static final File TEST_DIR = GenericTestUtils.getTestDir(
"TestClasspath");
- private static final Charset UTF8 = Charset.forName("UTF-8");
+ private static final Charset UTF8 = StandardCharsets.UTF_8;
static {
ExitUtil.disableSystemExit();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32.java
index 7fb90a15e3..bf3e58793b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestPureJavaCrc32.java
@@ -21,6 +21,7 @@
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.lang.reflect.Constructor;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
@@ -49,7 +50,7 @@ public void testCorrectness() throws Exception {
checkOnBytes(new byte[] {40, 60, 97, -70}, false);
- checkOnBytes("hello world!".getBytes("UTF-8"), false);
+ checkOnBytes("hello world!".getBytes(StandardCharsets.UTF_8), false);
for (int i = 0; i < 10000; i++) {
byte randomBytes[] = new byte[new Random().nextInt(2048)];
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestZKUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestZKUtil.java
index 93790eb135..d12fff2732 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestZKUtil.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestZKUtil.java
@@ -22,6 +22,7 @@
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.hadoop.test.GenericTestUtils;
@@ -31,7 +32,6 @@
import org.apache.zookeeper.data.ACL;
import org.junit.Test;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.io.Files;
public class TestZKUtil {
@@ -131,7 +131,7 @@ public void testConfIndirection() throws IOException {
assertEquals("x", ZKUtil.resolveConfIndirection("x"));
TEST_FILE.getParentFile().mkdirs();
- Files.asCharSink(TEST_FILE, Charsets.UTF_8).write("hello world");
+ Files.asCharSink(TEST_FILE, StandardCharsets.UTF_8).write("hello world");
assertEquals("hello world", ZKUtil.resolveConfIndirection(
"@" + TEST_FILE.getAbsolutePath()));
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestZKCuratorManager.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestZKCuratorManager.java
index aced6e8d28..69cba8f4ad 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestZKCuratorManager.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/curator/TestZKCuratorManager.java
@@ -22,6 +22,7 @@
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -117,7 +118,7 @@ public void testGetStringData() throws Exception {
curator.create(node1);
assertNull(curator.getStringData(node1));
- byte[] setData = "setData".getBytes("UTF-8");
+ byte[] setData = "setData".getBytes(StandardCharsets.UTF_8);
curator.setData(node1, setData, -1);
assertEquals("setData", curator.getStringData(node1));
@@ -136,7 +137,7 @@ public void testTransaction() throws Exception {
String fencingNodePath = "/fencing";
String node1 = "/node1";
String node2 = "/node2";
- byte[] testData = "testData".getBytes("UTF-8");
+ byte[] testData = "testData".getBytes(StandardCharsets.UTF_8);
assertFalse(curator.exists(fencingNodePath));
assertFalse(curator.exists(node1));
assertFalse(curator.exists(node2));
@@ -154,7 +155,7 @@ public void testTransaction() throws Exception {
assertTrue(Arrays.equals(testData, curator.getData(node1)));
assertTrue(Arrays.equals(testData, curator.getData(node2)));
- byte[] setData = "setData".getBytes("UTF-8");
+ byte[] setData = "setData".getBytes(StandardCharsets.UTF_8);
txn = curator.createTransaction(zkAcl, fencingNodePath);
txn.setData(node1, setData, -1);
txn.delete(node2);
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java
index b9b8d9cee6..e29ce4ba5f 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java
@@ -33,7 +33,7 @@
import java.io.Writer;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
@@ -64,8 +64,7 @@ public void writeTo(Object obj, Class<?> aClass, Type type,
Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> stringObjectMultivaluedMap,
OutputStream outputStream) throws IOException, WebApplicationException {
- Writer writer = new OutputStreamWriter(outputStream, Charset
- .forName("UTF-8"));
+ Writer writer = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8);
JsonSerialization.writer().writeValue(writer, obj);
}
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java
index 04aabfc635..dafd3e2d00 100644
--- a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/binding/JsonSerDeser.java
@@ -28,6 +28,7 @@
import java.io.EOFException;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
/**
* Support for marshalling objects to and from JSON.
@@ -47,7 +48,6 @@
@InterfaceStability.Evolving
public class JsonSerDeser<T> extends JsonSerialization<T> {
- private static final String UTF_8 = "UTF-8";
public static final String E_NO_DATA = "No data at path";
public static final String E_DATA_TOO_SHORT = "Data at path too short";
public static final String E_MISSING_MARKER_STRING =
@@ -102,7 +102,7 @@ public T fromBytes(String path, byte[] bytes, String marker)
if (StringUtils.isNotEmpty(marker) && len < marker.length()) {
throw new NoRecordException(path, E_DATA_TOO_SHORT);
}
- String json = new String(bytes, 0, len, UTF_8);
+ String json = new String(bytes, 0, len, StandardCharsets.UTF_8);
if (StringUtils.isNotEmpty(marker)
&& !json.contains(marker)) {
throw new NoRecordException(path, E_MISSING_MARKER_STRING + marker);
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistrySecurity.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistrySecurity.java
index d48920a222..0dc587f64e 100644
--- a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistrySecurity.java
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/impl/zk/RegistrySecurity.java
@@ -42,6 +42,7 @@
import javax.security.auth.login.AppConfigurationEntry;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Collections;
@@ -295,7 +296,7 @@ private void initSecurity() throws IOException {
digestAuthUser = id;
digestAuthPassword = pass;
String authPair = id + ":" + pass;
- digestAuthData = authPair.getBytes("UTF-8");
+ digestAuthData = authPair.getBytes(StandardCharsets.UTF_8);
if (LOG.isDebugEnabled()) {
LOG.debug("Auth is Digest ACL: {}", aclToString(acl));
}
diff --git a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
index 02d866f4d3..b6de757fc3 100644
--- a/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
+++ b/hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/server/dns/RegistryDNS.java
@@ -80,6 +80,7 @@
import java.nio.channels.DatagramChannel;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
+import java.nio.charset.StandardCharsets;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
@@ -628,7 +629,7 @@ private void enableDNSSECIfNecessary(Zone zone, Configuration conf,
Name zoneName = zone.getOrigin();
DNSKEYRecord dnskeyRecord = dnsKeyRecs.get(zoneName);
if (dnskeyRecord == null) {
- byte[] key = Base64.decodeBase64(publicKey.getBytes("UTF-8"));
+ byte[] key = Base64.decodeBase64(publicKey.getBytes(StandardCharsets.UTF_8));
dnskeyRecord = new DNSKEYRecord(zoneName,
DClass.IN, ttl,
DNSKEYRecord.Flags.ZONE_KEY,
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
index ab5cd0608d..4749890ce1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
@@ -30,6 +30,7 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -59,7 +60,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
import org.apache.hadoop.thirdparty.com.google.common.net.InetAddresses;
@@ -147,7 +147,7 @@ public static Map<String, String> createSaslPropertiesForEncryption(
* @return key encoded as SASL password
*/
public static char[] encryptionKeyToPassword(byte[] encryptionKey) {
- return new String(Base64.encodeBase64(encryptionKey, false), Charsets.UTF_8)
+ return new String(Base64.encodeBase64(encryptionKey, false), StandardCharsets.UTF_8)
.toCharArray();
}
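For reference, encryptionKeyToPassword above applies the same charset swap inside a common Hadoop idiom: Base64-encode raw key bytes and expose them as the char[] password a SASL callback expects. A hedged standalone sketch of that idiom using commons-codec, where the class and method names are illustrative rather than the Hadoop API:

import java.nio.charset.StandardCharsets;
import org.apache.commons.codec.binary.Base64;

public class SaslPasswordSketch {
  // Base64-encode the key (non-chunked); the output is ASCII, so decoding it
  // as UTF-8 is lossless, and toCharArray() yields the SASL password form.
  static char[] toPassword(byte[] key) {
    return new String(Base64.encodeBase64(key, false), StandardCharsets.UTF_8)
        .toCharArray();
  }

  public static void main(String[] args) {
    System.out.println(new String(toPassword(new byte[] {1, 2, 3, 4}))); // AQIDBA==
  }
}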
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java
index 641c7a0ff4..043439130d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferClient.java
@@ -30,6 +30,7 @@
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.Socket;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -65,7 +66,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* Negotiates SASL for DataTransferProtocol on behalf of a client. There are
@@ -347,7 +347,7 @@ private static String getUserNameFromEncryptionKey(
return encryptionKey.keyId + NAME_DELIMITER +
encryptionKey.blockPoolId + NAME_DELIMITER +
new String(Base64.encodeBase64(encryptionKey.nonce, false),
- Charsets.UTF_8);
+ StandardCharsets.UTF_8);
}
/**
@@ -450,7 +450,7 @@ private IOStreamPair getSaslStreams(InetAddress addr,
private void updateToken(Token<BlockTokenIdentifier> accessToken,
SecretKey secretKey, Map<String, String> saslProps)
throws IOException {
- byte[] newSecret = saslProps.get(Sasl.QOP).getBytes(Charsets.UTF_8);
+ byte[] newSecret = saslProps.get(Sasl.QOP).getBytes(StandardCharsets.UTF_8);
BlockTokenIdentifier bkid = accessToken.decodeIdentifier();
bkid.setHandshakeMsg(newSecret);
byte[] bkidBytes = bkid.getBytes();
@@ -471,7 +471,7 @@ private void updateToken(Token<BlockTokenIdentifier> accessToken,
*/
private static String buildUserName(Token<BlockTokenIdentifier> blockToken) {
return new String(Base64.encodeBase64(blockToken.getIdentifier(), false),
- Charsets.UTF_8);
+ StandardCharsets.UTF_8);
}
/**
@@ -483,7 +483,7 @@ private static String buildUserName(Token<BlockTokenIdentifier> blockToken) {
*/
private char[] buildClientPassword(Token<BlockTokenIdentifier> blockToken) {
return new String(Base64.encodeBase64(blockToken.getPassword(), false),
- Charsets.UTF_8).toCharArray();
+ StandardCharsets.UTF_8).toCharArray();
}
/**
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
index 91ab48fa9d..c7724ce6db 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileReader.java
@@ -27,6 +27,7 @@
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.Reader;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
@@ -84,7 +85,7 @@ private CombinedHostsFileReader() {
if (hostFile.length() > 0) {
try (Reader input =
new InputStreamReader(
- Files.newInputStream(hostFile.toPath()), "UTF-8")) {
+ Files.newInputStream(hostFile.toPath()), StandardCharsets.UTF_8)) {
allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
} catch (JsonMappingException jme) {
// The old format doesn't have json top-level token to enclose
@@ -103,7 +104,7 @@ private CombinedHostsFileReader() {
List<DatanodeAdminProperties> all = new ArrayList<>();
try (Reader input =
new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)),
- "UTF-8")) {
+ StandardCharsets.UTF_8)) {
Iterator<DatanodeAdminProperties> iterator =
objectReader.readValues(jsonFactory.createParser(input));
while (iterator.hasNext()) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java
index 7897dc1ebf..de4c12d556 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/CombinedHostsFileWriter.java
@@ -21,6 +21,7 @@
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Set;
@@ -62,7 +63,7 @@ public static void writeFile(final String hostsFile,
try (Writer output =
new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)),
- "UTF-8")) {
+ StandardCharsets.UTF_8)) {
objectMapper.writeValue(output, allDNs);
}
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 689a7c78d4..a8bd95b32a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -137,7 +137,6 @@
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
/** A FileSystem for HDFS over the web. */
@@ -1792,7 +1791,7 @@ public DirectoryEntries listStatusBatch(Path f, byte[] token) throws
}
DirectoryListing listing = new FsPathResponseRunner<DirectoryListing>(
GetOpParam.Op.LISTSTATUS_BATCH,
- f, new StartAfterParam(new String(prevKey, Charsets.UTF_8))) {
+ f, new StartAfterParam(new String(prevKey, StandardCharsets.UTF_8))) {
@Override
DirectoryListing decodeResponse(Map<?, ?> json) throws IOException {
return JsonUtilClient.toDirectoryListing(json);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsContentLength.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsContentLength.java
index 6ee8858df9..5577bb6266 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsContentLength.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsContentLength.java
@@ -22,6 +22,7 @@
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
+import java.nio.charset.StandardCharsets;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -204,7 +205,7 @@ public String call() throws Exception {
if (n <= 0) {
break;
}
- sb.append(new String(buf, 0, n, "UTF-8"));
+ sb.append(new String(buf, 0, n, StandardCharsets.UTF_8));
}
return sb.toString();
} finally {
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
index 65b49cc9cf..dab4776575 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.client;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -24,7 +25,6 @@
import java.util.List;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.MapType;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -796,7 +796,7 @@ public DirectoryEntries listStatusBatch(Path f, byte[] token) throws
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.LISTSTATUS_BATCH.toString());
if (token != null) {
- params.put(START_AFTER_PARAM, new String(token, Charsets.UTF_8));
+ params.put(START_AFTER_PARAM, new String(token, StandardCharsets.UTF_8));
}
HttpURLConnection conn = getConnection(
Operation.LISTSTATUS_BATCH.getMethod(),
@@ -811,7 +811,7 @@ public DirectoryEntries listStatusBatch(Path f, byte[] token) throws
byte[] newToken = null;
if (statuses.length > 0) {
newToken = statuses[statuses.length - 1].getPath().getName().toString()
- .getBytes(Charsets.UTF_8);
+ .getBytes(StandardCharsets.UTF_8);
}
// Parse the remainingEntries boolean into hasMore
final long remainingEntries = (Long) listing.get(REMAINING_ENTRIES_JSON);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
index 57a79a1847..de4cf83248 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.fs.http.server;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -91,6 +90,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.security.AccessControlException;
import java.security.PrivilegedExceptionAction;
import java.text.MessageFormat;
@@ -422,7 +422,7 @@ public InputStream run() throws Exception {
HttpFSParametersProvider.StartAfterParam.class);
byte[] token = HttpFSUtils.EMPTY_BYTES;
if (startAfter != null) {
- token = startAfter.getBytes(Charsets.UTF_8);
+ token = startAfter.getBytes(StandardCharsets.UTF_8);
}
FSOperations.FSListStatusBatch command = new FSOperations
.FSListStatusBatch(path, token);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
index 30a6a844e2..9b96a92cb4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
@@ -25,7 +25,7 @@
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.EnumSet;
import io.netty.buffer.ByteBuf;
@@ -681,15 +681,15 @@ READLINK3Response readlink(XDR xdr, SecurityHandler securityHandler,
}
int rtmax = config.getInt(NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_KEY,
NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_DEFAULT);
- if (rtmax < target.getBytes(Charset.forName("UTF-8")).length) {
+ if (rtmax < target.getBytes(StandardCharsets.UTF_8).length) {
LOG.error("Link size: {} is larger than max transfer size: {}",
- target.getBytes(Charset.forName("UTF-8")).length, rtmax);
+ target.getBytes(StandardCharsets.UTF_8).length, rtmax);
return new READLINK3Response(Nfs3Status.NFS3ERR_IO, postOpAttr,
new byte[0]);
}
return new READLINK3Response(Nfs3Status.NFS3_OK, postOpAttr,
- target.getBytes(Charset.forName("UTF-8")));
+ target.getBytes(StandardCharsets.UTF_8));
} catch (IOException e) {
LOG.warn("Readlink error", e);
@@ -1515,7 +1515,7 @@ private DirectoryListing listPaths(DFSClient dfsClient, String dirFileIdPath,
}
// This happens when startAfter was just deleted
LOG.info("Cookie couldn't be found: {}, do listing from beginning",
- new String(startAfter, Charset.forName("UTF-8")));
+ new String(startAfter, StandardCharsets.UTF_8));
dlisting = dfsClient
.listPaths(dirFileIdPath, HdfsFileStatus.EMPTY_NAME);
}
@@ -1628,7 +1628,7 @@ public READDIR3Response readdir(XDR xdr, SecurityHandler securityHandler,
startAfter = HdfsFileStatus.EMPTY_NAME;
} else {
String inodeIdPath = Nfs3Utils.getFileIdPath(cookie);
- startAfter = inodeIdPath.getBytes(Charset.forName("UTF-8"));
+ startAfter = inodeIdPath.getBytes(StandardCharsets.UTF_8);
}
dlisting = listPaths(dfsClient, dirFileIdPath, startAfter);
@@ -1800,7 +1800,7 @@ READDIRPLUS3Response readdirplus(XDR xdr, SecurityHandler securityHandler,
startAfter = HdfsFileStatus.EMPTY_NAME;
} else {
String inodeIdPath = Nfs3Utils.getFileIdPath(cookie);
- startAfter = inodeIdPath.getBytes(Charset.forName("UTF-8"));
+ startAfter = inodeIdPath.getBytes(StandardCharsets.UTF_8);
}
dlisting = listPaths(dfsClient, dirFileIdPath, startAfter);
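For reference, the readlink hunk earlier in this file compares the configured maximum transfer size against the UTF-8 byte length of the link target; the byte length is what matters, since non-ASCII paths encode to more bytes than characters. A hedged sketch of that size check which encodes the target once and reuses the array (names are illustrative, not the Hadoop API):

import java.nio.charset.StandardCharsets;

public class ReadlinkSizeSketch {
  // Returns the encoded target, or an empty array when it exceeds rtmax;
  // a caller would map the empty case to an error response.
  static byte[] encodeTarget(String target, int rtmax) {
    byte[] data = target.getBytes(StandardCharsets.UTF_8);
    return (data.length > rtmax) ? new byte[0] : data;
  }

  public static void main(String[] args) {
    System.out.println(encodeTarget("/export/data", 4096).length); // 12
  }
}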
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
index fcb6b7d7bc..1d8928f75c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
@@ -28,6 +28,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
@@ -62,7 +63,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* Negotiates SASL for DataTransferProtocol on behalf of a server. There are
@@ -326,7 +326,7 @@ private char[] buildServerPassword(String userName) throws IOException {
byte[] tokenPassword = blockPoolTokenSecretManager.retrievePassword(
identifier);
return (new String(Base64.encodeBase64(tokenPassword, false),
- Charsets.UTF_8)).toCharArray();
+ StandardCharsets.UTF_8)).toCharArray();
}
/**
@@ -381,7 +381,7 @@ private IOStreamPair doSaslHandshake(Peer peer, OutputStream underlyingOut,
if (secret != null || bpid != null) {
// sanity check, if one is null, the other must also not be null
assert(secret != null && bpid != null);
- String qop = new String(secret, Charsets.UTF_8);
+ String qop = new String(secret, StandardCharsets.UTF_8);
saslProps.put(Sasl.QOP, qop);
}
SaslParticipant sasl = SaslParticipant.createServerSaslParticipant(
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
index ffa613018c..e0ac692e60 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
@@ -25,6 +25,7 @@
import java.io.OutputStreamWriter;
import java.net.URL;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.security.PrivilegedExceptionAction;
@@ -72,7 +73,6 @@
import org.apache.hadoop.util.Time;
import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
import org.apache.hadoop.thirdparty.protobuf.TextFormat;
@@ -1105,7 +1105,7 @@ private void persistPaxosData(long segmentTxId,
// Write human-readable data after the protobuf. This is only
// to assist in debugging -- it's not parsed at all.
try(OutputStreamWriter writer =
- new OutputStreamWriter(fos, Charsets.UTF_8)) {
+ new OutputStreamWriter(fos, StandardCharsets.UTF_8)) {
writer.write(String.valueOf(newData));
writer.write('\n');
writer.flush();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
index 18cdeaebfd..b9f817db51 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/security/token/block/BlockTokenSecretManager.java
@@ -18,10 +18,10 @@
package org.apache.hadoop.hdfs.security.token.block;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.SecureRandom;
import java.util.Arrays;
@@ -293,7 +293,7 @@ public Token<BlockTokenIdentifier> generateToken(String userId,
if (shouldWrapQOP) {
String qop = Server.getAuxiliaryPortEstablishedQOP();
if (qop != null) {
- id.setHandshakeMsg(qop.getBytes(Charsets.UTF_8));
+ id.setHandshakeMsg(qop.getBytes(StandardCharsets.UTF_8));
}
}
return new Token<BlockTokenIdentifier>(id, this);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java
index 16afb8496f..93303bcf80 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java
@@ -25,6 +25,7 @@
import java.lang.management.ManagementFactory;
import java.nio.channels.FileLock;
import java.nio.channels.OverlappingFileLockException;
+import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.attribute.PosixFilePermission;
@@ -53,7 +54,6 @@
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -944,7 +944,7 @@ FileLock tryLock() throws IOException {
LOG.error("Unable to acquire file lock on path {}", lockF);
throw new OverlappingFileLockException();
}
- file.write(jvmName.getBytes(Charsets.UTF_8));
+ file.write(jvmName.getBytes(StandardCharsets.UTF_8));
LOG.info("Lock on {} acquired by nodename {}", lockF, jvmName);
} catch(OverlappingFileLockException oe) {
// Cannot read from the locked file on Windows.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java
index e2f9877483..2a1e431056 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DiskBalancer.java
@@ -42,7 +42,7 @@
import org.slf4j.LoggerFactory;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
@@ -450,7 +450,7 @@ private NodePlan verifyPlanHash(String planID, String plan)
if ((planID == null) ||
(planID.length() != sha1Length) ||
- !DigestUtils.sha1Hex(plan.getBytes(Charset.forName("UTF-8")))
+ !DigestUtils.sha1Hex(plan.getBytes(StandardCharsets.UTF_8))
.equalsIgnoreCase(planID)) {
LOG.error("Disk Balancer - Invalid plan hash.");
throw new DiskBalancerException("Invalid or mis-matched hash.",
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemVolumeManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemVolumeManager.java
index 601eb19fe2..22172f4a01 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemVolumeManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/PmemVolumeManager.java
@@ -35,6 +35,7 @@
import java.io.RandomAccessFile;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@@ -333,7 +334,7 @@ static File verifyIfValidPmemVolume(File pmemDir)
String uuidStr = UUID.randomUUID().toString();
String testFilePath = realPmemDir.getPath() + "/.verify.pmem." + uuidStr;
- byte[] contents = uuidStr.getBytes("UTF-8");
+ byte[] contents = uuidStr.getBytes(StandardCharsets.UTF_8);
RandomAccessFile testFile = null;
MappedByteBuffer out = null;
try {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ExceptionHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ExceptionHandler.java
index 6fe0851bb3..a222af7c1c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ExceptionHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ExceptionHandler.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hdfs.server.datanode.web.webhdfs;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import com.sun.jersey.api.ParamException;
import com.sun.jersey.api.container.ContainerException;
import io.netty.buffer.Unpooled;
@@ -32,6 +31,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH;
import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_TYPE;
@@ -83,7 +83,7 @@ static DefaultFullHttpResponse exceptionCaught(Throwable cause) {
s = INTERNAL_SERVER_ERROR;
}
- final byte[] js = JsonUtil.toJsonString(e).getBytes(Charsets.UTF_8);
+ final byte[] js = JsonUtil.toJsonString(e).getBytes(StandardCharsets.UTF_8);
DefaultFullHttpResponse resp =
new DefaultFullHttpResponse(HTTP_1_1, s, Unpooled.wrappedBuffer(js));
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index dea9c57f65..c14526b6a1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -96,6 +96,7 @@
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_DIFF_LISTING_LIMIT_DEFAULT;
import static org.apache.hadoop.hdfs.DFSUtil.isParentEntry;
+import java.nio.charset.StandardCharsets;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.text.CaseUtils;
@@ -343,7 +344,6 @@
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -1982,7 +1982,7 @@ void metaSave(String filename) throws IOException {
File file = new File(System.getProperty("hadoop.log.dir"), filename);
PrintWriter out = new PrintWriter(new BufferedWriter(
new OutputStreamWriter(Files.newOutputStream(file.toPath()),
- Charsets.UTF_8)));
+ StandardCharsets.UTF_8)));
metaSave(out);
out.flush();
out.close();
@@ -4217,7 +4217,7 @@ DirectoryListing getListing(String src, byte[] startAfter,
public byte[] getSrcPathsHash(String[] srcs) {
synchronized (digest) {
for (String src : srcs) {
- digest.update(src.getBytes(Charsets.UTF_8));
+ digest.update(src.getBytes(StandardCharsets.UTF_8));
}
byte[] result = digest.digest();
digest.reset();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index affab74e4f..40cdbaa637 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -26,6 +26,7 @@
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
import java.security.Principal;
import java.security.PrivilegedExceptionAction;
import java.util.Base64;
@@ -124,7 +125,6 @@
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import com.sun.jersey.spi.container.ResourceFilters;
/** Web-hdfs NameNode implementation. */
@@ -1339,7 +1339,7 @@ protected Response get(
{
byte[] start = HdfsFileStatus.EMPTY_NAME;
if (startAfter != null && startAfter.getValue() != null) {
- start = startAfter.getValue().getBytes(Charsets.UTF_8);
+ start = startAfter.getValue().getBytes(StandardCharsets.UTF_8);
}
final DirectoryListing listing = getDirectoryListing(cp, fullpath, start);
final String js = JsonUtil.toJsonString(listing);
@@ -1532,7 +1532,7 @@ private static StreamingOutput getListingStream(final ClientProtocol cp,
@Override
public void write(final OutputStream outstream) throws IOException {
final PrintWriter out = new PrintWriter(new OutputStreamWriter(
- outstream, Charsets.UTF_8));
+ outstream, StandardCharsets.UTF_8));
out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\""
+ FileStatus.class.getSimpleName() + "\":[");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
index fc5f30e883..ac43b21d84 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
@@ -22,6 +22,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
import java.util.Stack;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -41,7 +42,6 @@
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* OfflineEditsXmlLoader walks an EditsVisitor over an OEV XML file
@@ -75,7 +75,7 @@ public OfflineEditsXmlLoader(OfflineEditsVisitor visitor,
File inputFile, OfflineEditsViewer.Flags flags) throws FileNotFoundException {
this.visitor = visitor;
this.fileReader =
- new InputStreamReader(new FileInputStream(inputFile), Charsets.UTF_8);
+ new InputStreamReader(new FileInputStream(inputFile), StandardCharsets.UTF_8);
this.fixTxIds = flags.getFixTxIds();
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java
index 411df10627..fe477058a0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java
@@ -21,6 +21,7 @@
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.HashMap;
@@ -30,7 +31,6 @@
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* StatisticsEditsVisitor implements text version of EditsVisitor
@@ -53,7 +53,7 @@ public class StatisticsEditsVisitor implements OfflineEditsVisitor {
* @param out Name of file to write output to
*/
public StatisticsEditsVisitor(OutputStream out) throws IOException {
- this.out = new PrintWriter(new OutputStreamWriter(out, Charsets.UTF_8));
+ this.out = new PrintWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));
}
/** Start the visitor */
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
index 963406771b..e32cadcee0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/FSImageHandler.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hdfs.tools.offlineImageViewer;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFutureListener;
@@ -37,6 +36,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
@@ -124,7 +124,7 @@ public void channelRead0(ChannelHandlerContext ctx, HttpRequest request)
DefaultFullHttpResponse resp = new DefaultFullHttpResponse(HTTP_1_1,
HttpResponseStatus.OK, Unpooled.wrappedBuffer(content
- .getBytes(Charsets.UTF_8)));
+ .getBytes(StandardCharsets.UTF_8)));
resp.headers().set(CONTENT_TYPE, APPLICATION_JSON_UTF8);
resp.headers().set(CONTENT_LENGTH, resp.content().readableBytes());
resp.headers().set(CONNECTION, CLOSE);
@@ -142,7 +142,7 @@ public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
Exception e = cause instanceof Exception ? (Exception) cause : new
Exception(cause);
final String output = JsonUtil.toJsonString(e);
- ByteBuf content = Unpooled.wrappedBuffer(output.getBytes(Charsets.UTF_8));
+ ByteBuf content = Unpooled.wrappedBuffer(output.getBytes(StandardCharsets.UTF_8));
final DefaultFullHttpResponse resp = new DefaultFullHttpResponse(
HTTP_1_1, INTERNAL_SERVER_ERROR, content);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
index 2e2eaf4e4d..6b55c7f205 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
@@ -19,6 +19,7 @@
import java.io.DataInputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
@@ -320,7 +321,7 @@ private void processINodesUC(DataInputStream in, ImageVisitor v,
for(int i = 0; i < numINUC; i++) {
v.visitEnclosingElement(ImageElement.INODE_UNDER_CONSTRUCTION);
byte [] name = FSImageSerialization.readBytes(in);
- String n = new String(name, "UTF8");
+ String n = new String(name, StandardCharsets.UTF_8);
v.visit(ImageElement.INODE_PATH, n);
if (NameNodeLayoutVersion.supports(Feature.ADD_INODE_ID, imageVersion)) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
index 6a2049acb4..0c075ff6da 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageReconstructor.java
@@ -36,7 +36,7 @@
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.DigestOutputStream;
@@ -1840,7 +1840,7 @@ public static void run(String inputPath, String outputPath)
Files.deleteIfExists(Paths.get(outputPath));
fout = Files.newOutputStream(Paths.get(outputPath));
fis = Files.newInputStream(Paths.get(inputPath));
- reader = new InputStreamReader(fis, Charset.forName("UTF-8"));
+ reader = new InputStreamReader(fis, StandardCharsets.UTF_8);
out = new CountingOutputStream(
new DigestOutputStream(
new BufferedOutputStream(fout), digester));
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java
index f2b329fa2f..20affa93c3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageTextWriter.java
@@ -26,11 +26,11 @@
import java.io.InputStream;
import java.io.PrintStream;
import java.io.RandomAccessFile;
-import java.io.UnsupportedEncodingException;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@@ -419,9 +419,8 @@ private static byte[] toBytes(long value) {
return ByteBuffer.allocate(8).putLong(value).array();
}
- private static byte[] toBytes(String value)
- throws UnsupportedEncodingException {
- return value.getBytes("UTF-8");
+ private static byte[] toBytes(String value) {
+ return value.getBytes(StandardCharsets.UTF_8);
}
private static long toLong(byte[] bytes) {
@@ -430,11 +429,7 @@ private static long toLong(byte[] bytes) {
}
private static String toString(byte[] bytes) throws IOException {
- try {
- return new String(bytes, "UTF-8");
- } catch (UnsupportedEncodingException e) {
- throw new IOException(e);
- }
+ return new String(bytes, StandardCharsets.UTF_8);
}
@Override
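For reference, the PBImageTextWriter hunk above also deletes a checked-exception wrapper: String.getBytes(Charset) and new String(byte[], Charset) cannot throw UnsupportedEncodingException, unlike the overloads that take a charset name. A small sketch of the difference, with helper names that are illustrative only:

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

public class CharsetOverloadSketch {
  // Name-based overload: callers must handle a checked exception that can
  // never actually occur for the literal "UTF-8".
  static byte[] toBytesOld(String value) throws UnsupportedEncodingException {
    return value.getBytes("UTF-8");
  }

  // Charset-based overload: same encoding, no checked exception.
  static byte[] toBytesNew(String value) {
    return value.getBytes(StandardCharsets.UTF_8);
  }

  public static void main(String[] args) throws UnsupportedEncodingException {
    System.out.println(toBytesOld("hdfs").length == toBytesNew("hdfs").length); // true
  }
}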
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java
index f732bd6a7f..1d7c8a4a13 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java
@@ -19,10 +19,10 @@
import java.io.IOException;
import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* TextWriterImageProcessor mixes in the ability for ImageVisitor
@@ -61,7 +61,7 @@ public TextWriterImageVisitor(String filename, boolean printToScreen)
super();
this.printToScreen = printToScreen;
fw = new OutputStreamWriter(Files.newOutputStream(Paths.get(filename)),
- Charsets.UTF_8);
+ StandardCharsets.UTF_8);
okToWrite = true;
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java
index 77ec789058..02ef9ba88c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java
@@ -23,6 +23,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.security.DigestInputStream;
import java.security.MessageDigest;
@@ -35,7 +36,6 @@
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* Static functions for dealing with files of the same format
@@ -75,7 +75,7 @@ public static void verifySavedMD5(File dataFile, MD5Hash expectedMD5)
private static Matcher readStoredMd5(File md5File) throws IOException {
BufferedReader reader =
new BufferedReader(new InputStreamReader(
- Files.newInputStream(md5File.toPath()), Charsets.UTF_8));
+ Files.newInputStream(md5File.toPath()), StandardCharsets.UTF_8));
String md5Line;
try {
md5Line = reader.readLine();
@@ -155,7 +155,7 @@ private static void saveMD5File(File dataFile, String digestString)
String md5Line = digestString + " *" + dataFile.getName() + "\n";
AtomicFileOutputStream afos = new AtomicFileOutputStream(md5File);
- afos.write(md5Line.getBytes(Charsets.UTF_8));
+ afos.write(md5Line.getBytes(StandardCharsets.UTF_8));
afos.close();
if (LOG.isDebugEnabled()) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java
index 78834e0161..309e27d82c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java
@@ -22,14 +22,13 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-
/**
* Class that represents a file on disk which persistently stores
* a single <code>long</code> value. The file is updated atomically
@@ -77,7 +76,7 @@ public void set(long newVal) throws IOException {
public static void writeFile(File file, long val) throws IOException {
AtomicFileOutputStream fos = new AtomicFileOutputStream(file);
try {
- fos.write(String.valueOf(val).getBytes(Charsets.UTF_8));
+ fos.write(String.valueOf(val).getBytes(StandardCharsets.UTF_8));
fos.write('\n');
fos.close();
fos = null;
@@ -93,7 +92,7 @@ public static long readFile(File file, long defaultVal) throws IOException {
if (file.exists()) {
BufferedReader br =
new BufferedReader(new InputStreamReader(new FileInputStream(
- file), Charsets.UTF_8));
+ file), StandardCharsets.UTF_8));
try {
val = Long.parseLong(br.readLine());
br.close();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
index e816edd311..4ff57dd7e1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
@@ -53,6 +53,7 @@
import java.net.URL;
import java.net.URLConnection;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
@@ -70,7 +71,6 @@
import java.util.UUID;
import java.util.concurrent.TimeoutException;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
@@ -985,7 +985,7 @@ public static void appendFileNewBlock(DistributedFileSystem fs,
* @return url content as string (UTF-8 encoding assumed)
*/
public static String urlGet(URL url) throws IOException {
- return new String(urlGetBytes(url), Charsets.UTF_8);
+ return new String(urlGetBytes(url), StandardCharsets.UTF_8);
}
/**
@@ -1438,7 +1438,7 @@ public static void runOperations(MiniDFSCluster cluster,
Short permission = 0777;
filesystem.setPermission(pathFileCreate, new FsPermission(permission));
// OP_SET_OWNER 8
- filesystem.setOwner(pathFileCreate, new String("newOwner"), null);
+ filesystem.setOwner(pathFileCreate, "newOwner", null);
// OP_CLOSE 9 see above
// OP_SET_GENSTAMP 10 see above
// OP_SET_NS_QUOTA 11 obsolete
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBalancerBandwidth.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBalancerBandwidth.java
index 3191fbdf8f..bc83a70472 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBalancerBandwidth.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBalancerBandwidth.java
@@ -23,6 +23,7 @@
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.concurrent.TimeoutException;
@@ -46,7 +47,7 @@ public class TestBalancerBandwidth {
final static private int DEFAULT_BANDWIDTH = 1024*1024;
public static final Logger LOG =
LoggerFactory.getLogger(TestBalancerBandwidth.class);
- private static final Charset UTF8 = Charset.forName("UTF-8");
+ private static final Charset UTF8 = StandardCharsets.UTF_8;
private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
private final PrintStream outStream = new PrintStream(outContent);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java
index 1092f74a3b..7fc6cd9cfa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSRollback.java
@@ -23,6 +23,7 @@
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
@@ -41,7 +42,6 @@
import org.junit.After;
import org.junit.Test;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* This test ensures the appropriate response (successful or failure) from
@@ -312,8 +312,8 @@ public void testRollback() throws Exception {
for (File f : baseDirs) {
UpgradeUtilities.corruptFile(
new File(f,"VERSION"),
- "layoutVersion".getBytes(Charsets.UTF_8),
- "xxxxxxxxxxxxx".getBytes(Charsets.UTF_8));
+ "layoutVersion".getBytes(StandardCharsets.UTF_8),
+ "xxxxxxxxxxxxx".getBytes(StandardCharsets.UTF_8));
}
startNameNodeShouldFail("file VERSION has layoutVersion missing");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java
index ac58f320b2..ebc0379bc6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgrade.java
@@ -29,6 +29,7 @@
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
import org.slf4j.Logger;
@@ -49,7 +50,6 @@
import org.junit.Ignore;
import org.junit.Test;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
/**
@@ -335,8 +335,8 @@ public void testUpgrade() throws Exception {
for (File f : baseDirs) {
UpgradeUtilities.corruptFile(
new File(f,"VERSION"),
- "layoutVersion".getBytes(Charsets.UTF_8),
- "xxxxxxxxxxxxx".getBytes(Charsets.UTF_8));
+ "layoutVersion".getBytes(StandardCharsets.UTF_8),
+ "xxxxxxxxxxxxx".getBytes(StandardCharsets.UTF_8));
}
startNameNodeShouldFail(StartupOption.UPGRADE);
UpgradeUtilities.createEmptyDirs(nameNodeDirs);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeReport.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeReport.java
index 9638f71ef8..239555a8b0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeReport.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeReport.java
@@ -160,7 +160,7 @@ public void testDatanodeReportMissingBlock() throws Exception {
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
Path p = new Path("/testDatanodeReportMissingBlock");
- DFSTestUtil.writeFile(fs, p, new String("testdata"));
+ DFSTestUtil.writeFile(fs, p, "testdata");
LocatedBlock lb = fs.getClient().getLocatedBlocks(p.toString(), 0).get(0);
assertEquals(3, lb.getLocations().length);
ExtendedBlock b = lb.getBlock();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultipleNNPortQOP.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultipleNNPortQOP.java
index d536c5e8a9..8121d8454a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultipleNNPortQOP.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMultipleNNPortQOP.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hdfs;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -287,7 +288,7 @@ public void testMultipleNNPortOverwriteDownStream() throws Exception {
private void doTest(FileSystem fs, Path path) throws Exception {
FileSystemTestHelper.createFile(fs, path, NUM_BLOCKS, BLOCK_SIZE);
assertArrayEquals(FileSystemTestHelper.getFileData(NUM_BLOCKS, BLOCK_SIZE),
- DFSTestUtil.readFile(fs, path).getBytes("UTF-8"));
+ DFSTestUtil.readFile(fs, path).getBytes(StandardCharsets.UTF_8));
BlockLocation[] blockLocations = fs.getFileBlockLocations(path, 0,
Long.MAX_VALUE);
assertNotNull(blockLocations);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
index a11af72204..2e6feaf544 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestQuota.java
@@ -32,6 +32,7 @@
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.List;
import java.util.Scanner;
@@ -66,7 +67,6 @@
import org.junit.Rule;
import org.junit.Test;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.junit.rules.Timeout;
import org.slf4j.Logger;
import org.slf4j.event.Level;
@@ -1216,7 +1216,7 @@ public void testSetSpaceQuotaWhenStorageTypeIsWrong() throws Exception {
String[] args =
{ "-setSpaceQuota", "100", "-storageType", "COLD", "/testDir" };
admin.run(args);
- String errOutput = new String(err.toByteArray(), Charsets.UTF_8);
+ String errOutput = new String(err.toByteArray(), StandardCharsets.UTF_8);
assertTrue(
errOutput.contains(StorageType.getTypesSupportingQuota().toString()));
} finally {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java
index 3dd0b7eb99..85e43f65c3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/TestSaslDataTransfer.java
@@ -32,6 +32,7 @@
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;
+import java.nio.charset.StandardCharsets;
import java.util.concurrent.atomic.AtomicBoolean;
import org.slf4j.LoggerFactory;
@@ -200,7 +201,7 @@ private void doTest(HdfsConfiguration conf) throws IOException {
fs = FileSystem.get(cluster.getURI(), conf);
FileSystemTestHelper.createFile(fs, PATH, NUM_BLOCKS, BLOCK_SIZE);
assertArrayEquals(FileSystemTestHelper.getFileData(NUM_BLOCKS, BLOCK_SIZE),
- DFSTestUtil.readFile(fs, PATH).getBytes("UTF-8"));
+ DFSTestUtil.readFile(fs, PATH).getBytes(StandardCharsets.UTF_8));
BlockLocation[] blockLocations = fs.getFileBlockLocations(PATH, 0,
Long.MAX_VALUE);
assertNotNull(blockLocations);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java
index 064dd9e5dd..f959b142a3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/server/TestJournalNode.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hdfs.qjournal.server;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.primitives.Bytes;
import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;
import org.apache.hadoop.conf.Configuration;
@@ -54,6 +53,7 @@
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
+import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
@@ -278,7 +278,7 @@ public void testJournal() throws Exception {
ch.newEpoch(1).get();
ch.setEpoch(1);
ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get();
- ch.sendEdits(1L, 1, 1, "hello".getBytes(Charsets.UTF_8)).get();
+ ch.sendEdits(1L, 1, 1, "hello".getBytes(StandardCharsets.UTF_8)).get();
metrics = MetricsAsserts.getMetrics(
journal.getMetrics().getName());
@@ -291,7 +291,7 @@ public void testJournal() throws Exception {
beginTimestamp = lastJournalTimestamp;
ch.setCommittedTxId(100L);
- ch.sendEdits(1L, 2, 1, "goodbye".getBytes(Charsets.UTF_8)).get();
+ ch.sendEdits(1L, 2, 1, "goodbye".getBytes(StandardCharsets.UTF_8)).get();
metrics = MetricsAsserts.getMetrics(
journal.getMetrics().getName());
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java
index 35f7924be1..89a9fdd1a8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java
@@ -553,7 +553,7 @@ public void testDNShouldNotDeleteBlockONTooManyOpenFiles()
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
Path p = new Path("/testShouldThrowTMP");
- DFSTestUtil.writeFile(fs, p, new String("testdata"));
+ DFSTestUtil.writeFile(fs, p, "testdata");
//Before DN throws too many open files
verifyBlockLocations(fs, p, 1);
Mockito.doThrow(new FileNotFoundException("Too many open files")).
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFavoredNodesEndToEnd.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFavoredNodesEndToEnd.java
index b8cc32e43e..14fd75c72c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFavoredNodesEndToEnd.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFavoredNodesEndToEnd.java
@@ -60,7 +60,7 @@ public class TestFavoredNodesEndToEnd {
private static Configuration conf;
private final static int NUM_DATA_NODES = 10;
private final static int NUM_FILES = 10;
- private final static byte[] SOME_BYTES = new String("foo").getBytes();
+ private final static byte[] SOME_BYTES = "foo".getBytes();
private static DistributedFileSystem dfs;
private static ArrayList<DataNode> datanodes;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
index 1782a4644b..892c5ce020 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
@@ -919,7 +919,7 @@ public void testFsckReplicaDetails() throws Exception {
dfs = cluster.getFileSystem();
// create files
- final String testFile = new String("/testfile");
+ final String testFile = "/testfile";
final Path path = new Path(testFile);
DFSTestUtil.createFile(dfs, path, fileSize, replFactor, 1000L);
DFSTestUtil.waitReplication(dfs, path, replFactor);
@@ -1202,7 +1202,7 @@ public void testFsckMissingReplicas() throws IOException {
assertNotNull("Failed to get FileSystem", dfs);
// Create a file that will be intentionally under-replicated
- final String pathString = new String("/testfile");
+ final String pathString = "/testfile";
final Path path = new Path(pathString);
long fileLen = blockSize * numBlocks;
DFSTestUtil.createFile(dfs, path, fileLen, replFactor, 1);
@@ -1263,7 +1263,7 @@ public void testFsckMisPlacedReplicas() throws IOException {
assertNotNull("Failed to get FileSystem", dfs);
// Create a file that will be intentionally under-replicated
- final String pathString = new String("/testfile");
+ final String pathString = "/testfile";
final Path path = new Path(pathString);
long fileLen = blockSize * numBlocks;
DFSTestUtil.createFile(dfs, path, fileLen, replFactor, 1);
@@ -1436,7 +1436,7 @@ public void testBlockIdCK() throws Exception {
DFSTestUtil util = new DFSTestUtil.Builder().
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
- final String pathString = new String("/testfile");
+ final String pathString = "/testfile";
final Path path = new Path(pathString);
util.createFile(dfs, path, 1024, replFactor, 1000L);
util.waitReplication(dfs, path, replFactor);
@@ -1490,7 +1490,7 @@ public void testBlockIdCKDecommission() throws Exception {
DFSTestUtil util = new DFSTestUtil.Builder().
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
- final String pathString = new String("/testfile");
+ final String pathString = "/testfile";
final Path path = new Path(pathString);
util.createFile(dfs, path, 1024, replFactor, 1000L);
util.waitReplication(dfs, path, replFactor);
@@ -1577,7 +1577,7 @@ public void testBlockIdCKMaintenance() throws Exception {
DFSTestUtil util = new DFSTestUtil.Builder().
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
- final String pathString = new String("/testfile");
+ final String pathString = "/testfile";
final Path path = new Path(pathString);
util.createFile(dfs, path, 1024, replFactor, 1000L);
util.waitReplication(dfs, path, replFactor);
@@ -1694,7 +1694,7 @@ public void testBlockIdCKStaleness() throws Exception {
setName(getClass().getSimpleName()).setNumFiles(1).build();
// Create one file.
- final String pathString = new String("/testfile");
+ final String pathString = "/testfile";
final Path path = new Path(pathString);
util.createFile(fs, path, 1024L, replFactor, 1024L);
util.waitReplication(fs, path, replFactor);
@@ -1780,7 +1780,7 @@ public void testBlockIdCKCorruption() throws Exception {
DFSTestUtil util = new DFSTestUtil.Builder().
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
- final String pathString = new String("/testfile");
+ final String pathString = "/testfile";
final Path path = new Path(pathString);
util.createFile(dfs, path, 1024, repFactor, 1000L);
util.waitReplication(dfs, path, repFactor);
@@ -1937,7 +1937,7 @@ public void testFsckWithDecommissionedReplicas() throws Exception {
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
- final String testFile = new String("/testfile");
+ final String testFile = "/testfile";
final Path path = new Path(testFile);
util.createFile(dfs, path, fileSize, replFactor, 1000L);
util.waitReplication(dfs, path, replFactor);
@@ -2020,7 +2020,7 @@ public void testFsckWithMaintenanceReplicas() throws Exception {
DFSTestUtil util = new DFSTestUtil.Builder().
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
- final String testFile = new String("/testfile");
+ final String testFile = "/testfile";
final Path path = new Path(testFile);
util.createFile(dfs, path, 1024, replFactor, 1000L);
util.waitReplication(dfs, path, replFactor);
@@ -2394,7 +2394,7 @@ private void testUpgradeDomain(boolean defineUpgradeDomain,
}
// create files
- final String testFile = new String("/testfile");
+ final String testFile = "/testfile";
final Path path = new Path(testFile);
DFSTestUtil.createFile(dfs, path, fileSize, replFactor, 1000L);
DFSTestUtil.waitReplication(dfs, path, replFactor);
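Several of the test hunks above drop the new String("...") wrapper around string literals. A minimal sketch of why the wrapper is redundant, using hypothetical values:

    public class RedundantStringSketch {
      public static void main(String[] args) {
        String a = "/testfile";              // the literal is already a String
        String b = new String("/testfile");  // extra allocation, same contents

        System.out.println(a.equals(b));     // true: equal contents
        System.out.println(a == b);          // false: needlessly distinct objects
      }
    }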
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestINodeFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestINodeFile.java
index b32f8fe759..4f18baf1ae 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestINodeFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestINodeFile.java
@@ -1163,7 +1163,7 @@ public void testFilesInGetListingOps() throws Exception {
HdfsFileStatus.EMPTY_NAME, false);
assertTrue(dl.getPartialListing().length == 3);
- String f2 = new String("f2");
+ String f2 = "f2";
dl = cluster.getNameNodeRpc().getListing("/tmp", f2.getBytes(), false);
assertTrue(dl.getPartialListing().length == 1);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java
index 245602ee9b..d28f0a4c29 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStartupProgressServlet.java
@@ -24,6 +24,7 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
@@ -245,7 +246,7 @@ public void testFinalState() throws Exception {
*/
private String doGetAndReturnResponseBody() throws IOException {
servlet.doGet(req, resp);
- return new String(respOut.toByteArray(), "UTF-8");
+ return new String(respOut.toByteArray(), StandardCharsets.UTF_8);
}
/**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
index 5681f3bdb2..3d4ddfe88e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSAdminWithHA.java
@@ -19,8 +19,8 @@
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -56,8 +56,8 @@ public class TestDFSAdminWithHA {
private static String newLine = System.getProperty("line.separator");
private void assertOutputMatches(String string) {
- String errOutput = new String(err.toByteArray(), Charsets.UTF_8);
- String output = new String(out.toByteArray(), Charsets.UTF_8);
+ String errOutput = new String(err.toByteArray(), StandardCharsets.UTF_8);
+ String output = new String(out.toByteArray(), StandardCharsets.UTF_8);
if (!errOutput.matches(string) && !output.matches(string)) {
fail("Expected output to match '" + string +
@@ -70,8 +70,8 @@ private void assertOutputMatches(String string) {
}
private void assertOutputMatches(String outMessage, String errMessage) {
- String errOutput = new String(err.toByteArray(), Charsets.UTF_8);
- String output = new String(out.toByteArray(), Charsets.UTF_8);
+ String errOutput = new String(err.toByteArray(), StandardCharsets.UTF_8);
+ String output = new String(out.toByteArray(), StandardCharsets.UTF_8);
if (!errOutput.matches(errMessage) || !output.matches(outMessage)) {
fail("Expected output to match '" + outMessage + " and " + errMessage +
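These assertion helpers decode captured command output with an explicit constant charset rather than Guava's Charsets.UTF_8. A hedged, self-contained sketch of the same pattern follows; the class name and message are made up for illustration:

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;
    import java.nio.charset.StandardCharsets;

    public class CapturedOutputSketch {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        PrintStream ps = new PrintStream(out, true, "UTF-8");
        ps.println("Safe mode is OFF");

        // Decode exactly the bytes that were written, independent of the
        // platform default charset.
        String output = new String(out.toByteArray(), StandardCharsets.UTF_8);
        System.out.println(output.matches("Safe mode is .*\\s*")); // true
      }
    }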
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdmin.java
index 0086134d78..51d5b90e38 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdmin.java
@@ -26,6 +26,7 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -48,7 +49,6 @@
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
public class TestDFSHAAdmin {
@@ -435,8 +435,8 @@ private Object runTool(String ... args) throws Exception {
outBytes.reset();
LOG.info("Running: DFSHAAdmin " + Joiner.on(" ").join(args));
int ret = tool.run(args);
- errOutput = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
- output = new String(outBytes.toByteArray(), Charsets.UTF_8);
+ errOutput = new String(errOutBytes.toByteArray(), StandardCharsets.UTF_8);
+ output = new String(outBytes.toByteArray(), StandardCharsets.UTF_8);
LOG.info("Err_output:\n" + errOutput + "\nOutput:\n" + output);
return ret;
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
index d0edd175ec..bf557deaea 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
@@ -27,6 +27,7 @@
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -45,7 +46,6 @@
import org.junit.Before;
import org.junit.Test;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.thirdparty.com.google.common.io.Files;
@@ -232,7 +232,7 @@ public void testFencer() throws Exception {
assertEquals(0, runTool("-ns", "minidfs-ns", "-failover", "nn2", "nn1"));
// Fencer has not run yet, since none of the above required fencing
- assertEquals("", Files.asCharSource(tmpFile, Charsets.UTF_8).read());
+ assertEquals("", Files.asCharSource(tmpFile, StandardCharsets.UTF_8).read());
// Test failover with fencer and forcefence option
assertEquals(0, runTool("-failover", "nn1", "nn2", "--forcefence"));
@@ -240,7 +240,7 @@ public void testFencer() throws Exception {
// The fence script should run with the configuration from the target
// node, rather than the configuration from the fencing node. Strip
// out any trailing spaces and CR/LFs which may be present on Windows.
- String fenceCommandOutput = Files.asCharSource(tmpFile, Charsets.UTF_8)
+ String fenceCommandOutput = Files.asCharSource(tmpFile, StandardCharsets.UTF_8)
.read().replaceAll(" *[\r\n]+", "");
assertEquals("minidfs-ns.nn1 " + nn1Port + " nn1", fenceCommandOutput);
tmpFile.delete();
@@ -325,7 +325,7 @@ private int runTool(String ... args) throws Exception {
errOutBytes.reset();
LOG.info("Running: DFSHAAdmin " + Joiner.on(" ").join(args));
int ret = tool.run(args);
- errOutput = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
+ errOutput = new String(errOutBytes.toByteArray(), StandardCharsets.UTF_8);
LOG.info("Output:\n" + errOutput);
return ret;
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
index c7df1f7c1d..2a9eda3ca5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
@@ -27,6 +27,7 @@
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
+import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Map;
@@ -341,7 +342,7 @@ public void testLengthParamLongerThanFile() throws IOException {
byte[] respBody = new byte[content.length()];
is = conn.getInputStream();
IOUtils.readFully(is, respBody, 0, content.length());
- assertEquals(content, new String(respBody, "US-ASCII"));
+ assertEquals(content, new String(respBody, StandardCharsets.US_ASCII));
} finally {
IOUtils.closeStream(is);
if (conn != null) {
@@ -392,7 +393,7 @@ public void testOffsetPlusLengthParamsLongerThanFile() throws IOException {
byte[] respBody = new byte[content.length() - 1];
is = conn.getInputStream();
IOUtils.readFully(is, respBody, 0, content.length() - 1);
- assertEquals(content.substring(1), new String(respBody, "US-ASCII"));
+ assertEquals(content.substring(1), new String(respBody, StandardCharsets.US_ASCII));
} finally {
IOUtils.closeStream(is);
if (conn != null) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java
index a693ac3d5e..1f5c89a03e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java
@@ -31,6 +31,7 @@
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.nio.channels.SocketChannel;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -332,7 +333,7 @@ public void run() {
// Write response.
out = clientSocket.getOutputStream();
- out.write(temporaryRedirect().getBytes("UTF-8"));
+ out.write(temporaryRedirect().getBytes(StandardCharsets.UTF_8));
} catch (IOException e) {
// Fail the test on any I/O error in the server thread.
LOG.error("unexpected IOException in server thread", e);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestHdfsConfigFields.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestHdfsConfigFields.java
index fab4fcd040..da57cab60a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestHdfsConfigFields.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestHdfsConfigFields.java
@@ -39,7 +39,7 @@ public class TestHdfsConfigFields extends TestConfigurationFieldsBase {
@Override
public void initializeMemberVariables() {
- xmlFilename = new String("hdfs-default.xml");
+ xmlFilename = "hdfs-default.xml";
configurationClasses = new Class[] { HdfsClientConfigKeys.class,
HdfsClientConfigKeys.Failover.class,
HdfsClientConfigKeys.StripedRead.class, DFSConfigKeys.class,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
index 5a23b58875..d0b9acee8e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
@@ -28,6 +28,7 @@
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
+import java.nio.charset.StandardCharsets;
import java.util.function.Supplier;
import java.io.File;
import java.io.FileInputStream;
@@ -2097,7 +2098,7 @@ public static String slurp(File f) throws IOException {
String contents = null;
try {
in.read(buf, 0, len);
- contents = new String(buf, "UTF-8");
+ contents = new String(buf, StandardCharsets.UTF_8);
} finally {
in.close();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java
index 9158ec3b33..3b41f41864 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobQueueClient.java
@@ -21,6 +21,7 @@
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.Writer;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.ArrayList;
import java.util.Arrays;
@@ -31,7 +32,6 @@
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* <code>JobQueueClient</code> is interface provided to the user in order to get
@@ -148,7 +148,7 @@ private void displayQueueList() throws IOException {
JobQueueInfo[] rootQueues = jc.getRootQueues();
for (JobQueueInfo queue : rootQueues) {
printJobQueueInfo(queue, new PrintWriter(new OutputStreamWriter(
- System.out, Charsets.UTF_8)));
+ System.out, StandardCharsets.UTF_8)));
}
}
@@ -187,7 +187,7 @@ private void displayQueueInfo(String queue, boolean showJobs)
return;
}
printJobQueueInfo(jobQueueInfo, new PrintWriter(new OutputStreamWriter(
- System.out, Charsets.UTF_8)));
+ System.out, StandardCharsets.UTF_8)));
if (showJobs && (jobQueueInfo.getChildren() == null ||
jobQueueInfo.getChildren().size() == 0)) {
JobStatus[] jobs = jobQueueInfo.getJobStatuses();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
index a0223dedd6..36405328f5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskLog.java
@@ -27,6 +27,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
@@ -56,7 +57,6 @@
import org.apache.log4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* A simple logger to handle the task-specific user logs.
@@ -114,7 +114,7 @@ private static LogFileDetail getLogFileDetail(TaskAttemptID taskid,
File indexFile = getIndexFile(taskid, isCleanup);
BufferedReader fis = new BufferedReader(new InputStreamReader(
SecureIOUtils.openForRead(indexFile, obtainLogDirOwner(taskid), null),
- Charsets.UTF_8));
+ StandardCharsets.UTF_8));
//the format of the index file is
//LOG_DIR: <the dir where the task logs are really stored>
//stdout:<start-offset in the stdout file> <length>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java
index 45b4fd6f71..62f4031566 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TextInputFormat.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.mapred;
import java.io.*;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -27,7 +28,6 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.*;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* An {@link InputFormat} for plain text files. Files are broken into lines.
@@ -62,7 +62,7 @@ public RecordReader getRecordReader(
String delimiter = job.get("textinputformat.record.delimiter");
byte[] recordDelimiterBytes = null;
if (null != delimiter) {
- recordDelimiterBytes = delimiter.getBytes(Charsets.UTF_8);
+ recordDelimiterBytes = delimiter.getBytes(StandardCharsets.UTF_8);
}
return new LineRecordReader(job, (FileSplit) genericSplit,
recordDelimiterBytes);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
index 232b8bf13b..0f7f9dfc14 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/JobSubmitter.java
@@ -22,6 +22,7 @@
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
@@ -63,7 +64,6 @@
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.yarn.api.records.ReservationId;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
@InterfaceAudience.Private
@InterfaceStability.Unstable
@@ -409,7 +409,7 @@ private void readTokensFromFiles(Configuration conf, Credentials credentials)
for(Map.Entry ent: nm.entrySet()) {
credentials.addSecretKey(new Text(ent.getKey()), ent.getValue()
- .getBytes(Charsets.UTF_8));
+ .getBytes(StandardCharsets.UTF_8));
}
} catch (JsonMappingException | JsonParseException e) {
LOG.warn("couldn't parse Token Cache JSON file with user secret keys");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JSONHistoryViewerPrinter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JSONHistoryViewerPrinter.java
index 5f8e9ad4b6..88e8d07a29 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JSONHistoryViewerPrinter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JSONHistoryViewerPrinter.java
@@ -33,6 +33,7 @@
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Writer;
+import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.Map;
@@ -72,7 +73,7 @@ public void print(PrintStream ps) throws IOException {
printTaskSummary();
printTasks();
- writer = new OutputStreamWriter(ps, "UTF-8");
+ writer = new OutputStreamWriter(ps, StandardCharsets.UTF_8);
json.write(writer);
writer.flush();
} catch (JSONException je) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java
index d15ee7ca4c..77bea97e14 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/TextInputFormat.java
@@ -32,7 +32,8 @@
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
+import java.nio.charset.StandardCharsets;
+
/** An {@link InputFormat} for plain text files. Files are broken into lines.
* Either linefeed or carriage-return are used to signal end of line. Keys are
@@ -49,7 +50,7 @@ public class TextInputFormat extends FileInputFormat {
"textinputformat.record.delimiter");
byte[] recordDelimiterBytes = null;
if (null != delimiter)
- recordDelimiterBytes = delimiter.getBytes(Charsets.UTF_8);
+ recordDelimiterBytes = delimiter.getBytes(StandardCharsets.UTF_8);
return new LineRecordReader(recordDelimiterBytes);
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java
index 69377e73e6..f7b68f75e8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldBasedPartitioner.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.mapreduce.lib.partition;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -90,12 +90,7 @@ public int getPartition(K2 key, V2 value, int numReduceTasks) {
return getPartition(key.toString().hashCode(), numReduceTasks);
}
- try {
- keyBytes = key.toString().getBytes("UTF-8");
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException("The current system does not " +
- "support UTF-8 encoding!", e);
- }
+ keyBytes = key.toString().getBytes(StandardCharsets.UTF_8);
// return 0 if the key is empty
if (keyBytes.length == 0) {
return 0;
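This hunk also deletes a try/catch, which the charset change makes possible: String.getBytes(String charsetName) declares the checked UnsupportedEncodingException, while String.getBytes(Charset) does not. A minimal before/after sketch, assuming nothing beyond the JDK:

    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class NoCheckedExceptionSketch {
      static byte[] legacy(String key) {
        try {
          return key.getBytes("UTF-8");              // checked exception forces the wrapper
        } catch (UnsupportedEncodingException e) {
          throw new RuntimeException("The current system does not support UTF-8 encoding!", e);
        }
      }

      static byte[] modern(String key) {
        return key.getBytes(StandardCharsets.UTF_8); // compiles with no try/catch at all
      }

      public static void main(String[] args) {
        System.out.println(legacy("foo").length == modern("foo").length); // true
      }
    }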
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldHelper.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldHelper.java
index 21ca3fae33..03ef14c9ba 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldHelper.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/KeyFieldHelper.java
@@ -18,7 +18,7 @@
package org.apache.hadoop.mapreduce.lib.partition;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.ArrayList;
import java.util.StringTokenizer;
@@ -61,13 +61,8 @@ public String toString() {
private boolean keySpecSeen = false;
public void setKeyFieldSeparator(String keyFieldSeparator) {
- try {
- this.keyFieldSeparator =
- keyFieldSeparator.getBytes("UTF-8");
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException("The current system does not " +
- "support UTF-8 encoding!", e);
- }
+ this.keyFieldSeparator =
+ keyFieldSeparator.getBytes(StandardCharsets.UTF_8);
}
/** Required for backcompatibility with num.key.fields.for.partition in
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
index cdd656ca48..47c8d12cb4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/security/SecureShuffleUtils.java
@@ -23,6 +23,7 @@
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
+import java.nio.charset.StandardCharsets;
import javax.crypto.SecretKey;
import javax.servlet.http.HttpServletRequest;
@@ -34,7 +35,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
*
@@ -56,7 +56,7 @@ public class SecureShuffleUtils {
*/
public static String generateHash(byte[] msg, SecretKey key) {
return new String(Base64.encodeBase64(generateByteHash(msg, key)),
- Charsets.UTF_8);
+ StandardCharsets.UTF_8);
}
/**
@@ -70,7 +70,6 @@ private static byte[] generateByteHash(byte[] msg, SecretKey key) {
/**
* verify that hash equals to HMacHash(msg)
- * @param newHash
* @return true if is the same
*/
private static boolean verifyHash(byte[] hash, byte[] msg, SecretKey key) {
@@ -87,7 +86,7 @@ private static boolean verifyHash(byte[] hash, byte[] msg, SecretKey key) {
*/
public static String hashFromString(String enc_str, SecretKey key)
throws IOException {
- return generateHash(enc_str.getBytes(Charsets.UTF_8), key);
+ return generateHash(enc_str.getBytes(StandardCharsets.UTF_8), key);
}
/**
@@ -98,9 +97,9 @@ public static String hashFromString(String enc_str, SecretKey key)
*/
public static void verifyReply(String base64Hash, String msg, SecretKey key)
throws IOException {
- byte[] hash = Base64.decodeBase64(base64Hash.getBytes(Charsets.UTF_8));
+ byte[] hash = Base64.decodeBase64(base64Hash.getBytes(StandardCharsets.UTF_8));
- boolean res = verifyHash(hash, msg.getBytes(Charsets.UTF_8), key);
+ boolean res = verifyHash(hash, msg.getBytes(StandardCharsets.UTF_8), key);
if(res != true) {
throw new IOException("Verification of the hashReply failed");
@@ -148,7 +147,7 @@ public static String toHex(byte[] ba) {
for (byte b : ba) {
ps.printf("%x", b);
}
- strHex = baos.toString("UTF-8");
+ strHex = new String(baos.toByteArray(), StandardCharsets.UTF_8);
} catch (UnsupportedEncodingException e) {
}
return strHex;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplit.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplit.java
index 7d08fb3acd..d725196b9b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplit.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplit.java
@@ -20,7 +20,7 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
@@ -47,15 +47,8 @@
@InterfaceStability.Unstable
public class JobSplit {
static final int META_SPLIT_VERSION = 1;
- static final byte[] META_SPLIT_FILE_HEADER;
- static {
- try {
- META_SPLIT_FILE_HEADER = "META-SPL".getBytes("UTF-8");
- } catch (UnsupportedEncodingException u) {
- throw new RuntimeException(u);
- }
- }
- public static final TaskSplitMetaInfo EMPTY_TASK_SPLIT =
+ static final byte[] META_SPLIT_FILE_HEADER = "META-SPL".getBytes(StandardCharsets.UTF_8);
+ public static final TaskSplitMetaInfo EMPTY_TASK_SPLIT =
new TaskSplitMetaInfo();
/**
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java
index 76234bd17f..a32e254af4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplitWriter.java
@@ -19,7 +19,7 @@
package org.apache.hadoop.mapreduce.split;
import java.io.IOException;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
@@ -54,16 +54,8 @@ public class JobSplitWriter {
private static final Logger LOG =
LoggerFactory.getLogger(JobSplitWriter.class);
private static final int splitVersion = JobSplit.META_SPLIT_VERSION;
- private static final byte[] SPLIT_FILE_HEADER;
+ private static final byte[] SPLIT_FILE_HEADER = "SPL".getBytes(StandardCharsets.UTF_8);
- static {
- try {
- SPLIT_FILE_HEADER = "SPL".getBytes("UTF-8");
- } catch (UnsupportedEncodingException u) {
- throw new RuntimeException(u);
- }
- }
-
@SuppressWarnings("unchecked")
public static void createSplitFiles(Path jobSubmitDir,
Configuration conf, FileSystem fs, List splits)
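In the JobSplit and JobSplitWriter hunks the same property lets a constant header drop its static initializer block entirely. A sketch of the simplification, with an illustrative field name rather than the patch's own:

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class SplitHeaderSketch {
      // Previously the field had to be assigned inside a static { ... } block so the
      // checked UnsupportedEncodingException from getBytes("UTF-8") could be rethrown.
      // With a Charset argument, a one-line initializer is enough.
      static final byte[] HEADER = "SPL".getBytes(StandardCharsets.UTF_8);

      public static void main(String[] args) {
        System.out.println(Arrays.toString(HEADER)); // [83, 80, 76]
      }
    }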
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
index dcd4edd6ff..0f65a29b13 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
@@ -24,6 +24,7 @@
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
@@ -64,7 +65,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* Interprets the map reduce cli options
@@ -767,7 +767,7 @@ protected void displayTasks(Job job, String type, String state)
public void displayJobList(JobStatus[] jobs)
throws IOException, InterruptedException {
displayJobList(jobs, new PrintWriter(new OutputStreamWriter(System.out,
- Charsets.UTF_8)));
+ StandardCharsets.UTF_8)));
}
@Private
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
index bb5c30e951..b646b04b74 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileOutputCommitter.java
@@ -22,6 +22,7 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
@@ -571,7 +572,7 @@ public static String slurp(File f) throws IOException {
String contents = null;
try {
in.read(buf, 0, len);
- contents = new String(buf, "UTF-8");
+ contents = new String(buf, StandardCharsets.UTF_8);
} finally {
in.close();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java
index f4f2d18c38..1b3ebdfa31 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestLineRecordReader.java
@@ -238,7 +238,7 @@ public String[] readRecordsDirectly(URL testFileUrl, boolean bzip)
}
fis.close();
assertTrue("Test file data too big for buffer", count < data.length);
- return new String(data, 0, count, "UTF-8").split("\n");
+ return new String(data, 0, count, StandardCharsets.UTF_8).split("\n");
}
public void checkRecordSpanningMultipleSplits(String testFile,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestLineRecordReader.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestLineRecordReader.java
index f6fbbd58da..b2b2dd6258 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestLineRecordReader.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestLineRecordReader.java
@@ -183,7 +183,7 @@ public String[] readRecordsDirectly(URL testFileUrl, boolean bzip)
}
fis.close();
assertTrue("Test file data too big for buffer", count < data.length);
- return new String(data, 0, count, "UTF-8").split("\n");
+ return new String(data, 0, count, StandardCharsets.UTF_8).split("\n");
}
public void checkRecordSpanningMultipleSplits(String testFile,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
index 9c58c0d773..2aa7b34a00 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestFileOutputCommitter.java
@@ -23,6 +23,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
@@ -843,7 +844,7 @@ public static String slurp(File f) throws IOException {
String contents = null;
try {
in.read(buf, 0, len);
- contents = new String(buf, "UTF-8");
+ contents = new String(buf, StandardCharsets.UTF_8);
} finally {
in.close();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
index af6b9529e0..ae68d74d8d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestConcatenatedCompressedInput.java
@@ -38,6 +38,7 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.Inflater;
@@ -295,7 +296,7 @@ public void testPrototypeInflaterGzip() throws IOException {
try {
int numBytesUncompressed = inflater.inflate(uncompressedBuf);
String outString =
- new String(uncompressedBuf, 0, numBytesUncompressed, "UTF-8");
+ new String(uncompressedBuf, 0, numBytesUncompressed, StandardCharsets.UTF_8);
System.out.println("uncompressed data of first gzip member = [" +
outString + "]");
} catch (java.util.zip.DataFormatException ex) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
index 51347296a8..5fec24a1b1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestFixedLengthInputFormat.java
@@ -97,7 +97,7 @@ public void testFormatCompressedIn() throws IOException {
@Test (timeout=5000)
public void testNoRecordLength() throws IOException {
localFs.delete(workDir, true);
- Path file = new Path(workDir, new String("testFormat.txt"));
+ Path file = new Path(workDir, "testFormat.txt");
createFile(file, null, 10, 10);
// Set the fixed length record length config property
JobConf job = new JobConf(defaultConf);
@@ -124,7 +124,7 @@ public void testNoRecordLength() throws IOException {
@Test (timeout=5000)
public void testZeroRecordLength() throws IOException {
localFs.delete(workDir, true);
- Path file = new Path(workDir, new String("testFormat.txt"));
+ Path file = new Path(workDir, "testFormat.txt");
createFile(file, null, 10, 10);
// Set the fixed length record length config property
JobConf job = new JobConf(defaultConf);
@@ -152,7 +152,7 @@ public void testZeroRecordLength() throws IOException {
@Test (timeout=5000)
public void testNegativeRecordLength() throws IOException {
localFs.delete(workDir, true);
- Path file = new Path(workDir, new String("testFormat.txt"));
+ Path file = new Path(workDir, "testFormat.txt");
createFile(file, null, 10, 10);
// Set the fixed length record length config property
JobConf job = new JobConf(defaultConf);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
index 4a7c3283d4..fd73410918 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
@@ -24,6 +24,7 @@
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
@@ -180,7 +181,7 @@ public static String slurp(File f) throws IOException {
String contents = null;
try {
in.read(buf, 0, len);
- contents = new String(buf, "UTF-8");
+ contents = new String(buf, StandardCharsets.UTF_8);
} finally {
in.close();
}
@@ -194,7 +195,7 @@ public static String slurpHadoop(Path p, FileSystem fs) throws IOException {
String contents = null;
try {
in.read(buf, 0, len);
- contents = new String(buf, "UTF-8");
+ contents = new String(buf, StandardCharsets.UTF_8);
} finally {
in.close();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java
index d160de5db6..c971ccc6c0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/jobcontrol/JobControlTestUtils.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.mapred.jobcontrol;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.text.NumberFormat;
import java.util.Iterator;
import java.util.List;
@@ -100,7 +101,7 @@ static void generateData(FileSystem fs, Path dirPath) throws IOException {
FSDataOutputStream out = fs.create(new Path(dirPath, "data.txt"));
for (int i = 0; i < 10000; i++) {
String line = generateRandomLine();
- out.write(line.getBytes("UTF-8"));
+ out.write(line.getBytes(StandardCharsets.UTF_8));
}
out.close();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MapReduceTestUtil.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MapReduceTestUtil.java
index 2f30bb5ec0..4141d26933 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MapReduceTestUtil.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MapReduceTestUtil.java
@@ -25,6 +25,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Iterator;
@@ -116,7 +117,7 @@ public static void generateData(FileSystem fs, Path dirPath)
FSDataOutputStream out = fs.create(new Path(dirPath, "data.txt"));
for (int i = 0; i < 10000; i++) {
String line = generateRandomLine();
- out.write(line.getBytes("UTF-8"));
+ out.write(line.getBytes(StandardCharsets.UTF_8));
}
out.close();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
index 31b90aa0e5..17cd5bfaac 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
@@ -47,6 +47,7 @@
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static org.junit.Assert.assertEquals;
@@ -193,7 +194,7 @@ private void testfailTask(Configuration conf) throws Exception {
assertEquals("Exit code", -1, exitCode);
runTool(conf, jc, new String[] { "-fail-task", taid.toString() }, out);
- String answer = new String(out.toByteArray(), "UTF-8");
+ String answer = new String(out.toByteArray(), StandardCharsets.UTF_8);
assertTrue(answer.contains("Killed task " + taid + " by failing it"));
}
@@ -211,7 +212,7 @@ private void testKillTask(Configuration conf) throws Exception {
assertEquals("Exit code", -1, exitCode);
runTool(conf, jc, new String[] { "-kill-task", taid.toString() }, out);
- String answer = new String(out.toByteArray(), "UTF-8");
+ String answer = new String(out.toByteArray(), StandardCharsets.UTF_8);
assertTrue(answer.contains("Killed task " + taid));
}
@@ -231,7 +232,7 @@ private void testKillJob(Configuration conf) throws Exception {
exitCode = runTool(conf, jc, new String[] { "-kill", jobId }, out);
assertEquals("Exit code", 0, exitCode);
- String answer = new String(out.toByteArray(), "UTF-8");
+ String answer = new String(out.toByteArray(), StandardCharsets.UTF_8);
assertTrue(answer.contains("Killed job " + jobId));
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java
index 684d3e13d1..be9e6deff3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFixedLengthInputFormat.java
@@ -102,7 +102,7 @@ public void testFormatCompressedIn() throws Exception {
@Test (timeout=5000)
public void testNoRecordLength() throws Exception {
localFs.delete(workDir, true);
- Path file = new Path(workDir, new String("testFormat.txt"));
+ Path file = new Path(workDir, "testFormat.txt");
createFile(file, null, 10, 10);
// Create the job and do not set fixed record length
Job job = Job.getInstance(defaultConf);
@@ -136,7 +136,7 @@ public void testNoRecordLength() throws Exception {
@Test (timeout=5000)
public void testZeroRecordLength() throws Exception {
localFs.delete(workDir, true);
- Path file = new Path(workDir, new String("testFormat.txt"));
+ Path file = new Path(workDir, "testFormat.txt");
createFile(file, null, 10, 10);
Job job = Job.getInstance(defaultConf);
// Set the fixed length record length config property
@@ -172,7 +172,7 @@ public void testZeroRecordLength() throws Exception {
@Test (timeout=5000)
public void testNegativeRecordLength() throws Exception {
localFs.delete(workDir, true);
- Path file = new Path(workDir, new String("testFormat.txt"));
+ Path file = new Path(workDir, "testFormat.txt");
createFile(file, null, 10, 10);
// Set the fixed length record length config property
Job job = Job.getInstance(defaultConf);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
index 2c8d13fff1..12d449ebba 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
@@ -18,8 +18,8 @@
package org.apache.hadoop.mapred.nativetask;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.RawComparator;
@@ -131,7 +131,7 @@ public void init(Context context) throws IOException, ClassNotFoundException {
if (ret) {
if (job.getBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, false)) {
String codec = job.get(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC);
- if (!NativeRuntime.supportsCompressionCodec(codec.getBytes(Charsets.UTF_8))) {
+ if (!NativeRuntime.supportsCompressionCodec(codec.getBytes(StandardCharsets.UTF_8))) {
String message = "Native output collector doesn't support compression codec " + codec;
LOG.error(message);
throw new InvalidJobConfException(message);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeRuntime.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeRuntime.java
index 311ee223b9..df1c7ade18 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeRuntime.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeRuntime.java
@@ -19,8 +19,8 @@
package org.apache.hadoop.mapred.nativetask;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
@@ -83,7 +83,7 @@ public static void configure(Configuration jobConf) {
*/
public synchronized static long createNativeObject(String clazz) {
assertNativeLibraryLoaded();
- final long ret = JNICreateNativeObject(clazz.getBytes(Charsets.UTF_8));
+ final long ret = JNICreateNativeObject(clazz.getBytes(StandardCharsets.UTF_8));
if (ret == 0) {
LOG.warn("Can't create NativeObject for class " + clazz + ", probably not exist.");
}
@@ -95,8 +95,8 @@ public synchronized static long createNativeObject(String clazz) {
*/
public synchronized static long registerLibrary(String libraryName, String clazz) {
assertNativeLibraryLoaded();
- final long ret = JNIRegisterModule(libraryName.getBytes(Charsets.UTF_8),
- clazz.getBytes(Charsets.UTF_8));
+ final long ret = JNIRegisterModule(libraryName.getBytes(StandardCharsets.UTF_8),
+ clazz.getBytes(StandardCharsets.UTF_8));
if (ret != 0) {
LOG.warn("Can't create NativeObject for class " + clazz + ", probably not exist.");
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/ConfigUtil.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/ConfigUtil.java
index 5f7be806ed..57476836cc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/ConfigUtil.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/ConfigUtil.java
@@ -17,11 +17,11 @@
*/
package org.apache.hadoop.mapred.nativetask.util;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -31,8 +31,8 @@ public abstract class ConfigUtil {
public static byte[][] toBytes(Configuration conf) {
List nativeConfigs = new ArrayList();
for (Map.Entry e : conf) {
- nativeConfigs.add(e.getKey().getBytes(Charsets.UTF_8));
- nativeConfigs.add(e.getValue().getBytes(Charsets.UTF_8));
+ nativeConfigs.add(e.getKey().getBytes(StandardCharsets.UTF_8));
+ nativeConfigs.add(e.getValue().getBytes(StandardCharsets.UTF_8));
}
return nativeConfigs.toArray(new byte[nativeConfigs.size()][]);
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/ReadWriteBuffer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/ReadWriteBuffer.java
index af2c496eb6..c016143998 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/ReadWriteBuffer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/ReadWriteBuffer.java
@@ -17,9 +17,10 @@
*/
package org.apache.hadoop.mapred.nativetask.util;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.classification.InterfaceAudience;
+import java.nio.charset.StandardCharsets;
+
@InterfaceAudience.Private
public class ReadWriteBuffer {
private byte[] _buff;
@@ -135,13 +136,13 @@ public byte[] readBytes() {
}
public void writeString(String str) {
- final byte[] bytes = str.getBytes(Charsets.UTF_8);
+ final byte[] bytes = str.getBytes(StandardCharsets.UTF_8);
writeBytes(bytes, 0, bytes.length);
}
public String readString() {
final byte[] bytes = readBytes();
- return new String(bytes, Charsets.UTF_8);
+ return new String(bytes, StandardCharsets.UTF_8);
}
private void checkWriteSpaceAndResizeIfNecessary(int toBeWritten) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestByteBufferReadWrite.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestByteBufferReadWrite.java
index 98d0697100..8dfa5322e8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestByteBufferReadWrite.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestByteBufferReadWrite.java
@@ -19,7 +19,7 @@
import java.io.ByteArrayInputStream;
import java.io.IOException;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.mapred.nativetask.NativeDataTarget;
@@ -143,8 +143,8 @@ public void testFlush() throws IOException {
Mockito.verify(target).finishSendData();
}
- private static String toString(byte[] str) throws UnsupportedEncodingException {
- return new String(str, 0, str.length, "UTF-8");
+ private static String toString(byte[] str) {
+ return new String(str, 0, str.length, StandardCharsets.UTF_8);
}
private static class MockDataTarget implements NativeDataTarget {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleChannelHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleChannelHandler.java
index 49c0bb288b..d0d0e74c99 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleChannelHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/ShuffleChannelHandler.java
@@ -45,6 +45,7 @@
import java.io.RandomAccessFile;
import java.net.URL;
import java.nio.channels.ClosedChannelException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -60,7 +61,6 @@
import org.apache.hadoop.io.SecureIOUtils;
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils;
import org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.eclipse.jetty.http.HttpHeader;
import static io.netty.buffer.Unpooled.wrappedBuffer;
@@ -469,7 +469,7 @@ protected void verifyRequest(String appid, ChannelHandlerContext ctx,
// verify - throws exception
SecureShuffleUtils.verifyReply(urlHashStr, encryptedURL, tokenSecret);
// verification passed - encode the reply
- String reply = SecureShuffleUtils.generateHash(urlHashStr.getBytes(Charsets.UTF_8),
+ String reply = SecureShuffleUtils.generateHash(urlHashStr.getBytes(StandardCharsets.UTF_8),
tokenSecret);
response.headers().set(
SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, reply);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleChannelHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleChannelHandler.java
index 66fa3de94f..f052e4bc70 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleChannelHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleChannelHandler.java
@@ -79,7 +79,6 @@
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.eclipse.jetty.http.HttpHeader;
import org.junit.Test;
import org.slf4j.LoggerFactory;
@@ -336,7 +335,7 @@ public DefaultHttpResponse getExpectedHttpResponse(
SecretKey tokenSecret = ctx.secretManager.retrieveTokenSecret(TEST_JOB_ID);
headers.set(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH,
SecureShuffleUtils.generateHash(
- request.headers().get(HTTP_HEADER_URL_HASH).getBytes(Charsets.UTF_8),
+ request.headers().get(HTTP_HEADER_URL_HASH).getBytes(StandardCharsets.UTF_8),
tokenSecret));
} catch (SecretManager.InvalidToken e) {
fail("Could not generate reply hash");
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java
index f0f9a34f55..1cc099e14b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/BaileyBorweinPlouffe.java
@@ -25,6 +25,7 @@
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
@@ -53,7 +54,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* A map/reduce program that uses Bailey-Borwein-Plouffe to compute exact
@@ -158,7 +158,7 @@ protected void cleanup(Context context
final OutputStream outputstream = fs.create(outfile);
try {
final PrintWriter out = new PrintWriter(
- new OutputStreamWriter(outputstream, Charsets.UTF_8), true);
+ new OutputStreamWriter(outputstream, StandardCharsets.UTF_8), true);
// write hex text
print(out, hex.iterator(), "Pi = 0x3.", "%02X", 5, 5);
out.println("Total number of hexadecimal digits is "
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java
index 26a3009918..58518de084 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMean.java
@@ -21,6 +21,7 @@
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
@@ -37,8 +38,6 @@
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-
public class WordMean extends Configured implements Tool {
private double mean = 0;
@@ -96,7 +95,7 @@ public static class WordMeanReducer extends
public void reduce(Text key, Iterable<LongWritable> values, Context context)
throws IOException, InterruptedException {
- int theSum = 0;
+ long theSum = 0;
for (LongWritable val : values) {
theSum += val.get();
}
@@ -127,7 +126,7 @@ private double readAndCalcMean(Path path, Configuration conf)
// average = total sum / number of elements;
try {
- br = new BufferedReader(new InputStreamReader(fs.open(file), Charsets.UTF_8));
+ br = new BufferedReader(new InputStreamReader(fs.open(file), StandardCharsets.UTF_8));
long count = 0;
long length = 0;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java
index 9acf62bd17..c209da4ee0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordMedian.java
@@ -21,6 +21,7 @@
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
@@ -39,7 +40,6 @@
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
public class WordMedian extends Configured implements Tool {
@@ -130,7 +130,7 @@ private double readAndFindMedian(String path, int medianIndex1,
BufferedReader br = null;
try {
- br = new BufferedReader(new InputStreamReader(fs.open(file), Charsets.UTF_8));
+ br = new BufferedReader(new InputStreamReader(fs.open(file), StandardCharsets.UTF_8));
int num = 0;
String line;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java
index 2a7733b875..57c35eb6e0 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/WordStandardDeviation.java
@@ -21,6 +21,7 @@
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
@@ -37,7 +38,6 @@
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
public class WordStandardDeviation extends Configured implements Tool {
@@ -137,7 +137,7 @@ private double readAndCalcStdDev(Path path, Configuration conf)
double stddev = 0;
BufferedReader br = null;
try {
- br = new BufferedReader(new InputStreamReader(fs.open(file), Charsets.UTF_8));
+ br = new BufferedReader(new InputStreamReader(fs.open(file), StandardCharsets.UTF_8));
long count = 0;
long length = 0;
long square = 0;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java
index e2d034193b..56b580ecc6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/DistributedPentomino.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.examples.dancing;
import java.io.*;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.StringTokenizer;
@@ -33,7 +34,6 @@
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.*;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* Launch a distributed pentomino solver.
@@ -141,7 +141,7 @@ private static long createInputDirectory(FileSystem fs,
Path input = new Path(dir, "part1");
PrintWriter file =
new PrintWriter(new OutputStreamWriter(new BufferedOutputStream
- (fs.create(input), 64*1024), Charsets.UTF_8));
+ (fs.create(input), 64*1024), StandardCharsets.UTF_8));
for(int[] prefix: splits) {
for(int i=0; i < prefix.length; ++i) {
if (i != 0) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java
index aa2df72af2..402ff028df 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/dancing/Sudoku.java
@@ -19,9 +19,9 @@
package org.apache.hadoop.examples.dancing;
import java.io.*;
+import java.nio.charset.StandardCharsets;
import java.util.*;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* This class uses the dancing links algorithm from Knuth to solve sudoku
@@ -136,7 +136,7 @@ public void solution(List> names) {
*/
public Sudoku(InputStream stream) throws IOException {
BufferedReader file = new BufferedReader(
- new InputStreamReader(stream, Charsets.UTF_8));
+ new InputStreamReader(stream, StandardCharsets.UTF_8));
String line = file.readLine();
List<int[]> result = new ArrayList<int[]>();
while (line != null) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java
index 16273fd0ba..bffaf8fd15 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java
@@ -25,6 +25,7 @@
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@@ -34,7 +35,6 @@
import org.apache.hadoop.examples.pi.math.Bellard;
import org.apache.hadoop.examples.pi.math.Bellard.Parameter;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/** A class for parsing outputs */
public final class Parser {
@@ -80,7 +80,7 @@ private void parse(File f, Map> sums) throws IOExcep
m.put(p, new ArrayList<TaskResult>());
final BufferedReader in = new BufferedReader(
- new InputStreamReader(new FileInputStream(f), Charsets.UTF_8));
+ new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8));
try {
for(String line; (line = in.readLine()) != null; )
try {
@@ -137,7 +137,7 @@ Map> parse(String inputpath, String outputdir
final PrintWriter out = new PrintWriter(
new OutputStreamWriter(new FileOutputStream(
- new File(outputdir, p + ".txt")), Charsets.UTF_8), true);
+ new File(outputdir, p + ".txt")), StandardCharsets.UTF_8), true);
try {
for(int i = 0; i < results.size(); i++)
out.println(DistSum.taskResult2string(p + "." + i, results.get(i)));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java
index ddbbf63343..4e8461525a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Util.java
@@ -25,6 +25,7 @@
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
@@ -46,7 +47,6 @@
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.concurrent.HadoopExecutors;
/** Utility methods */
@@ -216,7 +216,8 @@ public static PrintWriter createWriter(File dir, String prefix) throws IOExcepti
final File f = new File(dir,
prefix + dateFormat.format(new Date(System.currentTimeMillis())) + ".txt");
if (!f.exists())
- return new PrintWriter(new OutputStreamWriter(new FileOutputStream(f), Charsets.UTF_8));
+ return new PrintWriter(new OutputStreamWriter(
+ new FileOutputStream(f), StandardCharsets.UTF_8));
try {Thread.sleep(10);} catch (InterruptedException e) {}
}
@@ -291,7 +292,7 @@ static List readJobOutputs(FileSystem fs, Path outdir) throws IOExce
for(FileStatus status : fs.listStatus(outdir)) {
if (status.getPath().getName().startsWith("part-")) {
final BufferedReader in = new BufferedReader(
- new InputStreamReader(fs.open(status.getPath()), Charsets.UTF_8));
+ new InputStreamReader(fs.open(status.getPath()), StandardCharsets.UTF_8));
try {
for(String line; (line = in.readLine()) != null; )
results.add(TaskResult.valueOf(line));
@@ -310,13 +311,14 @@ static List readJobOutputs(FileSystem fs, Path outdir) throws IOExce
static void writeResults(String name, List results, FileSystem fs, String dir) throws IOException {
final Path outfile = new Path(dir, name + ".txt");
Util.out.println(name + "> writing results to " + outfile);
- final PrintWriter out = new PrintWriter(new OutputStreamWriter(fs.create(outfile), Charsets.UTF_8), true);
+ final PrintWriter printWriter = new PrintWriter(new OutputStreamWriter(
+ fs.create(outfile), StandardCharsets.UTF_8), true);
try {
for(TaskResult r : results)
- out.println(r);
+ printWriter.println(r);
}
finally {
- out.close();
+ printWriter.close();
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
index 7998d4a8f6..6df1f1e497 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/TeraScheduler.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.examples.terasort;
import java.io.*;
+import java.nio.charset.StandardCharsets;
import java.util.*;
import org.apache.hadoop.conf.Configuration;
@@ -28,7 +29,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
class TeraScheduler {
private static final Logger LOG =
@@ -75,7 +75,7 @@ public String toString() {
List<String> readFile(String filename) throws IOException {
List<String> result = new ArrayList<String>(10000);
try (BufferedReader in = new BufferedReader(
- new InputStreamReader(new FileInputStream(filename), Charsets.UTF_8))) {
+ new InputStreamReader(new FileInputStream(filename), StandardCharsets.UTF_8))) {
String line = in.readLine();
while (line != null) {
result.add(line);
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
index 528163103d..86e8d9c2a0 100644
--- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
@@ -19,7 +19,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -108,11 +108,7 @@ public static class OutputBufferThread extends Thread {
public OutputBufferThread(InputStream is) {
this.setDaemon(true);
output = new ArrayList<String>();
- try {
- reader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException("Unsupported encoding " + e.toString());
- }
+ reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));
}
@Override
diff --git a/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java b/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
index 6d082380ff..c72a926b13 100644
--- a/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
+++ b/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
@@ -23,6 +23,7 @@
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
@@ -74,8 +75,6 @@
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-
/**
* a archive creation utility.
* This class provides methods that can be used
@@ -754,7 +753,7 @@ public void configure(JobConf conf) {
indexStream = fs.create(index);
outStream = fs.create(masterIndex);
String version = VERSION + " \n";
- outStream.write(version.getBytes(Charsets.UTF_8));
+ outStream.write(version.getBytes(StandardCharsets.UTF_8));
} catch(IOException e) {
throw new RuntimeException(e);
@@ -773,7 +772,7 @@ public void reduce(IntWritable key, Iterator values,
while(values.hasNext()) {
Text value = values.next();
String towrite = value.toString() + "\n";
- indexStream.write(towrite.getBytes(Charsets.UTF_8));
+ indexStream.write(towrite.getBytes(StandardCharsets.UTF_8));
written++;
if (written > numIndexes -1) {
// every 1000 indexes we report status
@@ -782,7 +781,7 @@ public void reduce(IntWritable key, Iterator values,
endIndex = keyVal;
String masterWrite = startIndex + " " + endIndex + " " + startPos
+ " " + indexStream.getPos() + " \n" ;
- outStream.write(masterWrite.getBytes(Charsets.UTF_8));
+ outStream.write(masterWrite.getBytes(StandardCharsets.UTF_8));
startPos = indexStream.getPos();
startIndex = endIndex;
written = 0;
@@ -795,7 +794,7 @@ public void close() throws IOException {
if (written > 0) {
String masterWrite = startIndex + " " + keyVal + " " + startPos +
" " + indexStream.getPos() + " \n";
- outStream.write(masterWrite.getBytes(Charsets.UTF_8));
+ outStream.write(masterWrite.getBytes(StandardCharsets.UTF_8));
}
// close the streams
outStream.close();
diff --git a/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java b/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
index b1755affa8..3267a683c2 100644
--- a/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
+++ b/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
@@ -23,6 +23,7 @@
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@@ -81,7 +82,7 @@ public class TestHadoopArchives {
private static String createFile(Path root, FileSystem fs, String... dirsAndFile
) throws IOException {
String fileBaseName = dirsAndFile[dirsAndFile.length - 1];
- return createFile(root, fs, fileBaseName.getBytes("UTF-8"), dirsAndFile);
+ return createFile(root, fs, fileBaseName.getBytes(StandardCharsets.UTF_8), dirsAndFile);
}
private static String createFile(Path root, FileSystem fs, byte[] fileContent, String... dirsAndFile
@@ -395,7 +396,7 @@ public void testReadFileContent() throws Exception {
} else if ("zero-length".equals(baseName)) {
assertEquals(0, actualContentSimple.length);
} else {
- String actual = new String(actualContentSimple, "UTF-8");
+ String actual = new String(actualContentSimple, StandardCharsets.UTF_8);
assertEquals(baseName, actual);
}
readFileCount++;
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/AbstractDelegationTokenBinding.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/AbstractDelegationTokenBinding.java
index 6af413e44d..f33944070d 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/AbstractDelegationTokenBinding.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/AbstractDelegationTokenBinding.java
@@ -20,7 +20,7 @@
import java.io.IOException;
import java.net.URI;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Optional;
import org.slf4j.Logger;
@@ -304,7 +304,7 @@ public String getUserAgentField() {
* @return a password.
*/
protected static byte[] getSecretManagerPasssword() {
- return "non-password".getBytes(Charset.forName("UTF-8"));
+ return "non-password".getBytes(StandardCharsets.UTF_8);
}
/**
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AMultipartUploader.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AMultipartUploader.java
index b7eae8ead7..58e38c2873 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AMultipartUploader.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/S3AMultipartUploader.java
@@ -25,6 +25,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
@@ -40,8 +41,6 @@
import software.amazon.awssdk.services.s3.model.UploadPartRequest;
import software.amazon.awssdk.services.s3.model.UploadPartResponse;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
-
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -132,7 +131,7 @@ public CompletableFuture startUpload(
PutObjectOptions.keepingDirs());
statistics.uploadStarted();
return BBUploadHandle.from(ByteBuffer.wrap(
- uploadId.getBytes(Charsets.UTF_8)));
+ uploadId.getBytes(StandardCharsets.UTF_8)));
}));
}
@@ -151,7 +150,7 @@ public CompletableFuture putPart(
checkUploadId(uploadIdBytes);
String key = context.pathToKey(dest);
String uploadIdString = new String(uploadIdBytes, 0, uploadIdBytes.length,
- Charsets.UTF_8);
+ StandardCharsets.UTF_8);
return context.submit(new CompletableFuture<>(),
() -> {
UploadPartRequest request = writeOperations.newUploadPartRequestBuilder(key,
@@ -189,7 +188,7 @@ public CompletableFuture complete(
String key = context.pathToKey(dest);
String uploadIdStr = new String(uploadIdBytes, 0, uploadIdBytes.length,
- Charsets.UTF_8);
+ StandardCharsets.UTF_8);
ArrayList eTags = new ArrayList<>();
eTags.ensureCapacity(handles.size());
long totalLength = 0;
@@ -221,7 +220,7 @@ public CompletableFuture complete(
finalLen
);
- byte[] eTag = result.eTag().getBytes(Charsets.UTF_8);
+ byte[] eTag = result.eTag().getBytes(StandardCharsets.UTF_8);
statistics.uploadCompleted();
return (PathHandle) () -> ByteBuffer.wrap(eTag);
}));
@@ -237,7 +236,7 @@ public CompletableFuture abort(
final byte[] uploadIdBytes = uploadId.toByteArray();
checkUploadId(uploadIdBytes);
String uploadIdString = new String(uploadIdBytes, 0, uploadIdBytes.length,
- Charsets.UTF_8);
+ StandardCharsets.UTF_8);
return context.submit(new CompletableFuture<>(),
() -> {
writeOperations.abortMultipartCommit(
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
index eaa84c6086..10bb4a8032 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
@@ -52,7 +52,6 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.service.Service;
import org.apache.hadoop.service.ServiceOperations;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListeningExecutorService;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.MoreExecutors;
import org.apache.hadoop.util.BlockingThreadPoolExecutorService;
@@ -75,6 +74,7 @@
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@@ -912,7 +912,7 @@ private static CompletableFuture put(FileSystem fs,
return submit(EXECUTOR, () -> {
try (DurationInfo ignore =
new DurationInfo(LOG, false, "Creating %s", path)) {
- createFile(fs, path, true, text.getBytes(Charsets.UTF_8));
+ createFile(fs, path, true, text.getBytes(StandardCharsets.UTF_8));
return path;
}
});
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ILoadTestSessionCredentials.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ILoadTestSessionCredentials.java
index c3030aa227..3b21a08e30 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ILoadTestSessionCredentials.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ILoadTestSessionCredentials.java
@@ -21,6 +21,7 @@
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletionService;
@@ -134,7 +135,7 @@ protected String getFilePrefix() {
@Test
public void testCreate10Tokens() throws Throwable {
File file = fetchTokens(10);
- String csv = FileUtils.readFileToString(file, "UTF-8");
+ String csv = FileUtils.readFileToString(file, StandardCharsets.UTF_8);
LOG.info("CSV data\n{}", csv);
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestCommitOperations.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestCommitOperations.java
index 64def00fd2..7499e10da0 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestCommitOperations.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/ITestCommitOperations.java
@@ -21,6 +21,7 @@
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
@@ -495,7 +496,7 @@ public void testUploadEmptyFile() throws Throwable {
public void testUploadSmallFile() throws Throwable {
File tempFile = File.createTempFile("commit", ".txt");
String text = "hello, world";
- FileUtils.write(tempFile, text, "UTF-8");
+ FileUtils.write(tempFile, text, StandardCharsets.UTF_8);
CommitOperations actions = newCommitOperations();
Path dest = methodSubPath("testUploadSmallFile");
S3AFileSystem fs = getFileSystem();
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
index 3a503ddfa2..45fbf79190 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java
@@ -26,7 +26,7 @@
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
@@ -178,7 +178,7 @@ public FolderRenamePending(Path redoFile, NativeAzureFileSystem fs)
"Error reading pending rename file contents -- "
+ "maximum file size exceeded");
}
- String contents = new String(bytes, 0, l, Charset.forName("UTF-8"));
+ String contents = new String(bytes, 0, l, StandardCharsets.UTF_8);
// parse the JSON
JsonNode json = null;
@@ -301,7 +301,7 @@ public void writeFile(NativeAzureFileSystem fs) throws IOException {
// Write file.
try {
output = fs.createInternal(path, FsPermission.getFileDefault(), false, null);
- output.write(contents.getBytes(Charset.forName("UTF-8")));
+ output.write(contents.getBytes(StandardCharsets.UTF_8));
} catch (IOException e) {
throw new IOException("Unable to write RenamePending file for folder rename from "
+ srcKey + " to " + dstKey, e);
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java
index dd4ec7c200..1a1a27c53b 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/oauth2/AzureADAuthenticator.java
@@ -374,7 +374,7 @@ private static AzureADToken getTokenSingleCall(String authEndpoint,
conn.getRequestProperties());
if (httpMethod.equals("POST")) {
conn.setDoOutput(true);
- conn.getOutputStream().write(payload.getBytes("UTF-8"));
+ conn.getOutputStream().write(payload.getBytes(StandardCharsets.UTF_8));
}
int httpResponseCode = conn.getResponseCode();
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestBlobOperationDescriptor.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestBlobOperationDescriptor.java
index aca5f810b4..598469488a 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestBlobOperationDescriptor.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestBlobOperationDescriptor.java
@@ -32,6 +32,7 @@
import org.junit.Test;
import java.net.HttpURLConnection;
+import java.nio.charset.StandardCharsets;
/**
* Tests for BlobOperationDescriptor.
@@ -71,7 +72,7 @@ public void testAppendBlockOperations() throws Exception {
assertEquals(0, lastContentLengthReceived);
String message = "this is a test";
- output.write(message.getBytes("UTF-8"));
+ output.write(message.getBytes(StandardCharsets.UTF_8));
output.flush();
assertEquals(BlobOperationDescriptor.OperationType.AppendBlock,
lastOperationTypeSent);
@@ -107,7 +108,7 @@ public void testPutBlockOperations() throws Exception {
assertEquals(0, lastContentLengthReceived);
String message = "this is a test";
- output.write(message.getBytes("UTF-8"));
+ output.write(message.getBytes(StandardCharsets.UTF_8));
output.flush();
assertEquals(BlobOperationDescriptor.OperationType.PutBlock,
lastOperationTypeSent);
@@ -186,7 +187,7 @@ public void testGetBlobOperations() throws Exception {
assertNull(lastOperationTypeReceived);
assertEquals(0, lastContentLengthReceived);
- output.write(message.getBytes("UTF-8"));
+ output.write(message.getBytes(StandardCharsets.UTF_8));
output.flush();
assertEquals(BlobOperationDescriptor.OperationType.PutBlock,
lastOperationTypeSent);
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
index fe25477beb..f041f4bccd 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/TestAbfsConfigurationFieldsValidation.java
@@ -20,8 +20,8 @@
import java.io.IOException;
import java.lang.reflect.Field;
+import java.nio.charset.StandardCharsets;
-import org.apache.commons.codec.Charsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
import org.apache.hadoop.fs.azurebfs.constants.TestConfigurationKeys;
@@ -99,8 +99,8 @@ public class TestAbfsConfigurationFieldsValidation {
public TestAbfsConfigurationFieldsValidation() throws Exception {
super();
this.accountName = "testaccount1.blob.core.windows.net";
- this.encodedString = Base64.encode("base64Value".getBytes(Charsets.UTF_8));
- this.encodedAccountKey = Base64.encode("someAccountKey".getBytes(Charsets.UTF_8));
+ this.encodedString = Base64.encode("base64Value".getBytes(StandardCharsets.UTF_8));
+ this.encodedAccountKey = Base64.encode("someAccountKey".getBytes(StandardCharsets.UTF_8));
Configuration configuration = new Configuration();
configuration.addResource(TestConfigurationKeys.TEST_CONFIGURATION_FILE_NAME);
configuration.set(INT_KEY, "1234565");
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/ClassicDelegationTokenManager.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/ClassicDelegationTokenManager.java
index 1f0cbc0a16..5f131db3d9 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/ClassicDelegationTokenManager.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/ClassicDelegationTokenManager.java
@@ -20,7 +20,7 @@
import java.io.IOException;
import java.net.URI;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.util.Preconditions;
import org.slf4j.Logger;
@@ -249,8 +249,8 @@ public static Configuration useClassicDTManager(Configuration conf) {
* highlighting security risks of shared mutable byte arrays.
* @return a password.
*/
- private static byte[] getSecretManagerPasssword() {
- return "non-password".getBytes(Charset.forName("UTF-8"));
+ private static byte[] getSecretManagerPassword() {
+ return "non-password".getBytes(StandardCharsets.UTF_8);
}
/**
@@ -265,13 +265,13 @@ public TokenSecretManager() {
@Override
protected byte[] createPassword(StubAbfsTokenIdentifier identifier) {
- return getSecretManagerPasssword();
+ return getSecretManagerPassword();
}
@Override
public byte[] retrievePassword(StubAbfsTokenIdentifier identifier)
throws InvalidToken {
- return getSecretManagerPasssword();
+ return getSecretManagerPassword();
}
@Override
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestShellDecryptionKeyProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestShellDecryptionKeyProvider.java
index 1f02741172..f039b60156 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestShellDecryptionKeyProvider.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestShellDecryptionKeyProvider.java
@@ -19,7 +19,7 @@
package org.apache.hadoop.fs.azurebfs.services;
import java.io.File;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import org.junit.Assert;
import org.junit.Test;
@@ -75,7 +75,7 @@ public void testValidScript() throws Exception {
// expected result (so that we validate both script input and output)
File scriptFile = new File(TEST_ROOT_DIR, "testScript.cmd");
FileUtils.writeStringToFile(scriptFile, "@echo %1 " + expectedResult,
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
ShellDecryptionKeyProvider provider = new ShellDecryptionKeyProvider();
Configuration conf = new Configuration();
diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestTextFileBasedIdentityHandler.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestTextFileBasedIdentityHandler.java
index b0a72b2131..1e578670cb 100644
--- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestTextFileBasedIdentityHandler.java
+++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/services/TestTextFileBasedIdentityHandler.java
@@ -20,7 +20,7 @@
import java.io.File;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.nio.file.NoSuchFileException;
import org.junit.Assert;
@@ -71,22 +71,22 @@ public static void init() throws IOException {
groupMappingFile = tempDir.newFile("group-mapping.conf");
//Stage data for user mapping
- FileUtils.writeStringToFile(userMappingFile, testUserDataLine1, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(userMappingFile, testUserDataLine2, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(userMappingFile, testUserDataLine3, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(userMappingFile, testUserDataLine4, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(userMappingFile, testUserDataLine5, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(userMappingFile, testUserDataLine6, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(userMappingFile, testUserDataLine7, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(userMappingFile, NEW_LINE, Charset.forName("UTF-8"), true);
+ FileUtils.writeStringToFile(userMappingFile, testUserDataLine1, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(userMappingFile, testUserDataLine2, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(userMappingFile, testUserDataLine3, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(userMappingFile, testUserDataLine4, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(userMappingFile, testUserDataLine5, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(userMappingFile, testUserDataLine6, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(userMappingFile, testUserDataLine7, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(userMappingFile, NEW_LINE, StandardCharsets.UTF_8, true);
//Stage data for group mapping
- FileUtils.writeStringToFile(groupMappingFile, testGroupDataLine1, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(groupMappingFile, testGroupDataLine2, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(groupMappingFile, testGroupDataLine3, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(groupMappingFile, testGroupDataLine4, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(groupMappingFile, testGroupDataLine5, Charset.forName("UTF-8"), true);
- FileUtils.writeStringToFile(groupMappingFile, NEW_LINE, Charset.forName("UTF-8"), true);
+ FileUtils.writeStringToFile(groupMappingFile, testGroupDataLine1, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(groupMappingFile, testGroupDataLine2, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(groupMappingFile, testGroupDataLine3, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(groupMappingFile, testGroupDataLine4, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(groupMappingFile, testGroupDataLine5, StandardCharsets.UTF_8, true);
+ FileUtils.writeStringToFile(groupMappingFile, NEW_LINE, StandardCharsets.UTF_8, true);
}
private void assertUserLookup(TextFileBasedIdentityHandler handler, String userInTest, String expectedUser)
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/FileBasedCopyListing.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/FileBasedCopyListing.java
index c356edd425..d1dd7617e8 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/FileBasedCopyListing.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/FileBasedCopyListing.java
@@ -27,7 +27,7 @@
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
@@ -76,7 +76,7 @@ private List fetchFileList(Path sourceListing) throws IOException {
BufferedReader input = null;
try {
input = new BufferedReader(new InputStreamReader(fs.open(sourceListing),
- Charset.forName("UTF-8")));
+ StandardCharsets.UTF_8));
String line = input.readLine();
while (line != null) {
result.add(new Path(line));
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexCopyFilter.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexCopyFilter.java
index f5f23eea05..01787860fc 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexCopyFilter.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexCopyFilter.java
@@ -29,7 +29,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
@@ -66,7 +66,7 @@ public void initialize() {
try {
InputStream is = Files.newInputStream(filtersFile.toPath());
reader = new BufferedReader(new InputStreamReader(is,
- Charset.forName("UTF-8")));
+ StandardCharsets.UTF_8));
String line;
while ((line = reader.readLine()) != null) {
Pattern pattern = Pattern.compile(line);
diff --git a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java
index fb56b90186..7e5b715479 100644
--- a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java
+++ b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistTool.java
@@ -22,7 +22,7 @@
import java.io.DataOutput;
import java.io.IOException;
import java.io.InputStreamReader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
@@ -96,7 +96,7 @@ protected static List readFile(Configuration conf, Path inputfile
List<String> result = new ArrayList<String>();
FileSystem fs = inputfile.getFileSystem(conf);
try (BufferedReader input = new BufferedReader(new InputStreamReader(fs.open(inputfile),
- Charset.forName("UTF-8")))) {
+ StandardCharsets.UTF_8))) {
for(String line; (line = input.readLine()) != null;) {
result.add(line);
}
diff --git a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java
index 9c8dc1f230..2900139093 100644
--- a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java
+++ b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java
@@ -26,6 +26,7 @@
import java.io.OutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.util.Arrays;
@@ -35,7 +36,6 @@
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.protobuf.CodedOutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -325,7 +325,7 @@ void writeMD5(String imagename) throws IOException {
Path chk = new Path(outdir, imagename + ".md5");
try (OutputStream out = outfs.create(chk)) {
String md5Line = digestString + " *" + imagename + "\n";
- out.write(md5Line.getBytes(Charsets.UTF_8));
+ out.write(md5Line.getBytes(StandardCharsets.UTF_8));
}
}
diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java
index 71db9bfb25..99c621a3e9 100644
--- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java
+++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/CompressionEmulationUtil.java
@@ -22,6 +22,7 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
@@ -99,7 +100,7 @@ class CompressionEmulationUtil {
private static final CompressionRatioLookupTable COMPRESSION_LOOKUP_TABLE =
new CompressionRatioLookupTable();
- private static final Charset charsetUTF8 = Charset.forName("UTF-8");
+ private static final Charset charsetUTF8 = StandardCharsets.UTF_8;
/**
* This is a {@link Mapper} implementation for generating random text data.
diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/DistributedCacheEmulator.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/DistributedCacheEmulator.java
index 56f67e5a73..a6f986ce26 100644
--- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/DistributedCacheEmulator.java
+++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/DistributedCacheEmulator.java
@@ -42,6 +42,7 @@
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@@ -113,7 +114,7 @@ class DistributedCacheEmulator {
Configuration conf; // gridmix configuration
- private static final Charset charsetUTF8 = Charset.forName("UTF-8");
+ private static final Charset charsetUTF8 = StandardCharsets.UTF_8;
// Pseudo local file system where local FS based distributed cache files are
// created by gridmix.
diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GenerateDistCacheData.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GenerateDistCacheData.java
index 4a75cdedf7..aa191629cf 100644
--- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GenerateDistCacheData.java
+++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GenerateDistCacheData.java
@@ -19,6 +19,7 @@
import java.io.IOException;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.List;
@@ -96,7 +97,7 @@ class GenerateDistCacheData extends GridmixJob {
*/
static final short GRIDMIX_DISTCACHE_FILE_PERM = 0644;
- private static final Charset charsetUTF8 = Charset.forName("UTF-8");
+ private static final Charset charsetUTF8 = StandardCharsets.UTF_8;
public GenerateDistCacheData(Configuration conf) throws IOException {
super(conf, 0L, JOB_NAME);
diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixRecord.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixRecord.java
index 481799f7b5..afb95cab87 100644
--- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixRecord.java
+++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/GridmixRecord.java
@@ -21,6 +21,7 @@
import java.io.DataOutput;
import java.io.EOFException;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import org.apache.hadoop.io.DataInputBuffer;
@@ -111,7 +112,7 @@ private void writeRandomText(DataOutput out, final int size)
//TODO Should we use long for size. What if the data is more than 4G?
String randomWord = rtg.getRandomWord();
- byte[] bytes = randomWord.getBytes("UTF-8");
+ byte[] bytes = randomWord.getBytes(StandardCharsets.UTF_8);
long randomWordSize = bytes.length;
while (i >= randomWordSize) {
out.write(bytes);
@@ -119,7 +120,7 @@ private void writeRandomText(DataOutput out, final int size)
// get the next random word
randomWord = rtg.getRandomWord();
- bytes = randomWord.getBytes("UTF-8");
+ bytes = randomWord.getBytes(StandardCharsets.UTF_8);
// determine the random word size
randomWordSize = bytes.length;
}
diff --git a/hadoop-tools/hadoop-kafka/src/main/java/org/apache/hadoop/metrics2/sink/KafkaSink.java b/hadoop-tools/hadoop-kafka/src/main/java/org/apache/hadoop/metrics2/sink/KafkaSink.java
index 0856d0f4e0..9cb6b93c4e 100644
--- a/hadoop-tools/hadoop-kafka/src/main/java/org/apache/hadoop/metrics2/sink/KafkaSink.java
+++ b/hadoop-tools/hadoop-kafka/src/main/java/org/apache/hadoop/metrics2/sink/KafkaSink.java
@@ -37,7 +37,7 @@
import java.io.Closeable;
import java.io.IOException;
import java.net.InetAddress;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
@@ -156,7 +156,7 @@ public void putMetrics(MetricsRecord record) {
// Create the record to be sent from the json.
ProducerRecord data = new ProducerRecord(
- topic, jsonLines.toString().getBytes(Charset.forName("UTF-8")));
+ topic, jsonLines.toString().getBytes(StandardCharsets.UTF_8));
// Send the data to the Kafka broker. Here is an example of this data:
// {"hostname": "...", "timestamp": 1436913651516,
diff --git a/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java b/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java
index 03c479fba5..665dbddd9a 100644
--- a/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java
+++ b/hadoop-tools/hadoop-kafka/src/test/java/org/apache/hadoop/metrics2/impl/TestKafkaMetrics.java
@@ -159,7 +159,7 @@ StringBuilder recordToJson(MetricsRecord record) {
String date = dateFormat.format(currDate);
SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm:ss");
String time = timeFormat.format(currDate);
- String hostname = new String("null");
+ String hostname = "null";
try {
hostname = InetAddress.getLocalHost().getHostName();
} catch (Exception e) {
diff --git a/hadoop-tools/hadoop-resourceestimator/src/test/java/org/apache/hadoop/resourceestimator/solver/impl/TestLpSolver.java b/hadoop-tools/hadoop-resourceestimator/src/test/java/org/apache/hadoop/resourceestimator/solver/impl/TestLpSolver.java
index d32f7c3592..1bce63466a 100644
--- a/hadoop-tools/hadoop-resourceestimator/src/test/java/org/apache/hadoop/resourceestimator/solver/impl/TestLpSolver.java
+++ b/hadoop-tools/hadoop-resourceestimator/src/test/java/org/apache/hadoop/resourceestimator/solver/impl/TestLpSolver.java
@@ -41,7 +41,7 @@
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.List;
import java.util.Map;
@@ -84,7 +84,7 @@ private void parseLog(final String inputLog)
RLESparseResourceAllocation result = solver.solve(jobHistory);
String file = "src/test/resources/lp/answer.txt";
Reader fileReader = new InputStreamReader(new FileInputStream(file),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
BufferedReader bufferedReader = new BufferedReader(fileReader);
String line = bufferedReader.readLine();
Configuration config = new Configuration();
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
index ecd5f0bbfc..817c5c8b2f 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/RandomSeedGenerator.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.tools.rumen;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
@@ -43,8 +43,7 @@
*/
public class RandomSeedGenerator {
private static Logger LOG = LoggerFactory.getLogger(RandomSeedGenerator.class);
- private static final Charset UTF_8 = Charset.forName("UTF-8");
-
+
/** MD5 algorithm instance, one for each thread. */
private static final ThreadLocal<MessageDigest> md5Holder =
new ThreadLocal<MessageDigest>() {
@@ -74,7 +73,7 @@ public static long getSeed(String streamId, long masterSeed) {
// We could have fed the bytes of masterSeed one by one to md5.update()
// instead
String str = streamId + '/' + masterSeed;
- byte[] digest = md5.digest(str.getBytes(UTF_8));
+ byte[] digest = md5.digest(str.getBytes(StandardCharsets.UTF_8));
// Create a long from the first 8 bytes of the digest
// This is fine as MD5 has the avalanche property.
// Paranoids could have XOR folded the other 8 bytes in too.
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/Record.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/Record.java
index f0ec99ad81..84df8b8187 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/Record.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/record/Record.java
@@ -22,6 +22,7 @@
import java.io.DataOutput;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -94,8 +95,8 @@ public String toString() {
ByteArrayOutputStream s = new ByteArrayOutputStream();
CsvRecordOutput a = new CsvRecordOutput(s);
this.serialize(a);
- return new String(s.toByteArray(), "UTF-8");
- } catch (Throwable ex) {
+ return new String(s.toByteArray(), StandardCharsets.UTF_8);
+ } catch (Exception ex) {
throw new RuntimeException(ex);
}
}
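
This hunk also narrows the catch from Throwable to Exception; with the Charset overload there is no checked UnsupportedEncodingException left to handle. A small illustration of the overload difference (class and variable names hypothetical):

    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class CharsetOverloadSketch {
      public static void main(String[] args) throws UnsupportedEncodingException {
        byte[] data = {72, 105};

        // String-name overload: the compiler forces handling of a checked
        // exception that can never fire for the literal "UTF-8".
        String viaName = new String(data, "UTF-8");

        // Charset overload: no checked exception, so no try/catch is needed.
        String viaCharset = new String(data, StandardCharsets.UTF_8);

        System.out.println(viaName.equals(viaCharset)); // true
      }
    }
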
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
index bc92b7149a..7871a4c969 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java
@@ -20,7 +20,7 @@
import java.io.*;
import java.net.InetAddress;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.*;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -65,7 +65,7 @@ public Environment() throws IOException {
Process pid = Runtime.getRuntime().exec(command);
BufferedReader in = new BufferedReader(
- new InputStreamReader(pid.getInputStream(), Charset.forName("UTF-8")));
+ new InputStreamReader(pid.getInputStream(), StandardCharsets.UTF_8));
try {
while (true) {
String line = in.readLine();
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapper.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapper.java
index 9bab1013f2..438a00057e 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapper.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeMapper.java
@@ -20,6 +20,7 @@
import java.io.*;
import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
@@ -75,13 +76,11 @@ public void configure(JobConf job) {
inputFormatClassName.equals(TextInputFormat.class.getCanonicalName()));
}
- try {
- mapOutputFieldSeparator = job.get("stream.map.output.field.separator", "\t").getBytes("UTF-8");
- mapInputFieldSeparator = job.get("stream.map.input.field.separator", "\t").getBytes("UTF-8");
- numOfMapOutputKeyFields = job.getInt("stream.num.map.output.key.fields", 1);
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException("The current system does not support UTF-8 encoding!", e);
- }
+ mapOutputFieldSeparator = job.get("stream.map.output.field.separator", "\t")
+ .getBytes(StandardCharsets.UTF_8);
+ mapInputFieldSeparator = job.get("stream.map.input.field.separator", "\t")
+ .getBytes(StandardCharsets.UTF_8);
+ numOfMapOutputKeyFields = job.getInt("stream.num.map.output.key.fields", 1);
}
// Do NOT declare default constructor
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeReducer.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeReducer.java
index ffa7b01413..1f5a247bb2 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeReducer.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/PipeReducer.java
@@ -20,6 +20,7 @@
import java.io.IOException;
import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.net.URLDecoder;
@@ -71,13 +72,11 @@ public void configure(JobConf job) {
SkipBadRecords.setAutoIncrReducerProcCount(job, false);
skipping = job.getBoolean(MRJobConfig.SKIP_RECORDS, false);
- try {
- reduceOutFieldSeparator = job_.get("stream.reduce.output.field.separator", "\t").getBytes("UTF-8");
- reduceInputFieldSeparator = job_.get("stream.reduce.input.field.separator", "\t").getBytes("UTF-8");
- this.numOfReduceOutputKeyFields = job_.getInt("stream.num.reduce.output.key.fields", 1);
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException("The current system does not support UTF-8 encoding!", e);
- }
+ reduceOutFieldSeparator = job_.get("stream.reduce.output.field.separator", "\t")
+ .getBytes(StandardCharsets.UTF_8);
+ reduceInputFieldSeparator = job_.get("stream.reduce.input.field.separator", "\t")
+ .getBytes(StandardCharsets.UTF_8);
+ this.numOfReduceOutputKeyFields = job_.getInt("stream.num.reduce.output.key.fields", 1);
}
public void reduce(Object key, Iterator values, OutputCollector output,
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java
index 063ea51dac..c757cf6d46 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java
@@ -19,11 +19,9 @@
package org.apache.hadoop.streaming;
import java.io.*;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.mapred.Reporter;
@@ -103,7 +101,8 @@ public Text createValue() {
void numRecStats(byte[] record, int start, int len) throws IOException {
numRec_++;
if (numRec_ == nextStatusRec_) {
- String recordStr = new String(record, start, Math.min(len, statusMaxRecordChars_), "UTF-8");
+ String recordStr = new String(record, start,
+ Math.min(len, statusMaxRecordChars_), StandardCharsets.UTF_8);
nextStatusRec_ += 100;//*= 10;
String status = getStatus(recordStr);
LOG.info(status);
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java
index 8dd987e870..a6983e1c6c 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamUtil.java
@@ -23,6 +23,7 @@
import java.io.IOException;
import java.net.InetAddress;
import java.net.URL;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
@@ -146,7 +147,7 @@ static String slurp(File f) throws IOException {
String contents = null;
try {
in.read(buf, 0, len);
- contents = new String(buf, "UTF-8");
+ contents = new String(buf, StandardCharsets.UTF_8);
} finally {
in.close();
}
@@ -160,7 +161,7 @@ static String slurpHadoop(Path p, FileSystem fs) throws IOException {
String contents = null;
try {
in.readFully(in.getPos(), buf);
- contents = new String(buf, "UTF-8");
+ contents = new String(buf, StandardCharsets.UTF_8);
} finally {
in.close();
}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java
index 7438cb8191..974cdc7c8d 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.streaming;
import java.io.*;
+import java.nio.charset.StandardCharsets;
import java.util.regex.*;
import org.apache.hadoop.io.DataOutputBuffer;
@@ -132,7 +133,7 @@ private boolean slowReadUntilMatch(Pattern markPattern, boolean includePat,
read = bin_.read(buf);
if (read == -1) return false;
- String sbuf = new String(buf, 0, read, "UTF-8");
+ String sbuf = new String(buf, 0, read, StandardCharsets.UTF_8);
Matcher match = markPattern.matcher(sbuf);
firstMatchStart_ = NA;
@@ -235,7 +236,7 @@ void addGroup(StringBuffer pat, String escapedGroup) {
}
boolean fastReadUntilMatch(String textPat, boolean includePat, DataOutputBuffer outBufOrNull) throws IOException {
- byte[] cpat = textPat.getBytes("UTF-8");
+ byte[] cpat = textPat.getBytes(StandardCharsets.UTF_8);
int m = 0;
boolean match = false;
int msup = cpat.length;
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/KeyOnlyTextOutputReader.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/KeyOnlyTextOutputReader.java
index 32bba397ce..1c17659b77 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/KeyOnlyTextOutputReader.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/KeyOnlyTextOutputReader.java
@@ -21,7 +21,7 @@
import java.io.DataInput;
import java.io.IOException;
import java.io.InputStream;
-import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.NullWritable;
@@ -77,11 +77,7 @@ public NullWritable getCurrentValue() throws IOException {
@Override
public String getLastOutput() {
if (bytes != null) {
- try {
- return new String(bytes, "UTF-8");
- } catch (UnsupportedEncodingException e) {
- return "";
- }
+ return new String(bytes, StandardCharsets.UTF_8);
} else {
return null;
}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextInputWriter.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextInputWriter.java
index 6f0fd8bfa5..31513da71d 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextInputWriter.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextInputWriter.java
@@ -20,6 +20,7 @@
import java.io.DataOutput;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
@@ -66,7 +67,7 @@ protected void writeUTF8(Object object) throws IOException {
valSize = val.getLength();
} else {
String sval = object.toString();
- bval = sval.getBytes("UTF-8");
+ bval = sval.getBytes(StandardCharsets.UTF_8);
valSize = bval.length;
}
clientOut.write(bval, 0, valSize);
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextOutputReader.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextOutputReader.java
index 06c05bc9ef..11c84a471f 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextOutputReader.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextOutputReader.java
@@ -21,8 +21,8 @@
import java.io.DataInput;
import java.io.IOException;
import java.io.InputStream;
-import java.io.UnsupportedEncodingException;
import java.nio.charset.CharacterCodingException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
@@ -84,11 +84,7 @@ public Text getCurrentValue() throws IOException {
@Override
public String getLastOutput() {
if (bytes != null) {
- try {
- return new String(bytes, "UTF-8");
- } catch (UnsupportedEncodingException e) {
- return "";
- }
+ return new String(bytes, StandardCharsets.UTF_8);
} else {
return null;
}
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamBaseRecordReader.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamBaseRecordReader.java
index 43c1b1bec0..e3c14743cb 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamBaseRecordReader.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamBaseRecordReader.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.streaming.mapreduce;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -107,7 +108,7 @@ void numRecStats(byte[] record, int start, int len) throws IOException {
numRec_++;
if (numRec_ == nextStatusRec_) {
String recordStr = new String(record, start, Math.min(len,
- statusMaxRecordChars_), "UTF-8");
+ statusMaxRecordChars_), StandardCharsets.UTF_8);
nextStatusRec_ += 100;// *= 10;
String status = getStatus(recordStr);
LOG.info(status);
diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamXmlRecordReader.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamXmlRecordReader.java
index c7ee847763..aa8a4d8832 100644
--- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamXmlRecordReader.java
+++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/mapreduce/StreamXmlRecordReader.java
@@ -20,6 +20,7 @@
import java.io.BufferedInputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -139,7 +140,7 @@ private boolean slowReadUntilMatch(Pattern markPattern, boolean includePat,
if (read == -1)
return false;
- String sbuf = new String(buf, 0, read, "UTF-8");
+ String sbuf = new String(buf, 0, read, StandardCharsets.UTF_8);
Matcher match = markPattern.matcher(sbuf);
firstMatchStart_ = NA;
@@ -246,7 +247,7 @@ void addGroup(StringBuffer pat, String escapedGroup) {
boolean fastReadUntilMatch(String textPat, boolean includePat,
DataOutputBuffer outBufOrNull) throws IOException {
- byte[] cpat = textPat.getBytes("UTF-8");
+ byte[] cpat = textPat.getBytes(StandardCharsets.UTF_8);
int m = 0;
boolean match = false;
int msup = cpat.length;
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/RawBytesMapApp.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/RawBytesMapApp.java
index 813c08c611..e9c2740ee8 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/RawBytesMapApp.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/RawBytesMapApp.java
@@ -22,6 +22,7 @@
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.IntWritable;
@@ -52,7 +53,7 @@ public static void main(String[] args) throws IOException {
}
private void writeString(String str) throws IOException {
- byte[] bytes = str.getBytes("UTF-8");
+ byte[] bytes = str.getBytes(StandardCharsets.UTF_8);
dos.writeInt(bytes.length);
dos.write(bytes);
}
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/RawBytesReduceApp.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/RawBytesReduceApp.java
index 741e3d3a00..4a21f11f58 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/RawBytesReduceApp.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/RawBytesReduceApp.java
@@ -21,6 +21,7 @@
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.IntWritable;
@@ -62,7 +63,7 @@ private String readString() throws IOException {
}
byte[] bytes = new byte[length];
dis.readFully(bytes);
- return new String(bytes, "UTF-8");
+ return new String(bytes, StandardCharsets.UTF_8);
}
private int readInt() throws IOException {
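
RawBytesMapApp and RawBytesReduceApp exchange length-prefixed UTF-8 strings over Data streams; with the Charset overloads neither side needs an UnsupportedEncodingException handler. A self-contained round-trip sketch of that framing (class name hypothetical):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    public class LengthPrefixedStringSketch {
      static void writeString(DataOutputStream dos, String s) throws IOException {
        byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
        dos.writeInt(bytes.length);   // length prefix
        dos.write(bytes);             // payload
      }

      static String readString(DataInputStream dis) throws IOException {
        byte[] bytes = new byte[dis.readInt()];
        dis.readFully(bytes);
        return new String(bytes, StandardCharsets.UTF_8);
      }

      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        writeString(new DataOutputStream(buffer), "hello streaming");
        String back = readString(new DataInputStream(
            new ByteArrayInputStream(buffer.toByteArray())));
        System.out.println(back); // hello streaming
      }
    }
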
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java
index fccd8d51e4..901abba885 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestFileArgs.java
@@ -21,6 +21,7 @@
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -70,7 +71,7 @@ public void setUp() throws IOException {
// Set up side file
FileSystem localFs = FileSystem.getLocal(conf);
DataOutputStream dos = localFs.create(new Path("target/sidefile"));
- dos.write("hello world\n".getBytes("UTF-8"));
+ dos.write("hello world\n".getBytes(StandardCharsets.UTF_8));
dos.close();
// Since ls doesn't read stdin, we don't want to write anything
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestGzipInput.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestGzipInput.java
index a9fc5fd5a4..dc12e4eff9 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestGzipInput.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestGzipInput.java
@@ -21,6 +21,7 @@
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;
/**
@@ -37,7 +38,7 @@ protected void createInput() throws IOException
{
GZIPOutputStream out = new GZIPOutputStream(
new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
- out.write(input.getBytes("UTF-8"));
+ out.write(input.getBytes(StandardCharsets.UTF_8));
out.close();
}
}
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
index 752268de3d..041d527ab1 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestMultipleArchiveFiles.java
@@ -21,6 +21,7 @@
import java.io.File;
import java.io.IOException;
import java.io.DataOutputStream;
+import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
@@ -86,14 +87,14 @@ protected void createInput() throws IOException
DataOutputStream dos = fileSys.create(new Path(INPUT_FILE));
String inputFileString = "symlink1" + File.separator
+ "cacheArchive1\nsymlink2" + File.separator + "cacheArchive2";
- dos.write(inputFileString.getBytes("UTF-8"));
+ dos.write(inputFileString.getBytes(StandardCharsets.UTF_8));
dos.close();
DataOutputStream out = fileSys.create(new Path(CACHE_ARCHIVE_1.toString()));
ZipOutputStream zos = new ZipOutputStream(out);
ZipEntry ze = new ZipEntry(CACHE_FILE_1.toString());
zos.putNextEntry(ze);
- zos.write(input.getBytes("UTF-8"));
+ zos.write(input.getBytes(StandardCharsets.UTF_8));
zos.closeEntry();
zos.close();
@@ -101,7 +102,7 @@ protected void createInput() throws IOException
zos = new ZipOutputStream(out);
ze = new ZipEntry(CACHE_FILE_2.toString());
zos.putNextEntry(ze);
- zos.write(input.getBytes("UTF-8"));
+ zos.write(input.getBytes(StandardCharsets.UTF_8));
zos.closeEntry();
zos.close();
}
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestRawBytesStreaming.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestRawBytesStreaming.java
index 7621fd1fe8..09adb3d5fd 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestRawBytesStreaming.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestRawBytesStreaming.java
@@ -22,6 +22,7 @@
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
@@ -46,7 +47,7 @@ public TestRawBytesStreaming() throws IOException {
protected void createInput() throws IOException {
DataOutputStream out = new DataOutputStream(new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
- out.write(input.getBytes("UTF-8"));
+ out.write(input.getBytes(StandardCharsets.UTF_8));
out.close();
}
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamAggregate.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamAggregate.java
index b27a8c65ae..b303c8c977 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamAggregate.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamAggregate.java
@@ -21,6 +21,7 @@
import org.junit.Test;
import static org.junit.Assert.*;
import java.io.*;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -53,7 +54,7 @@ protected void createInput() throws IOException
{
DataOutputStream out = new DataOutputStream(
new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
- out.write(input.getBytes("UTF-8"));
+ out.write(input.getBytes(StandardCharsets.UTF_8));
out.close();
}
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestUnconsumedInput.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestUnconsumedInput.java
index e1f6da5276..b2bc84b4f9 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestUnconsumedInput.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestUnconsumedInput.java
@@ -24,14 +24,11 @@
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Test;
public class TestUnconsumedInput {
@@ -54,12 +51,12 @@ public TestUnconsumedInput() throws IOException
protected void createInput() throws IOException
{
- DataOutputStream out = new DataOutputStream(
- new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
+ try (DataOutputStream out = new DataOutputStream(
+ new FileOutputStream(INPUT_FILE.getAbsoluteFile()))) {
for (int i=0; i<10000; ++i) {
- out.write(input.getBytes("UTF-8"));
+ out.write(input.getBytes(StandardCharsets.UTF_8));
}
- out.close();
+ }
}
protected String[] genArgs() {
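
Beyond the charset swap, this hunk moves the output stream into try-with-resources so it is closed even when a write fails. A self-contained sketch of the pattern, writing to a temporary file rather than the test's fixed path:

    import java.io.DataOutputStream;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    public class TryWithResourcesSketch {
      public static void main(String[] args) throws IOException {
        File input = File.createTempFile("streaming-input", ".txt");
        // The stream is closed automatically, even if a write throws.
        try (DataOutputStream out = new DataOutputStream(
            new FileOutputStream(input))) {
          for (int i = 0; i < 10; i++) {
            out.write("one line of input\n".getBytes(StandardCharsets.UTF_8));
          }
        }
        System.out.println(input + ": " + input.length() + " bytes");
      }
    }
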
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java
index f2d9495efa..5bf2fe52d4 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/mapreduce/TestStreamXmlRecordReader.java
@@ -24,6 +24,7 @@
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
@@ -88,7 +89,7 @@ private String slurpHadoop(Path p, FileSystem fs) throws IOException {
String contents = null;
try {
in.readFully(in.getPos(), buf);
- contents = new String(buf, "UTF-8");
+ contents = new String(buf, StandardCharsets.UTF_8);
} finally {
in.close();
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
index f7747c6216..e4b48c45f3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
@@ -36,7 +36,7 @@ public class TestYarnConfigurationFields extends TestConfigurationFieldsBase {
@SuppressWarnings({"deprecation", "methodlength"})
@Override
public void initializeMemberVariables() {
- xmlFilename = new String("yarn-default.xml");
+ xmlFilename = "yarn-default.xml";
configurationClasses = new Class[] { YarnConfiguration.class };
// Allocate for usage
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java
index f3bb5fde3a..c6fe29f65f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java
@@ -25,7 +25,7 @@
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.InetAddress;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.Map;
@@ -235,10 +235,10 @@ public void launchAM(ApplicationAttemptId attemptId)
final BufferedReader errReader =
new BufferedReader(new InputStreamReader(
- amProc.getErrorStream(), Charset.forName("UTF-8")));
+ amProc.getErrorStream(), StandardCharsets.UTF_8));
final BufferedReader inReader =
new BufferedReader(new InputStreamReader(
- amProc.getInputStream(), Charset.forName("UTF-8")));
+ amProc.getInputStream(), StandardCharsets.UTF_8));
// read error and input streams as this would free up the buffers
// free the error stream buffer
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java
index 447a3e967a..3b39c10bf8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/PublishedConfigurationOutputter.java
@@ -18,7 +18,6 @@
package org.apache.hadoop.yarn.service.utils;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
@@ -72,7 +71,7 @@ public void save(File dest) throws IOException {
* @throws IOException
*/
public void save(OutputStream out) throws IOException {
- IOUtils.write(asString(), out, Charsets.UTF_8);
+ IOUtils.write(asString(), out, StandardCharsets.UTF_8);
}
/**
* Convert to a string
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
index 00cf2d4f76..cd0d2aa979 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
@@ -22,7 +22,7 @@
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.text.DecimalFormat;
import java.util.*;
@@ -357,7 +357,7 @@ private int printApplicationAttemptReport(String applicationAttemptId)
// Use PrintWriter.println, which uses correct platform line ending.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter appAttemptReportStr = new PrintWriter(
- new OutputStreamWriter(baos, Charset.forName("UTF-8")));
+ new OutputStreamWriter(baos, StandardCharsets.UTF_8));
if (appAttemptReport != null) {
appAttemptReportStr.println("Application Attempt Report : ");
appAttemptReportStr.print("\tApplicationAttempt-Id : ");
@@ -381,11 +381,11 @@ private int printApplicationAttemptReport(String applicationAttemptId)
appAttemptReportStr.print("Application Attempt with id '"
+ applicationAttemptId + "' doesn't exist in Timeline Server.");
appAttemptReportStr.close();
- sysout.println(baos.toString("UTF-8"));
+ sysout.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
return -1;
}
appAttemptReportStr.close();
- sysout.println(baos.toString("UTF-8"));
+ sysout.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
return 0;
}
@@ -417,7 +417,7 @@ private int printContainerReport(String containerId) throws YarnException,
// Use PrintWriter.println, which uses correct platform line ending.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter containerReportStr = new PrintWriter(
- new OutputStreamWriter(baos, Charset.forName("UTF-8")));
+ new OutputStreamWriter(baos, StandardCharsets.UTF_8));
if (containerReport != null) {
containerReportStr.println("Container Report : ");
containerReportStr.print("\tContainer-Id : ");
@@ -446,11 +446,11 @@ private int printContainerReport(String containerId) throws YarnException,
containerReportStr.print("Container with id '" + containerId
+ "' doesn't exist in Timeline Server.");
containerReportStr.close();
- sysout.println(baos.toString("UTF-8"));
+ sysout.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
return -1;
}
containerReportStr.close();
- sysout.println(baos.toString("UTF-8"));
+ sysout.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
return 0;
}
@@ -468,7 +468,7 @@ private void listApplications(Set<String> appTypes,
EnumSet<YarnApplicationState> appStates, Set<String> appTags)
throws YarnException, IOException {
PrintWriter writer = new PrintWriter(
- new OutputStreamWriter(sysout, Charset.forName("UTF-8")));
+ new OutputStreamWriter(sysout, StandardCharsets.UTF_8));
if (allAppStates) {
for (YarnApplicationState appState : YarnApplicationState.values()) {
appStates.add(appState);
@@ -610,7 +610,7 @@ private int printApplicationReport(String applicationId)
// Use PrintWriter.println, which uses correct platform line ending.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter appReportStr = new PrintWriter(
- new OutputStreamWriter(baos, Charset.forName("UTF-8")));
+ new OutputStreamWriter(baos, StandardCharsets.UTF_8));
if (appReport != null) {
appReportStr.println("Application Report : ");
appReportStr.print("\tApplication-Id : ");
@@ -673,11 +673,11 @@ private int printApplicationReport(String applicationId)
appReportStr.print("Application with id '" + applicationId
+ "' doesn't exist in RM.");
appReportStr.close();
- sysout.println(baos.toString("UTF-8"));
+ sysout.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
return -1;
}
appReportStr.close();
- sysout.println(baos.toString("UTF-8"));
+ sysout.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
return 0;
}
@@ -718,7 +718,7 @@ private String getAllValidApplicationStates() {
private void listApplicationAttempts(String applicationId) throws YarnException,
IOException {
PrintWriter writer = new PrintWriter(
- new OutputStreamWriter(sysout, Charset.forName("UTF-8")));
+ new OutputStreamWriter(sysout, StandardCharsets.UTF_8));
List<ApplicationAttemptReport> appAttemptsReport = client
.getApplicationAttempts(ApplicationId.fromString(applicationId));
@@ -746,7 +746,7 @@ private void listApplicationAttempts(String applicationId) throws YarnException,
private void listContainers(String appAttemptId) throws YarnException,
IOException {
PrintWriter writer = new PrintWriter(
- new OutputStreamWriter(sysout, Charset.forName("UTF-8")));
+ new OutputStreamWriter(sysout, StandardCharsets.UTF_8));
List<ContainerReport> appsReport = client.getContainers(
ApplicationAttemptId.fromString(appAttemptId));
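
ByteArrayOutputStream.toString(String) declares UnsupportedEncodingException (and a Charset-accepting toString overload only appeared in later JDKs), which is presumably why these call sites decode the byte array directly. A minimal sketch of capturing PrintWriter output this way (buffer contents hypothetical):

    import java.io.ByteArrayOutputStream;
    import java.io.OutputStreamWriter;
    import java.io.PrintWriter;
    import java.nio.charset.StandardCharsets;

    public class ReportBufferSketch {
      public static void main(String[] args) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintWriter report = new PrintWriter(
            new OutputStreamWriter(baos, StandardCharsets.UTF_8));
        report.println("Application Report :");
        report.close(); // flush the writer before reading the buffer

        // Decode without the checked exception that toString("UTF-8") declares.
        System.out.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
      }
    }
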
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ClusterCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ClusterCLI.java
index 78bf93bd56..a0df9d6b18 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ClusterCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ClusterCLI.java
@@ -23,7 +23,7 @@
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
@@ -123,12 +123,12 @@ public int run(String[] args) throws Exception {
private void printClusterNodeAttributes() throws IOException, YarnException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter pw = new PrintWriter(
- new OutputStreamWriter(baos, Charset.forName("UTF-8")));
+ new OutputStreamWriter(baos, StandardCharsets.UTF_8));
for (NodeAttributeInfo attribute : client.getClusterAttributes()) {
pw.println(attribute.toString());
}
pw.close();
- sysout.println(baos.toString("UTF-8"));
+ sysout.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
}
void printClusterNodeLabels() throws YarnException, IOException {
@@ -158,11 +158,11 @@ void printClusterNodeLabels() throws YarnException, IOException {
void printUsage(Options opts) throws UnsupportedEncodingException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter pw =
- new PrintWriter(new OutputStreamWriter(baos, Charset.forName("UTF-8")));
+ new PrintWriter(new OutputStreamWriter(baos, StandardCharsets.UTF_8));
new HelpFormatter().printHelp(pw, HelpFormatter.DEFAULT_WIDTH, TITLE, null,
opts, HelpFormatter.DEFAULT_LEFT_PAD, HelpFormatter.DEFAULT_DESC_PAD,
null);
pw.close();
- sysout.println(baos.toString("UTF-8"));
+ sysout.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
}
}
\ No newline at end of file
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeAttributesCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeAttributesCLI.java
index 505eca8dcf..8fe388e71f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeAttributesCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeAttributesCLI.java
@@ -58,7 +58,7 @@
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
@@ -141,10 +141,10 @@ private void print(StringBuilder usageBuilder)
throws UnsupportedEncodingException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter pw =
- new PrintWriter(new OutputStreamWriter(baos, Charset.forName("UTF-8")));
+ new PrintWriter(new OutputStreamWriter(baos, StandardCharsets.UTF_8));
pw.write(usageBuilder.toString());
pw.close();
- errOut.println(baos.toString("UTF-8"));
+ errOut.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
}
private Options buildOptions(CommandHandler... handlers) {
@@ -380,7 +380,7 @@ public int printNodesByAttributes(String[] attrs)
protocol.getAttributesToNodes(request);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter writer = new PrintWriter(
- new OutputStreamWriter(baos, Charset.forName("UTF-8")));
+ new OutputStreamWriter(baos, StandardCharsets.UTF_8));
writer.format(HOSTNAMEVAL, "Hostname", "Attribute-value");
response.getAttributesToNodes().forEach((attributeKey, v) -> {
writer.println(getKeyString(attributeKey) + " :");
@@ -389,7 +389,7 @@ public int printNodesByAttributes(String[] attrs)
attrVal.getAttributeValue()));
});
writer.close();
- sysOut.println(baos.toString("UTF-8"));
+ sysOut.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
return 0;
}
@@ -405,7 +405,7 @@ private int printAttributesByNode(String[] nodeArray)
response.getNodeToAttributes();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter writer = new PrintWriter(
- new OutputStreamWriter(baos, Charset.forName("UTF-8")));
+ new OutputStreamWriter(baos, StandardCharsets.UTF_8));
writer.printf(NODEATTRIBUTE, "Attribute", "Type", "Value");
nodeToAttrs.forEach((node, v) -> {
// print node header
@@ -415,7 +415,7 @@ private int printAttributesByNode(String[] nodeArray)
attr.getAttributeType().name(), attr.getAttributeValue()));
});
writer.close();
- sysOut.println(baos.toString("UTF-8"));
+ sysOut.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
return 0;
}
@@ -427,14 +427,14 @@ private int printClusterAttributes() throws IOException, YarnException {
protocol.getClusterNodeAttributes(request);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter writer = new PrintWriter(
- new OutputStreamWriter(baos, Charset.forName("UTF-8")));
+ new OutputStreamWriter(baos, StandardCharsets.UTF_8));
writer.format(NODEATTRIBUTEINFO, "Attribute", "Type");
for (NodeAttributeInfo attr : response.getNodeAttributes()) {
writer.format(NODEATTRIBUTEINFO, getKeyString(attr.getAttributeKey()),
attr.getAttributeType().name());
}
writer.close();
- sysOut.println(baos.toString("UTF-8"));
+ sysOut.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
return 0;
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
index 6120a8496a..317f30cdde 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
@@ -21,7 +21,7 @@
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
@@ -177,7 +177,7 @@ private void printUsage(Options opts) {
private void listClusterNodes(Set<NodeState> nodeStates)
throws YarnException, IOException {
PrintWriter writer = new PrintWriter(
- new OutputStreamWriter(sysout, Charset.forName("UTF-8")));
+ new OutputStreamWriter(sysout, StandardCharsets.UTF_8));
List<NodeReport> nodesReport = client.getNodeReports(
nodeStates.toArray(new NodeState[0]));
writer.println("Total Nodes:" + nodesReport.size());
@@ -202,7 +202,7 @@ private void listClusterNodes(Set<NodeState> nodeStates)
private void listDetailedClusterNodes(Set<NodeState> nodeStates)
throws YarnException, IOException {
PrintWriter writer = new PrintWriter(new OutputStreamWriter(sysout,
- Charset.forName("UTF-8")));
+ StandardCharsets.UTF_8));
List<NodeReport> nodesReport = client.getNodeReports(nodeStates
.toArray(new NodeState[0]));
writer.println("Total Nodes:" + nodesReport.size());
@@ -265,7 +265,7 @@ private void printNodeStatus(String nodeIdStr) throws YarnException,
// Use PrintWriter.println, which uses correct platform line ending.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintWriter nodeReportStr = new PrintWriter(
- new OutputStreamWriter(baos, Charset.forName("UTF-8")));
+ new OutputStreamWriter(baos, StandardCharsets.UTF_8));
NodeReport nodeReport = null;
for (NodeReport report : nodesReport) {
if (!report.getNodeId().equals(nodeId)) {
@@ -347,7 +347,7 @@ private void printNodeStatus(String nodeIdStr) throws YarnException,
+ nodeIdStr);
}
nodeReportStr.close();
- sysout.println(baos.toString("UTF-8"));
+ sysout.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
}
private String getAllValidNodeStates() {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/QueueCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/QueueCLI.java
index db2b2cdf53..927de28449 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/QueueCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/QueueCLI.java
@@ -136,7 +136,7 @@ void printUsage(Options opts) {
private int listQueue(String queueName) throws YarnException, IOException {
int rc;
PrintWriter writer = new PrintWriter(
- new OutputStreamWriter(sysout, Charset.forName("UTF-8")));
+ new OutputStreamWriter(sysout, StandardCharsets.UTF_8));
QueueInfo queueInfo = client.getQueueInfo(queueName);
if (queueInfo != null) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestSharedCacheClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestSharedCacheClientImpl.java
index 1b179b138a..8111c7ebdf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestSharedCacheClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/api/impl/TestSharedCacheClientImpl.java
@@ -27,6 +27,7 @@
import java.io.DataOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -173,7 +174,7 @@ private Path makeFile(String filename) throws Exception {
DataOutputStream out = null;
try {
out = localFs.create(file);
- out.write(input.getBytes("UTF-8"));
+ out.write(input.getBytes(StandardCharsets.UTF_8));
} finally {
if(out != null) {
out.close();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestNodeAttributesCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestNodeAttributesCLI.java
index cab4bda76c..12ac21bc98 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestNodeAttributesCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestNodeAttributesCLI.java
@@ -41,6 +41,7 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
@@ -60,7 +61,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
/**
@@ -528,8 +528,8 @@ private int runTool(String... args) throws Exception {
sysOutBytes.reset();
LOG.info("Running: NodeAttributesCLI " + Joiner.on(" ").join(args));
int ret = nodeAttributesCLI.run(args);
- errOutput = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
- sysOutput = new String(sysOutBytes.toByteArray(), Charsets.UTF_8);
+ errOutput = new String(errOutBytes.toByteArray(), StandardCharsets.UTF_8);
+ sysOutput = new String(sysOutBytes.toByteArray(), StandardCharsets.UTF_8);
LOG.info("Err_output:\n" + errOutput);
LOG.info("Sys_output:\n" + sysOutput);
return ret;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java
index 6eb1f2fc98..411526db41 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java
@@ -38,6 +38,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
@@ -85,7 +86,6 @@
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
public class TestRMAdminCLI {
@@ -1061,7 +1061,7 @@ public void testRMHAErrorUsage() throws Exception {
try {
String[] args = {"-transitionToActive"};
assertEquals(-1, rmAdminCLIWithHAEnabled.run(args));
- String errOut = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
+ String errOut = new String(errOutBytes.toByteArray(), StandardCharsets.UTF_8);
errOutBytes.reset();
assertTrue(errOut.contains("Usage: rmadmin"));
} finally {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
index 26c3e01a45..5a752064bc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
@@ -31,7 +31,6 @@
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.Writer;
-import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
@@ -288,7 +287,7 @@ public void write(DataOutputStream out, Set pendingUploadFiles)
this.uploadedFiles.add(logFile);
} catch (IOException e) {
String message = logErrorMessage(logFile, e);
- out.write(message.getBytes(Charset.forName("UTF-8")));
+ out.write(message.getBytes(StandardCharsets.UTF_8));
} finally {
IOUtils.cleanupWithLogger(LOG, in);
}
@@ -1067,7 +1066,7 @@ public String nextLog() throws IOException {
new BoundedInputStream(valueStream, currentLogLength);
currentLogData.setPropagateClose(false);
currentLogISR = new InputStreamReader(currentLogData,
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
currentLogType = logType;
} catch (EOFException e) {
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogToolUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogToolUtils.java
index 3c56b0290d..cc137ba696 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogToolUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogToolUtils.java
@@ -25,7 +25,7 @@
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
@@ -68,7 +68,7 @@ private static byte[] formatContainerLogHeader(String containerId,
.append("LogLastModifiedTime:" + lastModifiedTime + "\n")
.append("LogLength:" + fileLength + "\n")
.append("LogContents:\n");
- return sb.toString().getBytes(Charset.forName("UTF-8"));
+ return sb.toString().getBytes(StandardCharsets.UTF_8);
}
/**
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationHtmlBlock.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationHtmlBlock.java
index 4ec8794b14..b374f53145 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationHtmlBlock.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/LogAggregationHtmlBlock.java
@@ -27,7 +27,7 @@
import java.io.IOException;
import java.io.InputStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
@@ -196,7 +196,7 @@ protected void processContainerLog(Block html, long[] range, InputStream in,
Hamlet.PRE pre = html.pre();
while (toRead > 0 && (len = in.read(cbuf, 0, currentToRead)) > 0) {
- pre.__(new String(cbuf, 0, len, Charset.forName("UTF-8")));
+ pre.__(new String(cbuf, 0, len, StandardCharsets.UTF_8));
toRead = toRead - len;
currentToRead = toRead > bufferSize ? bufferSize : (int) toRead;
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java
index d4431d56b3..7c6be1305d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/LogAggregationIndexedFileController.java
@@ -26,7 +26,6 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
-import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
@@ -220,7 +219,7 @@ public Object run() throws Exception {
// append a simple character("\n") to move the writer cursor, so
// we could get the correct position when we call
// fsOutputStream.getStartPos()
- final byte[] dummyBytes = "\n".getBytes(Charset.forName("UTF-8"));
+ final byte[] dummyBytes = "\n".getBytes(StandardCharsets.UTF_8);
fsDataOStream.write(dummyBytes);
fsDataOStream.flush();
@@ -286,7 +285,7 @@ private Pair initializeWriterInRolling(
int actualLength = b.length;
if (actualLength == nameLength) {
String recoveredLogFile = new String(
- b, Charset.forName("UTF-8"));
+ b, StandardCharsets.UTF_8);
if (recoveredLogFile.equals(
currentRemoteLogFile.getName())) {
overwriteCheckSum = false;
@@ -340,7 +339,7 @@ private Pair initializeWriterInRolling(
String fileName = aggregatedLogFile.getName();
checksumFileOutputStream.writeInt(fileName.length());
checksumFileOutputStream.write(fileName.getBytes(
- Charset.forName("UTF-8")));
+ StandardCharsets.UTF_8));
checksumFileOutputStream.writeLong(
currentAggregatedLogFileLength);
checksumFileOutputStream.flush();
@@ -403,7 +402,7 @@ public void write(LogKey logKey, LogValue logValue) throws IOException {
if (outputStreamState != null &&
outputStreamState.getOutputStream() != null) {
outputStreamState.getOutputStream().write(
- message.getBytes(Charset.forName("UTF-8")));
+ message.getBytes(StandardCharsets.UTF_8));
}
} finally {
IOUtils.cleanupWithLogger(LOG, in);
@@ -598,7 +597,7 @@ public boolean readAggregatedLogs(ContainerLogsRequest logRequest,
Times.format(candidate.getLastModifiedTime()),
in, os, buf, ContainerLogAggregationType.AGGREGATED);
byte[] b = aggregatedLogSuffix(candidate.getFileName())
- .getBytes(Charset.forName("UTF-8"));
+ .getBytes(StandardCharsets.UTF_8);
os.write(b, 0, b.length);
findLogs = true;
} catch (IOException e) {
@@ -769,7 +768,7 @@ public Map parseCheckSumFiles(
checksumFileInputStream.readFully(b);
int actualLength = b.length;
if (actualLength == nameLength) {
- nodeName = new String(b, Charset.forName("UTF-8"));
+ nodeName = new String(b, StandardCharsets.UTF_8);
index = checksumFileInputStream.readLong();
} else {
continue;
@@ -950,9 +949,9 @@ public IndexedLogsMeta loadIndexedLogsMeta(Path remoteLogPath, long end,
if (LOG.isDebugEnabled()) {
LOG.debug("the length of loaded UUID:{}", uuidReadLen);
LOG.debug("the loaded UUID:{}", new String(uuidRead,
- Charset.forName("UTF-8")));
+ StandardCharsets.UTF_8));
LOG.debug("the expected UUID:{}", new String(this.uuid,
- Charset.forName("UTF-8")));
+ StandardCharsets.UTF_8));
}
throw new IOException("The UUID from "
+ remoteLogPath + " is not correct. The offset of loaded UUID is "
@@ -1359,7 +1358,7 @@ private byte[] createUUID(ApplicationId appId) throws IOException {
try {
MessageDigest digest = MessageDigest.getInstance("SHA-256");
return digest.digest(appId.toString().getBytes(
- Charset.forName("UTF-8")));
+ StandardCharsets.UTF_8));
} catch (NoSuchAlgorithmException ex) {
throw new IOException(ex);
}
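
The indexed-file controller derives a per-application UUID from a SHA-256 digest of the application id, now encoded through the StandardCharsets constant. A rough, standalone sketch of that derivation (the identifier is a plain string here, not the YARN ApplicationId type):

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class LogFileUuidSketch {
      static byte[] createUuid(String appId) {
        try {
          MessageDigest digest = MessageDigest.getInstance("SHA-256");
          return digest.digest(appId.getBytes(StandardCharsets.UTF_8));
        } catch (NoSuchAlgorithmException e) {
          throw new IllegalStateException(e);
        }
      }

      public static void main(String[] args) {
        // Hypothetical application id, used only to show the 32-byte digest size.
        System.out.println(createUuid("application_1700000000000_0001").length);
      }
    }
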
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/tfile/LogAggregationTFileController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/tfile/LogAggregationTFileController.java
index e1e2c9aeaa..4b282488c6 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/tfile/LogAggregationTFileController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/filecontroller/tfile/LogAggregationTFileController.java
@@ -22,7 +22,7 @@
import java.io.EOFException;
import java.io.IOException;
import java.io.OutputStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.HashMap;
@@ -217,7 +217,7 @@ public boolean readAggregatedLogs(ContainerLogsRequest logRequest,
valueStream, os, buf,
ContainerLogAggregationType.AGGREGATED);
byte[] b = aggregatedLogSuffix(fileType).getBytes(
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
os.write(b, 0, b.length);
findLogs = true;
} else {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/Graph.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/Graph.java
index 66ccafa92a..1d24034730 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/Graph.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/Graph.java
@@ -20,7 +20,7 @@
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@@ -190,7 +190,7 @@ public String generateGraphViz() {
public void save(String filepath) throws IOException {
try (OutputStreamWriter fout = new OutputStreamWriter(
- new FileOutputStream(filepath), Charset.forName("UTF-8"))) {
+ new FileOutputStream(filepath), StandardCharsets.UTF_8)) {
fout.write(generateGraphViz());
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
index b1a186e8a8..6351cb69c8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/DockerClientConfigHandler.java
@@ -39,7 +39,6 @@
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
@@ -115,7 +114,7 @@ public static Credentials readCredentialsFromConfigFile(Path configFile,
new DockerCredentialTokenIdentifier(registryUrl, applicationId);
Token<DockerCredentialTokenIdentifier> token =
new Token<>(tokenId.getBytes(),
- registryCred.getBytes(Charset.forName("UTF-8")),
+ registryCred.getBytes(StandardCharsets.UTF_8),
tokenId.getKind(), new Text(registryUrl));
credentials.addToken(
new Text(registryUrl + "-" + applicationId), token);
@@ -173,7 +172,7 @@ public static boolean writeDockerCredentialsToPath(File outConfigFile,
ObjectNode registryCredNode = mapper.createObjectNode();
registryUrlNode.set(ti.getRegistryUrl(), registryCredNode);
registryCredNode.put(CONFIG_AUTH_KEY,
- new String(tk.getPassword(), Charset.forName("UTF-8")));
+ new String(tk.getPassword(), StandardCharsets.UTF_8));
LOG.debug("Prepared token for write: {}", tk);
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
index 5a518dff7e..ed48a92a04 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
@@ -26,7 +26,7 @@
import java.io.IOException;
import java.io.InputStreamReader;
import java.math.BigInteger;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collections;
@@ -519,7 +519,7 @@ private static ProcessInfo constructProcessInfo(ProcessInfo pinfo,
File pidDir = new File(procfsDir, pinfo.getPid());
fReader = new InputStreamReader(
new FileInputStream(
- new File(pidDir, PROCFS_STAT_FILE)), Charset.forName("UTF-8"));
+ new File(pidDir, PROCFS_STAT_FILE)), StandardCharsets.UTF_8);
in = new BufferedReader(fReader);
} catch (FileNotFoundException f) {
// The process vanished in the interim!
@@ -715,7 +715,7 @@ public String getCmdLine(String procfsDir) {
fReader = new InputStreamReader(
new FileInputStream(
new File(new File(procfsDir, pid.toString()), PROCFS_CMDLINE_FILE)),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
} catch (FileNotFoundException f) {
// The process vanished in the interim!
return ret;
@@ -773,7 +773,7 @@ private static void constructProcessSMAPInfo(ProcessTreeSmapMemInfo pInfo,
return;
}
fReader = new InputStreamReader(
- new FileInputStream(file), Charset.forName("UTF-8"));
+ new FileInputStream(file), StandardCharsets.UTF_8);
in = new BufferedReader(fReader);
ProcessSmapMemoryInfo memoryMappingInfo = null;
List<String> lines = IOUtils.readLines(in);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexedFileController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexedFileController.java
index b7fcb18ff6..3e91ab5a54 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexedFileController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/logaggregation/filecontroller/ifile/TestLogAggregationIndexedFileController.java
@@ -25,7 +25,7 @@
import java.io.PrintStream;
import java.io.Writer;
import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
@@ -267,7 +267,7 @@ public boolean isRollover(final FileContext fc, final Path candidate) throws IOE
fInput = FileSystem.create(fs, checksumFile, LOG_FILE_UMASK);
fInput.writeInt(nodeName.length());
fInput.write(nodeName.getBytes(
- Charset.forName("UTF-8")));
+ StandardCharsets.UTF_8));
fInput.writeLong(0);
} finally {
IOUtils.closeStream(fInput);
@@ -579,7 +579,7 @@ void testGetLogMetaFilesOfNode() throws Exception {
fInput = FileSystem.create(fs, checksumFile, LOG_FILE_UMASK);
fInput.writeInt(nodeName.length());
fInput.write(nodeName.getBytes(
- Charset.forName("UTF-8")));
+ StandardCharsets.UTF_8));
fInput.writeLong(0);
} finally {
IOUtils.closeStream(fInput);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
index c4d8d4da33..a55aa29332 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/LeveldbTimelineStore.java
@@ -48,7 +48,7 @@
import org.slf4j.LoggerFactory;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.locks.ReentrantLock;
@@ -127,25 +127,25 @@ public class LeveldbTimelineStore extends AbstractService
//call LevelDb recovery
static final String BACKUP_EXT = ".backup-";
- private static final byte[] START_TIME_LOOKUP_PREFIX = "k".getBytes(Charset.forName("UTF-8"));
- private static final byte[] ENTITY_ENTRY_PREFIX = "e".getBytes(Charset.forName("UTF-8"));
- private static final byte[] INDEXED_ENTRY_PREFIX = "i".getBytes(Charset.forName("UTF-8"));
+ private static final byte[] START_TIME_LOOKUP_PREFIX = "k".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] ENTITY_ENTRY_PREFIX = "e".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] INDEXED_ENTRY_PREFIX = "i".getBytes(StandardCharsets.UTF_8);
- private static final byte[] EVENTS_COLUMN = "e".getBytes(Charset.forName("UTF-8"));
- private static final byte[] PRIMARY_FILTERS_COLUMN = "f".getBytes(Charset.forName("UTF-8"));
- private static final byte[] OTHER_INFO_COLUMN = "i".getBytes(Charset.forName("UTF-8"));
- private static final byte[] RELATED_ENTITIES_COLUMN = "r".getBytes(Charset.forName("UTF-8"));
+ private static final byte[] EVENTS_COLUMN = "e".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] PRIMARY_FILTERS_COLUMN = "f".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] OTHER_INFO_COLUMN = "i".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] RELATED_ENTITIES_COLUMN = "r".getBytes(StandardCharsets.UTF_8);
private static final byte[] INVISIBLE_REVERSE_RELATED_ENTITIES_COLUMN =
- "z".getBytes(Charset.forName("UTF-8"));
- private static final byte[] DOMAIN_ID_COLUMN = "d".getBytes(Charset.forName("UTF-8"));
+ "z".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] DOMAIN_ID_COLUMN = "d".getBytes(StandardCharsets.UTF_8);
- private static final byte[] DOMAIN_ENTRY_PREFIX = "d".getBytes(Charset.forName("UTF-8"));
- private static final byte[] OWNER_LOOKUP_PREFIX = "o".getBytes(Charset.forName("UTF-8"));
- private static final byte[] DESCRIPTION_COLUMN = "d".getBytes(Charset.forName("UTF-8"));
- private static final byte[] OWNER_COLUMN = "o".getBytes(Charset.forName("UTF-8"));
- private static final byte[] READER_COLUMN = "r".getBytes(Charset.forName("UTF-8"));
- private static final byte[] WRITER_COLUMN = "w".getBytes(Charset.forName("UTF-8"));
- private static final byte[] TIMESTAMP_COLUMN = "t".getBytes(Charset.forName("UTF-8"));
+ private static final byte[] DOMAIN_ENTRY_PREFIX = "d".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] OWNER_LOOKUP_PREFIX = "o".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] DESCRIPTION_COLUMN = "d".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] OWNER_COLUMN = "o".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] READER_COLUMN = "r".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] WRITER_COLUMN = "w".getBytes(StandardCharsets.UTF_8);
+ private static final byte[] TIMESTAMP_COLUMN = "t".getBytes(StandardCharsets.UTF_8);
private static final byte[] EMPTY_BYTES = new byte[0];
@@ -456,7 +456,7 @@ private static TimelineEntity getEntity(String entityId, String entityType,
}
} else if (key[prefixlen] == DOMAIN_ID_COLUMN[0]) {
byte[] v = iterator.peekNext().getValue();
- String domainId = new String(v, Charset.forName("UTF-8"));
+ String domainId = new String(v, StandardCharsets.UTF_8);
entity.setDomainId(domainId);
} else {
if (key[prefixlen] !=
@@ -839,7 +839,7 @@ private void put(TimelineEntity entity, TimelinePutResponse response,
if (domainIdBytes == null) {
domainId = TimelineDataManager.DEFAULT_DOMAIN_ID;
} else {
- domainId = new String(domainIdBytes, Charset.forName("UTF-8"));
+ domainId = new String(domainIdBytes, StandardCharsets.UTF_8);
}
if (!domainId.equals(entity.getDomainId())) {
// in this case the entity will be put, but the relation will be
@@ -894,9 +894,9 @@ private void put(TimelineEntity entity, TimelinePutResponse response,
return;
}
} else {
- writeBatch.put(key, entity.getDomainId().getBytes(Charset.forName("UTF-8")));
+ writeBatch.put(key, entity.getDomainId().getBytes(StandardCharsets.UTF_8));
writePrimaryFilterEntries(writeBatch, primaryFilters, key,
- entity.getDomainId().getBytes(Charset.forName("UTF-8")));
+ entity.getDomainId().getBytes(StandardCharsets.UTF_8));
}
db.write(writeBatch);
} catch (DBException de) {
@@ -928,7 +928,7 @@ private void put(TimelineEntity entity, TimelinePutResponse response,
// This is the new entity, the domain should be the same
byte[] key = createDomainIdKey(relatedEntity.getId(),
relatedEntity.getType(), relatedEntityStartTime);
- db.put(key, entity.getDomainId().getBytes(Charset.forName("UTF-8")));
+ db.put(key, entity.getDomainId().getBytes(StandardCharsets.UTF_8));
db.put(createRelatedEntityKey(relatedEntity.getId(),
relatedEntity.getType(), relatedEntityStartTime,
entity.getEntityId(), entity.getEntityType()), EMPTY_BYTES);
@@ -1255,7 +1255,7 @@ private static byte[] createOtherInfoKey(String entityId, String entityType,
* to the end of the array (for parsing other info keys).
*/
private static String parseRemainingKey(byte[] b, int offset) {
- return new String(b, offset, b.length - offset, Charset.forName("UTF-8"));
+ return new String(b, offset, b.length - offset, StandardCharsets.UTF_8);
}
/**
@@ -1629,9 +1629,9 @@ public void put(TimelineDomain domain) throws IOException {
domain.getOwner(), domain.getId(), DESCRIPTION_COLUMN);
if (domain.getDescription() != null) {
writeBatch.put(domainEntryKey, domain.getDescription().
- getBytes(Charset.forName("UTF-8")));
+ getBytes(StandardCharsets.UTF_8));
writeBatch.put(ownerLookupEntryKey, domain.getDescription().
- getBytes(Charset.forName("UTF-8")));
+ getBytes(StandardCharsets.UTF_8));
} else {
writeBatch.put(domainEntryKey, EMPTY_BYTES);
writeBatch.put(ownerLookupEntryKey, EMPTY_BYTES);
@@ -1642,17 +1642,17 @@ public void put(TimelineDomain domain) throws IOException {
ownerLookupEntryKey = createOwnerLookupKey(
domain.getOwner(), domain.getId(), OWNER_COLUMN);
// Null check for owner is done before
- writeBatch.put(domainEntryKey, domain.getOwner().getBytes(Charset.forName("UTF-8")));
- writeBatch.put(ownerLookupEntryKey, domain.getOwner().getBytes(Charset.forName("UTF-8")));
+ writeBatch.put(domainEntryKey, domain.getOwner().getBytes(StandardCharsets.UTF_8));
+ writeBatch.put(ownerLookupEntryKey, domain.getOwner().getBytes(StandardCharsets.UTF_8));
// Write readers
domainEntryKey = createDomainEntryKey(domain.getId(), READER_COLUMN);
ownerLookupEntryKey = createOwnerLookupKey(
domain.getOwner(), domain.getId(), READER_COLUMN);
if (domain.getReaders() != null && domain.getReaders().length() > 0) {
- writeBatch.put(domainEntryKey, domain.getReaders().getBytes(Charset.forName("UTF-8")));
+ writeBatch.put(domainEntryKey, domain.getReaders().getBytes(StandardCharsets.UTF_8));
writeBatch.put(ownerLookupEntryKey, domain.getReaders().
- getBytes(Charset.forName("UTF-8")));
+ getBytes(StandardCharsets.UTF_8));
} else {
writeBatch.put(domainEntryKey, EMPTY_BYTES);
writeBatch.put(ownerLookupEntryKey, EMPTY_BYTES);
@@ -1663,9 +1663,9 @@ public void put(TimelineDomain domain) throws IOException {
ownerLookupEntryKey = createOwnerLookupKey(
domain.getOwner(), domain.getId(), WRITER_COLUMN);
if (domain.getWriters() != null && domain.getWriters().length() > 0) {
- writeBatch.put(domainEntryKey, domain.getWriters().getBytes(Charset.forName("UTF-8")));
+ writeBatch.put(domainEntryKey, domain.getWriters().getBytes(StandardCharsets.UTF_8));
writeBatch.put(ownerLookupEntryKey, domain.getWriters().
- getBytes(Charset.forName("UTF-8")));
+ getBytes(StandardCharsets.UTF_8));
} else {
writeBatch.put(domainEntryKey, EMPTY_BYTES);
writeBatch.put(ownerLookupEntryKey, EMPTY_BYTES);
@@ -1802,13 +1802,13 @@ private static TimelineDomain getTimelineDomain(
byte[] value = iterator.peekNext().getValue();
if (value != null && value.length > 0) {
if (key[prefix.length] == DESCRIPTION_COLUMN[0]) {
- domain.setDescription(new String(value, Charset.forName("UTF-8")));
+ domain.setDescription(new String(value, StandardCharsets.UTF_8));
} else if (key[prefix.length] == OWNER_COLUMN[0]) {
- domain.setOwner(new String(value, Charset.forName("UTF-8")));
+ domain.setOwner(new String(value, StandardCharsets.UTF_8));
} else if (key[prefix.length] == READER_COLUMN[0]) {
- domain.setReaders(new String(value, Charset.forName("UTF-8")));
+ domain.setReaders(new String(value, StandardCharsets.UTF_8));
} else if (key[prefix.length] == WRITER_COLUMN[0]) {
- domain.setWriters(new String(value, Charset.forName("UTF-8")));
+ domain.setWriters(new String(value, StandardCharsets.UTF_8));
} else if (key[prefix.length] == TIMESTAMP_COLUMN[0]) {
domain.setCreatedTime(readReverseOrderedLong(value, 0));
domain.setModifiedTime(readReverseOrderedLong(value, 8));
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/lib/ZKClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/lib/ZKClient.java
index 21cbe20ab4..c0e877b4c7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/lib/ZKClient.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/lib/ZKClient.java
@@ -19,7 +19,7 @@
package org.apache.hadoop.yarn.lib;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.zookeeper.CreateMode;
@@ -57,7 +57,7 @@ public ZKClient(String string) throws IOException {
public void registerService(String path, String data) throws
IOException, InterruptedException {
try {
- zkClient.create(path, data.getBytes(Charset.forName("UTF-8")),
+ zkClient.create(path, data.getBytes(StandardCharsets.UTF_8),
ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
} catch(KeeperException ke) {
throw new IOException(ke);
@@ -114,7 +114,7 @@ public String getServiceData(String path) throws IOException,
try {
Stat stat = new Stat();
byte[] byteData = zkClient.getData(path, false, stat);
- data = new String(byteData, Charset.forName("UTF-8"));
+ data = new String(byteData, StandardCharsets.UTF_8);
} catch(KeeperException ke) {
throw new IOException(ke);
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogWebServiceUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogWebServiceUtils.java
index c4568be801..f396c29a33 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogWebServiceUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/webapp/LogWebServiceUtils.java
@@ -46,7 +46,7 @@
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.UndeclaredThrowableException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;
@@ -120,7 +120,7 @@ private static StreamingOutput getStreamingOutput(
.readAggregatedLogs(request, os);
if (!findLogs) {
os.write(("Can not find logs for container:" + containerIdStr)
- .getBytes(Charset.forName("UTF-8")));
+ .getBytes(StandardCharsets.UTF_8));
} else {
if (printEmptyLocalContainerLog) {
StringBuilder sb = new StringBuilder();
@@ -129,7 +129,7 @@ private static StreamingOutput getStreamingOutput(
+ "\n");
sb.append("LogContents:\n");
sb.append(getNoRedirectWarning() + "\n");
- os.write(sb.toString().getBytes(Charset.forName("UTF-8")));
+ os.write(sb.toString().getBytes(StandardCharsets.UTF_8));
}
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
index a28a6fc411..19335045c8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
@@ -50,6 +50,7 @@
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.InetSocketAddress;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -1064,7 +1065,7 @@ public synchronized void updateYarnSysFS(Context ctx, String user,
if (file.createNewFile()) {
FileOutputStream output = new FileOutputStream(file);
try {
- output.write(spec.getBytes("UTF-8"));
+ output.write(spec.getBytes(StandardCharsets.UTF_8));
} finally {
output.close();
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
index 9d57f8fff4..78ba39ef69 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
@@ -29,7 +29,7 @@
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.net.URISyntaxException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -502,7 +502,7 @@ private Thread startStreamReader(final InputStream stream)
@Override
public void run() {
try (BufferedReader lines = new BufferedReader(
- new InputStreamReader(stream, Charset.forName("UTF-8")))) {
+ new InputStreamReader(stream, StandardCharsets.UTF_8))) {
char[] buf = new char[512];
int nRead;
while ((nRead = lines.read(buf, 0, buf.length)) > 0) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsResourceCalculator.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsResourceCalculator.java
index 2267cf50ed..f5e987deee 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsResourceCalculator.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/CGroupsResourceCalculator.java
@@ -35,7 +35,7 @@
import java.io.IOException;
import java.io.InputStreamReader;
import java.math.BigInteger;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -316,7 +316,7 @@ private void processFile(File file, Function processLine)
throws YarnException {
// Read "procfsDir//stat" file - typically /proc//stat
try (InputStreamReader fReader = new InputStreamReader(
- new FileInputStream(file), Charset.forName("UTF-8"))) {
+ new FileInputStream(file), StandardCharsets.UTF_8)) {
try (BufferedReader in = new BufferedReader(fReader)) {
try {
String str;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
index 2769788569..d0acc9a0bb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
@@ -28,7 +28,7 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@@ -214,7 +214,7 @@ private void printLocalLogFile(Block html, File logFile) {
IOUtils.skipFully(logByteStream, start);
InputStreamReader reader =
- new InputStreamReader(logByteStream, Charset.forName("UTF-8"));
+ new InputStreamReader(logByteStream, StandardCharsets.UTF_8);
int bufferSize = 65536;
char[] cbuf = new char[bufferSize];
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerShellWebSocket.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerShellWebSocket.java
index 138f9e0b02..175ee09f8d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerShellWebSocket.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerShellWebSocket.java
@@ -20,7 +20,7 @@
import java.io.IOException;
import java.net.URI;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
@@ -77,7 +77,7 @@ public void onText(Session session, String message) throws IOException {
if (!message.equals("1{}")) {
// Send keystroke to process input
byte[] payload;
- payload = message.getBytes(Charset.forName("UTF-8"));
+ payload = message.getBytes(StandardCharsets.UTF_8);
if (payload != null) {
pair.out.write(payload);
pair.out.flush();
@@ -86,7 +86,7 @@ public void onText(Session session, String message) throws IOException {
// Render process output
int no = pair.in.available();
pair.in.read(buffer, 0, Math.min(no, buffer.length));
- String formatted = new String(buffer, Charset.forName("UTF-8"))
+ String formatted = new String(buffer, StandardCharsets.UTF_8)
.replaceAll("\n", "\r\n");
session.getRemote().sendString(formatted);
}
@@ -142,7 +142,7 @@ public void onClose(Session session, int status, String reason) {
try {
LOG.info(session.getRemoteAddress().getHostString() + " closed!");
String exit = "exit\r\n";
- pair.out.write(exit.getBytes(Charset.forName("UTF-8")));
+ pair.out.write(exit.getBytes(StandardCharsets.UTF_8));
pair.out.flush();
pair.in.close();
pair.out.close();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java
index 3cf9f65909..5ec584f36d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java
@@ -21,7 +21,7 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Collection;
@@ -490,7 +490,7 @@ public void write(OutputStream os) throws IOException,
}
sb.append(StringUtils.repeat("*", endOfFile.length() + 50)
+ "\n\n");
- os.write(sb.toString().getBytes(Charset.forName("UTF-8")));
+ os.write(sb.toString().getBytes(StandardCharsets.UTF_8));
// If we have aggregated logs for this container,
// output the aggregation logs as well.
ApplicationId appId = containerId.getApplicationAttemptId()
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java
index 0d67c13b64..fd82126b7e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestAuxServices.java
@@ -47,12 +47,12 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
@@ -426,7 +426,7 @@ public void testCustomizedAuxServiceClassPath() throws Exception {
Assert.assertTrue(meta.size() == 1);
for(Entry<String, ByteBuffer> i : meta.entrySet()) {
auxName = i.getKey();
- String auxClassPath = Charsets.UTF_8.decode(i.getValue()).toString();
+ String auxClassPath = StandardCharsets.UTF_8.decode(i.getValue()).toString();
defaultAuxClassPath = new HashSet(Arrays.asList(StringUtils
.getTrimmedStrings(auxClassPath)));
}
@@ -478,7 +478,7 @@ public void testCustomizedAuxServiceClassPath() throws Exception {
Set<String> customizedAuxClassPath = null;
for(Entry<String, ByteBuffer> i : meta.entrySet()) {
Assert.assertTrue(auxName.equals(i.getKey()));
- String classPath = Charsets.UTF_8.decode(i.getValue()).toString();
+ String classPath = StandardCharsets.UTF_8.decode(i.getValue()).toString();
customizedAuxClassPath = new HashSet(Arrays.asList(StringUtils
.getTrimmedStrings(classPath)));
Assert.assertTrue(classPath.contains(testJar.getName()));
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
index 6a9dd6b747..7c4815ff94 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
@@ -35,7 +35,6 @@
import java.io.PrintStream;
import java.io.PrintWriter;
import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
@@ -218,7 +217,7 @@ public void testSpecialCharSymlinks() throws IOException {
//Capture output from prelaunch.out
List<String> output = Files.readAllLines(Paths.get(localLogDir.getAbsolutePath(), ContainerLaunch.CONTAINER_PRE_LAUNCH_STDOUT),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
assert(output.contains("hello"));
symLinkFile = new File(tmpDir, badSymlink);
@@ -549,7 +548,7 @@ public void testInvalidEnvSyntaxDiagnostics() throws IOException {
} catch(ExitCodeException e){
//Capture diagnostics from prelaunch.stderr
List<String> error = Files.readAllLines(Paths.get(localLogDir.getAbsolutePath(), ContainerLaunch.CONTAINER_PRE_LAUNCH_STDERR),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
diagnostics = StringUtils.join("\n", error);
}
Assert.assertTrue(diagnostics.contains(Shell.WINDOWS ?
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficController.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficController.java
index 63c654463f..33b8434c9a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficController.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/resources/TestTrafficController.java
@@ -36,7 +36,7 @@
import java.io.File;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.List;
@@ -120,7 +120,7 @@ private void verifyTrafficControlOperation(PrivilegedOperation op,
Assert.assertTrue(tcCmdsFile.exists());
List<String> tcCmds = Files.readAllLines(tcCmdsFile.toPath(),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
//Verify that the number of commands is the same as expected and verify
//that each command is the same, in sequence
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java
index ea7c213809..c5f508778f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/TestDockerContainerRuntime.java
@@ -76,7 +76,7 @@
import java.io.FileWriter;
import java.io.IOException;
import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.attribute.FileAttribute;
@@ -1992,7 +1992,7 @@ private List<String> getDockerCommandsForDockerStop(
PrivilegedOperation.OperationType.RUN_DOCKER_CMD);
String dockerCommandFile = op.getArguments().get(0);
return Files.readAllLines(Paths.get(dockerCommandFile),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
}
private List getDockerCommandsForSignal(
@@ -2471,7 +2471,7 @@ public void testLaunchContainer(ByteBuffer tokens, File dockerConfigFile)
String dockerCommandFile = args.get(argsCounter++);
List<String> dockerCommands = Files
- .readAllLines(Paths.get(dockerCommandFile), Charset.forName("UTF-8"));
+ .readAllLines(Paths.get(dockerCommandFile), StandardCharsets.UTF_8);
int expected = 14;
int counter = 0;
@@ -2617,7 +2617,7 @@ private List<String> readDockerCommands(int invocations) throws IOException,
String dockerCommandFile = args.get((https) ? 14 : 12);
List<String> dockerCommands = Files.readAllLines(
- Paths.get(dockerCommandFile), Charset.forName("UTF-8"));
+ Paths.get(dockerCommandFile), StandardCharsets.UTF_8);
return dockerCommands;
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/TestDockerCommandExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/TestDockerCommandExecutor.java
index dcba179a28..e5737d9246 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/TestDockerCommandExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/docker/TestDockerCommandExecutor.java
@@ -37,7 +37,7 @@
import org.junit.Test;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
@@ -408,7 +408,7 @@ private List getValidatedDockerCommands(
String dockerCommandFile = op.getArguments().get(0);
List<String> dockerCommandFileContents = Files
.readAllLines(Paths.get(dockerCommandFile),
- Charset.forName("UTF-8"));
+ StandardCharsets.UTF_8);
dockerCommands.addAll(dockerCommandFileContents);
}
return dockerCommands;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/gpu/TestGpuDeviceInformationParser.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/gpu/TestGpuDeviceInformationParser.java
index 28e06b72bc..49c4e2cbb1 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/gpu/TestGpuDeviceInformationParser.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/gpu/TestGpuDeviceInformationParser.java
@@ -26,12 +26,12 @@
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
public class TestGpuDeviceInformationParser {
- private static final String UTF_8 = "UTF-8";
private static final double DELTA = 1e-6;
@Rule
@@ -40,7 +40,7 @@ public class TestGpuDeviceInformationParser {
@Test
public void testParse() throws IOException, YarnException {
File f = new File("src/test/resources/nvidia-smi-sample-output.xml");
- String s = FileUtils.readFileToString(f, UTF_8);
+ String s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
GpuDeviceInformationParser parser = new GpuDeviceInformationParser();
GpuDeviceInformation info = parser.parseXml(s);
@@ -54,7 +54,7 @@ public void testParse() throws IOException, YarnException {
@Test
public void testParseExcerpt() throws IOException, YarnException {
File f = new File("src/test/resources/nvidia-smi-output-excerpt.xml");
- String s = FileUtils.readFileToString(f, UTF_8);
+ String s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
GpuDeviceInformationParser parser = new GpuDeviceInformationParser();
GpuDeviceInformation info = parser.parseXml(s);
@@ -69,7 +69,7 @@ public void testParseExcerpt() throws IOException, YarnException {
public void testParseConsecutivelyWithSameParser()
throws IOException, YarnException {
File f = new File("src/test/resources/nvidia-smi-sample-output.xml");
- String s = FileUtils.readFileToString(f, UTF_8);
+ String s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
for (int i = 0; i < 3; i++) {
GpuDeviceInformationParser parser = new GpuDeviceInformationParser();
@@ -99,7 +99,7 @@ public void testParseInvalidRootElement() throws YarnException {
@Test
public void testParseMissingTags() throws IOException, YarnException {
File f = new File("src/test/resources/nvidia-smi-output-missing-tags.xml");
- String s = FileUtils.readFileToString(f, UTF_8);
+ String s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
GpuDeviceInformationParser parser = new GpuDeviceInformationParser();
GpuDeviceInformation info = parser.parseXml(s);
@@ -119,7 +119,7 @@ public void testParseMissingTags() throws IOException, YarnException {
@Test
public void testParseMissingInnerTags() throws IOException, YarnException {
File f =new File("src/test/resources/nvidia-smi-output-missing-tags2.xml");
- String s = FileUtils.readFileToString(f, UTF_8);
+ String s = FileUtils.readFileToString(f, StandardCharsets.UTF_8);
GpuDeviceInformationParser parser = new GpuDeviceInformationParser();
GpuDeviceInformation info = parser.parseXml(s);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAuditLogger.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAuditLogger.java
index 1b3a49433f..4d81940198 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAuditLogger.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAuditLogger.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.yarn.server.resourcemanager;
-import java.io.UnsupportedEncodingException;
import java.net.InetAddress;
+import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -160,12 +160,8 @@ private static void appendCallerContext(StringBuilder sb, CallerContext callerCo
}
if (signature != null) {
- try {
- String sigStr = new String(signature, "UTF-8");
- add(Keys.CALLERSIGNATURE, sigStr, sb);
- } catch (UnsupportedEncodingException e) {
- // ignore this signature
- }
+ String sigStr = new String(signature, StandardCharsets.UTF_8);
+ add(Keys.CALLERSIGNATURE, sigStr, sb);
}
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
index 61442e4c29..a3d529c102 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
@@ -149,7 +149,7 @@
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.security.SecureRandom;
import java.util.ArrayList;
@@ -422,7 +422,7 @@ public ZKCuratorManager createAndStartZKManager(Configuration
String defaultFencingAuth =
zkRootNodeUsername + ":" + zkRootNodePassword;
byte[] defaultFencingAuthData =
- defaultFencingAuth.getBytes(Charset.forName("UTF-8"));
+ defaultFencingAuth.getBytes(StandardCharsets.UTF_8);
String scheme = new DigestAuthenticationProvider().getScheme();
AuthInfo authInfo = new AuthInfo(scheme, defaultFencingAuthData);
authInfos.add(authInfo);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/invariants/MetricsInvariantChecker.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/invariants/MetricsInvariantChecker.java
index 2ea44d2aa6..756c0b7d27 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/invariants/MetricsInvariantChecker.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/invariants/MetricsInvariantChecker.java
@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.yarn.server.resourcemanager.monitor.invariants;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.io.Files;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metrics2.AbstractMetric;
@@ -39,6 +38,7 @@
import javax.script.SimpleBindings;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -113,7 +113,7 @@ public void init(Configuration config, RMContext rmContext,
StringBuilder sb = new StringBuilder();
try {
List<String> tempInv =
- Files.readLines(new File(invariantFile), Charsets.UTF_8);
+ Files.readLines(new File(invariantFile), StandardCharsets.UTF_8);
boolean first = true;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
index be5494a43d..650e83983b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesAppsModification.java
@@ -30,6 +30,7 @@
import java.io.StringReader;
import java.io.StringWriter;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -925,7 +926,7 @@ public void testAppSubmit(String acceptMedia, String contentMedia)
Text key = new Text("secret1");
assertTrue("Secrets missing from credentials object", cs
.getAllSecretKeys().contains(key));
- assertEquals("mysecret", new String(cs.getSecretKey(key), "UTF-8"));
+ assertEquals("mysecret", new String(cs.getSecretKey(key), StandardCharsets.UTF_8));
// Check LogAggregationContext
ApplicationSubmissionContext asc = app.getApplicationSubmissionContext();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLogInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLogInfo.java
index 22495eaf08..6a4527c13c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLogInfo.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/test/java/org/apache/hadoop/yarn/server/timeline/TestLogInfo.java
@@ -40,7 +40,7 @@
import java.io.IOException;
import java.io.OutputStream;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
@@ -199,7 +199,7 @@ private void writeBrokenFile(Path logPath) throws IOException {
try {
String broken = "{ broken { [[]} broken";
out = PluginStoreTestUtils.createLogFile(logPath, fs);
- out.write(broken.getBytes(Charset.forName("UTF-8")));
+ out.write(broken.getBytes(StandardCharsets.UTF_8));
out.close();
out = null;
} finally {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/DocumentStoreTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/DocumentStoreTestUtils.java
index 5d442152fe..cdf2e181bd 100755
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/DocumentStoreTestUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-documentstore/src/test/java/org/apache/hadoop/yarn/server/timelineservice/documentstore/DocumentStoreTestUtils.java
@@ -25,6 +25,7 @@
import org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.flowrun.FlowRunDocument;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import com.fasterxml.jackson.core.type.TypeReference;
@@ -41,7 +42,7 @@ public static List bakeTimelineEntities()
throws IOException {
String jsonStr = IOUtils.toString(
DocumentStoreTestUtils.class.getClassLoader().getResourceAsStream(
- "documents/timeline-entities.json"), "UTF-8");
+ "documents/timeline-entities.json"), StandardCharsets.UTF_8);
return JsonUtils.fromJson(jsonStr,
new TypeReference<List<TimelineEntity>>(){});
}
@@ -50,7 +51,7 @@ public static List bakeYarnAppTimelineEntities()
throws IOException {
String jsonStr = IOUtils.toString(
DocumentStoreTestUtils.class.getClassLoader().getResourceAsStream(
- "documents/test-timeline-entities-doc.json"), "UTF-8");
+ "documents/test-timeline-entities-doc.json"), StandardCharsets.UTF_8);
return JsonUtils.fromJson(jsonStr,
new TypeReference<List<TimelineEntityDocument>>() {});
}
@@ -59,7 +60,7 @@ public static TimelineEntityDocument bakeTimelineEntityDoc()
throws IOException {
String jsonStr = IOUtils.toString(
DocumentStoreTestUtils.class.getClassLoader().getResourceAsStream(
- "documents/timeline-app-doc.json"), "UTF-8");
+ "documents/timeline-app-doc.json"), StandardCharsets.UTF_8);
return JsonUtils.fromJson(jsonStr,
new TypeReference<TimelineEntityDocument>() {});
}
@@ -67,7 +68,7 @@ public static TimelineEntityDocument bakeTimelineEntityDoc()
public static FlowActivityDocument bakeFlowActivityDoc() throws IOException {
String jsonStr = IOUtils.toString(
DocumentStoreTestUtils.class.getClassLoader().getResourceAsStream(
- "documents/flowactivity-doc.json"), "UTF-8");
+ "documents/flowactivity-doc.json"), StandardCharsets.UTF_8);
return JsonUtils.fromJson(jsonStr,
new TypeReference<FlowActivityDocument>() {});
}
@@ -75,7 +76,7 @@ public static FlowActivityDocument bakeFlowActivityDoc() throws IOException {
public static FlowRunDocument bakeFlowRunDoc() throws IOException {
String jsonStr = IOUtils.toString(
DocumentStoreTestUtils.class.getClassLoader().getResourceAsStream(
- "documents/flowrun-doc.json"), "UTF-8");
+ "documents/flowrun-doc.json"), StandardCharsets.UTF_8);
return JsonUtils.fromJson(jsonStr,
new TypeReference<FlowRunDocument>(){});
}
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java
index dff21a31da..2e771fc77e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineReaderImpl.java
@@ -23,7 +23,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashSet;
@@ -174,7 +174,7 @@ private String getFlowRunPath(String userId, String clusterId,
APP_FLOW_MAPPING_FILE);
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(
- fs.open(appFlowMappingFilePath), Charset.forName("UTF-8")));
+ fs.open(appFlowMappingFilePath), StandardCharsets.UTF_8));
CSVParser parser = new CSVParser(reader, csvFormat)) {
for (CSVRecord record : parser.getRecords()) {
if (record.size() < 4) {
@@ -300,7 +300,7 @@ public int compare(Long l1, Long l2) {
}
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(fs.open(entityFile),
- Charset.forName("UTF-8")))) {
+ StandardCharsets.UTF_8))) {
TimelineEntity entity = readEntityFromFile(reader);
if (!entity.getType().equals(entityType)) {
continue;
@@ -402,7 +402,7 @@ public TimelineEntity getEntity(TimelineReaderContext context,
}
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(
- fs.open(entityFilePath), Charset.forName("UTF-8")))) {
+ fs.open(entityFilePath), StandardCharsets.UTF_8))) {
TimelineEntity entity = readEntityFromFile(reader);
return createEntityToBeReturned(
entity, dataToRetrieve.getFieldsToRetrieve());
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java
index 4b96f4ddd0..2f7007165a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/FileSystemTimelineWriterImpl.java
@@ -20,6 +20,7 @@
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -139,7 +140,7 @@ private synchronized void writeInternal(String clusterId, String userId,
byte[] record = new StringBuilder()
.append(TimelineUtils.dumpTimelineRecordtoJSON(entity))
- .append("\n").toString().getBytes("UTF-8");
+ .append("\n").toString().getBytes(StandardCharsets.UTF_8);
writeFileWithRetries(filePath, record);
} catch (Exception ioe) {
LOG.warn("Interrupted operation:{}", ioe.getMessage());
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
index 56adabe8f3..7817362885 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
@@ -30,6 +30,7 @@
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.Enumeration;
@@ -287,7 +288,7 @@ private void proxyLink(final HttpServletRequest req,
StringBuilder sb = new StringBuilder();
BufferedReader reader =
new BufferedReader(
- new InputStreamReader(req.getInputStream(), "UTF-8"));
+ new InputStreamReader(req.getInputStream(), StandardCharsets.UTF_8));
String line;
while ((line = reader.readLine()) != null) {
sb.append(line);
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
index 1d0ca00e7e..49b6a7954b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
@@ -30,6 +30,7 @@
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.URL;
+import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
@@ -522,7 +523,7 @@ private String readInputStream(InputStream input) throws Exception {
while ((read = input.read(buffer)) >= 0) {
data.write(buffer, 0, read);
}
- return new String(data.toByteArray(), "UTF-8");
+ return new String(data.toByteArray(), StandardCharsets.UTF_8);
}
private boolean isResponseCookiePresent(HttpURLConnection proxyConn,
diff --git a/pom.xml b/pom.xml
index 0c175d8da0..13e3aec63e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -225,6 +225,14 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
<bannedImport>org.apache.hadoop.thirdparty.com.google.common.io.BaseEncoding.**</bannedImport>
</bannedImports>
</restrictImports>
+ <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+   <includeTestCode>true</includeTestCode>
+   <reason>Use java.nio.charset.StandardCharsets rather than Guava provided Charsets</reason>
+   <bannedImports>
+     <bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Charsets</bannedImport>
+     <bannedImport>org.apache.hadoop.thirdparty.com.google.common.base.Charsets.**</bannedImport>
+   </bannedImports>
+ </restrictImports>
<restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
<includeTestCode>true</includeTestCode>
<reason>Use alternative to Guava provided Optional</reason>