HADOOP-18957. Use StandardCharsets.UTF_8 (#6231). Contributed by PJ Fanning.

Signed-off-by: Ayush Saxena <ayushsaxena@apache.org>
Author: PJ Fanning, 2023-11-20 19:14:48 +01:00 (committed by GitHub)
parent 616e381c9f
commit f609460bda
224 changed files with 671 additions and 669 deletions
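
The pattern applied across all 224 files is the same: replace runtime charset lookups (Charset.forName("UTF-8"), the String-named getBytes/new String overloads, and Guava's Charsets.UTF_8, whose own documentation recommends the JDK equivalent) with the java.nio.charset.StandardCharsets.UTF_8 constant, which is guaranteed to exist on every JVM. A minimal before/after sketch of the pattern (a hypothetical class, not taken from any one file in this diff):

    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    public class Utf8Migration {
      // Before: the charset is looked up by name on every call, and the
      // String overload forces callers to handle a checked exception.
      static byte[] before(String s) throws UnsupportedEncodingException {
        return s.getBytes("UTF-8");
      }

      // After: a constant reference; the Charset overload declares no
      // checked exception, so wrapper try/catch blocks can be dropped.
      static byte[] after(String s) {
        return s.getBytes(StandardCharsets.UTF_8);
      }
    }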

@@ -19,7 +19,7 @@
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
/**
* Example that uses <code>AuthenticatedURL</code>.
@@ -42,7 +42,7 @@ public static void main(String[] args) {
if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
BufferedReader reader = new BufferedReader(
new InputStreamReader(
-conn.getInputStream(), Charset.forName("UTF-8")));
+conn.getInputStream(), StandardCharsets.UTF_8));
String line = reader.readLine();
while (line != null) {
System.out.println(line);

@@ -23,7 +23,7 @@
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Properties;
@@ -53,8 +53,6 @@ public class PseudoAuthenticationHandler implements AuthenticationHandler {
*/
public static final String ANONYMOUS_ALLOWED = TYPE + ".anonymous.allowed";
-private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
private static final String PSEUDO_AUTH = "PseudoAuth";
private boolean acceptAnonymous;
@@ -146,7 +144,7 @@ private String getUserName(HttpServletRequest request) {
if(queryString == null || queryString.length() == 0) {
return null;
}
-List<NameValuePair> list = URLEncodedUtils.parse(queryString, UTF8_CHARSET);
+List<NameValuePair> list = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
if (list != null) {
for (NameValuePair nv : list) {
if (PseudoAuthenticator.USER_NAME.equals(nv.getName())) {

@@ -13,7 +13,7 @@
*/
package org.apache.hadoop.security.authentication.util;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Properties;
import javax.servlet.ServletContext;
@@ -38,7 +38,7 @@ public void init(Properties config, ServletContext servletContext,
long tokenValidity) throws Exception {
String signatureSecret = config.getProperty(
AuthenticationFilter.SIGNATURE_SECRET, null);
-secret = signatureSecret.getBytes(Charset.forName("UTF-8"));
+secret = signatureSecret.getBytes(StandardCharsets.UTF_8);
secrets = new byte[][]{secret};
}

@@ -13,7 +13,7 @@
*/
package org.apache.hadoop.security.authentication.util;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Properties;
import java.util.Random;
import javax.servlet.ServletContext;
@@ -140,11 +140,11 @@ public void testUpgradeChangeSecretLength() throws Exception {
long seed = System.currentTimeMillis();
Random rand = new Random(seed);
byte[] secret2 = Long.toString(rand.nextLong())
-.getBytes(Charset.forName("UTF-8"));
+.getBytes(StandardCharsets.UTF_8);
byte[] secret1 = Long.toString(rand.nextLong())
-.getBytes(Charset.forName("UTF-8"));
+.getBytes(StandardCharsets.UTF_8);
byte[] secret3 = Long.toString(rand.nextLong())
-.getBytes(Charset.forName("UTF-8"));
+.getBytes(StandardCharsets.UTF_8);
rand = new Random(seed);
// Secrets 4 and 5 get thrown away by ZK when the new secret provider tries
// to init
@@ -238,7 +238,7 @@ private class OldMockZKSignerSecretProvider
@Override
protected byte[] generateRandomSecret() {
-return Long.toString(rand.nextLong()).getBytes(Charset.forName("UTF-8"));
+return Long.toString(rand.nextLong()).getBytes(StandardCharsets.UTF_8);
}
}

@@ -43,6 +43,7 @@
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
@@ -82,7 +83,6 @@
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.commons.collections.map.UnmodifiableMap;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -2903,7 +2903,7 @@ public Reader getConfResourceAsReader(String name) {
LOG.info("found resource " + name + " at " + url);
}
-return new InputStreamReader(url.openStream(), Charsets.UTF_8);
+return new InputStreamReader(url.openStream(), StandardCharsets.UTF_8);
} catch (Exception e) {
return null;
}

@@ -899,7 +899,7 @@ private static void runCommandOnStream(
try (BufferedReader reader =
new BufferedReader(
new InputStreamReader(process.getInputStream(),
-Charset.forName("UTF-8")))) {
+StandardCharsets.UTF_8))) {
String line;
while((line = reader.readLine()) != null) {
LOG.debug(line);
@@ -922,7 +922,7 @@ private static void runCommandOnStream(
try (BufferedReader reader =
new BufferedReader(
new InputStreamReader(process.getErrorStream(),
-Charset.forName("UTF-8")))) {
+StandardCharsets.UTF_8))) {
String line;
while((line = reader.readLine()) != null) {
LOG.debug(line);

@@ -20,6 +20,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
@@ -30,7 +31,6 @@
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -104,7 +104,7 @@ public CompletableFuture<UploadHandle> startUpload(Path filePath)
fs.mkdirs(collectorPath, FsPermission.getDirDefault());
ByteBuffer byteBuffer = ByteBuffer.wrap(
-collectorPath.toString().getBytes(Charsets.UTF_8));
+collectorPath.toString().getBytes(StandardCharsets.UTF_8));
return BBUploadHandle.from(byteBuffer);
});
}
@@ -130,7 +130,7 @@ private PartHandle innerPutPart(Path filePath,
byte[] uploadIdByteArray = uploadId.toByteArray();
checkUploadId(uploadIdByteArray);
Path collectorPath = new Path(new String(uploadIdByteArray, 0,
-uploadIdByteArray.length, Charsets.UTF_8));
+uploadIdByteArray.length, StandardCharsets.UTF_8));
Path partPath =
mergePaths(collectorPath, mergePaths(new Path(Path.SEPARATOR),
new Path(partNumber + ".part")));
@@ -149,7 +149,7 @@ private PartHandle innerPutPart(Path filePath,
cleanupWithLogger(LOG, inputStream);
}
return BBPartHandle.from(ByteBuffer.wrap(
-partPath.toString().getBytes(Charsets.UTF_8)));
+partPath.toString().getBytes(StandardCharsets.UTF_8)));
}
private Path createCollectorPath(Path filePath) {
@@ -210,7 +210,7 @@ private PathHandle innerComplete(
.map(pair -> {
byte[] byteArray = pair.getValue().toByteArray();
return new Path(new String(byteArray, 0, byteArray.length,
-Charsets.UTF_8));
+StandardCharsets.UTF_8));
})
.collect(Collectors.toList());
@@ -223,7 +223,7 @@ private PathHandle innerComplete(
"Duplicate PartHandles");
byte[] uploadIdByteArray = multipartUploadId.toByteArray();
Path collectorPath = new Path(new String(uploadIdByteArray, 0,
-uploadIdByteArray.length, Charsets.UTF_8));
+uploadIdByteArray.length, StandardCharsets.UTF_8));
boolean emptyFile = totalPartsLen(partHandles) == 0;
if (emptyFile) {
@@ -250,7 +250,7 @@ public CompletableFuture<Void> abort(UploadHandle uploadId,
byte[] uploadIdByteArray = uploadId.toByteArray();
checkUploadId(uploadIdByteArray);
Path collectorPath = new Path(new String(uploadIdByteArray, 0,
-uploadIdByteArray.length, Charsets.UTF_8));
+uploadIdByteArray.length, StandardCharsets.UTF_8));
return FutureIO.eval(() -> {
// force a check for a file existing; raises FNFE if not found

@@ -22,6 +22,7 @@
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Iterator;
import java.util.LinkedList;
@@ -114,7 +115,7 @@ protected void processArguments(LinkedList<PathData> items)
private void writeDelimiter(FSDataOutputStream out) throws IOException {
if (delimiter != null) {
-out.write(delimiter.getBytes("UTF-8"));
+out.write(delimiter.getBytes(StandardCharsets.UTF_8));
}
}

@@ -120,7 +120,7 @@ public static String quoteHtmlChars(String item) {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
try {
quoteHtmlChars(buffer, bytes, 0, bytes.length);
-return buffer.toString("UTF-8");
+return new String(buffer.toByteArray(), StandardCharsets.UTF_8);
} catch (IOException ioe) {
// Won't happen, since it is a bytearrayoutputstream
return null;
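
A side effect worth noting in the HtmlQuoting hunk above: ByteArrayOutputStream.toString(String) declares a checked UnsupportedEncodingException, while decoding the byte array through a Charset declares nothing, so the surviving catch block is now purely defensive. A minimal sketch of the replacement form (class and variable names are illustrative only):

    import java.io.ByteArrayOutputStream;
    import java.nio.charset.StandardCharsets;

    public class QuotingSketch {
      static String drain(ByteArrayOutputStream buffer) {
        // Old form: buffer.toString("UTF-8") declares a checked
        // UnsupportedEncodingException; the Charset-based decode does not.
        return new String(buffer.toByteArray(), StandardCharsets.UTF_8);
        // On Java 10+, buffer.toString(StandardCharsets.UTF_8) is equivalent
        // and avoids the intermediate array copy.
      }
    }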

@@ -20,7 +20,6 @@
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import java.nio.charset.UnsupportedCharsetException;
import java.util.ArrayList;
import org.apache.commons.codec.binary.Base64;
@@ -75,14 +74,10 @@ public DefaultStringifier(Configuration conf, Class<T> c) {
@Override
public T fromString(String str) throws IOException {
-try {
-byte[] bytes = Base64.decodeBase64(str.getBytes("UTF-8"));
+byte[] bytes = Base64.decodeBase64(str.getBytes(StandardCharsets.UTF_8));
inBuf.reset(bytes, bytes.length);
T restored = deserializer.deserialize(null);
return restored;
-} catch (UnsupportedCharsetException ex) {
-throw new IOException(ex.toString());
-}
}
@Override
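
The DefaultStringifier hunk shows the second benefit of the constant: the old code guarded a by-name charset lookup with a catch block, while String.getBytes(Charset) has no failure path, so the whole wrapper disappears. A sketch of the simplified decode step (a hypothetical helper, under the imports the file already uses):

    import java.nio.charset.StandardCharsets;
    import org.apache.commons.codec.binary.Base64;

    public class DecodeSketch {
      static byte[] decode(String str) {
        // getBytes(Charset) declares no exception and UTF-8 always exists,
        // so no UnsupportedCharsetException handler is needed.
        return Base64.decodeBase64(str.getBytes(StandardCharsets.UTF_8));
      }
    }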

@@ -25,6 +25,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
+import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
@@ -86,12 +87,12 @@ public static int writeCompressedByteArray(DataOutput out,
public static String readCompressedString(DataInput in) throws IOException {
byte[] bytes = readCompressedByteArray(in);
if (bytes == null) return null;
-return new String(bytes, "UTF-8");
+return new String(bytes, StandardCharsets.UTF_8);
}
public static int writeCompressedString(DataOutput out, String s) throws IOException {
-return writeCompressedByteArray(out, (s != null) ? s.getBytes("UTF-8") : null);
+return writeCompressedByteArray(out, (s != null) ? s.getBytes(StandardCharsets.UTF_8) : null);
}
/*
@@ -103,7 +104,7 @@ public static int writeCompressedString(DataOutput out, String s) throws IOExce
*/
public static void writeString(DataOutput out, String s) throws IOException {
if (s != null) {
-byte[] buffer = s.getBytes("UTF-8");
+byte[] buffer = s.getBytes(StandardCharsets.UTF_8);
int len = buffer.length;
out.writeInt(len);
out.write(buffer, 0, len);

@@ -23,6 +23,7 @@
import java.io.PrintWriter;
import java.net.URL;
import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
import javax.net.ssl.HttpsURLConnection;
@@ -33,7 +34,6 @@
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -297,7 +297,7 @@ private void process(String urlString) throws Exception {
// read from the servlet
BufferedReader in = new BufferedReader(
-new InputStreamReader(connection.getInputStream(), Charsets.UTF_8));
+new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8));
for (String line;;) {
line = in.readLine();
if (line == null) {

@@ -23,6 +23,8 @@
import java.net.URL;
import java.net.URLClassLoader;
import static java.security.AccessController.*;
+import java.nio.charset.StandardCharsets;
import java.security.PrivilegedAction;
import java.util.Iterator;
import java.util.Map;
@@ -289,7 +291,7 @@ static String toString(Configuration c) {
PropertiesConfiguration tmp = new PropertiesConfiguration();
tmp.copy(c);
tmp.write(pw);
-return buffer.toString("UTF-8");
+return new String(buffer.toByteArray(), StandardCharsets.UTF_8);
} catch (Exception e) {
throw new MetricsConfigException(e);
}

@@ -18,6 +18,7 @@
package org.apache.hadoop.security;
+import java.nio.charset.StandardCharsets;
import java.security.Provider;
import java.util.Map;
@@ -82,7 +83,7 @@ public byte[] evaluateResponse(byte[] response) throws SaslException {
try {
String payload;
try {
-payload = new String(response, "UTF-8");
+payload = new String(response, StandardCharsets.UTF_8);
} catch (Exception e) {
throw new IllegalArgumentException("Received corrupt response", e);
}

@@ -24,7 +24,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.ProviderUtils;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -33,6 +32,7 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.KeyStoreException;
@@ -199,7 +199,7 @@ public CredentialEntry getCredentialEntry(String alias)
public static char[] bytesToChars(byte[] bytes) throws IOException {
String pass;
-pass = new String(bytes, Charsets.UTF_8);
+pass = new String(bytes, StandardCharsets.UTF_8);
return pass.toCharArray();
}
@@ -268,7 +268,7 @@ CredentialEntry innerSetCredential(String alias, char[] material)
writeLock.lock();
try {
keyStore.setKeyEntry(alias,
-new SecretKeySpec(new String(material).getBytes("UTF-8"),
+new SecretKeySpec(new String(material).getBytes(StandardCharsets.UTF_8),
getAlgorithm()), password, null);
} catch (KeyStoreException e) {
throw new IOException("Can't store credential " + alias + " in " + this,

@@ -70,7 +70,7 @@ public synchronized CredentialEntry createCredentialEntry(String name, char[] cr
" already exists in " + this);
}
credentials.addSecretKey(new Text(name),
-new String(credential).getBytes("UTF-8"));
+new String(credential).getBytes(StandardCharsets.UTF_8));
return new CredentialEntry(name, credential);
}

@@ -51,7 +51,7 @@
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.security.Principal;
import java.util.Enumeration;
import java.util.List;
@@ -94,8 +94,6 @@ public class DelegationTokenAuthenticationFilter
public static final String DELEGATION_TOKEN_SECRET_MANAGER_ATTR =
"hadoop.http.delegation-token-secret-manager";
-private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
private static final ThreadLocal<UserGroupInformation> UGI_TL =
new ThreadLocal<UserGroupInformation>();
public static final String PROXYUSER_PREFIX = "proxyuser";
@@ -226,7 +224,7 @@ static String getDoAs(HttpServletRequest request) {
if (queryString == null) {
return null;
}
-List<NameValuePair> list = URLEncodedUtils.parse(queryString, UTF8_CHARSET);
+List<NameValuePair> list = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
if (list != null) {
for (NameValuePair nv : list) {
if (DelegationTokenAuthenticatedURL.DO_AS.

@@ -23,7 +23,7 @@
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.List;
/**
@@ -31,7 +31,6 @@
*/
@InterfaceAudience.Private
class ServletUtils {
-private static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
/**
* Extract a query string parameter without triggering http parameters
@@ -49,7 +48,7 @@ public static String getParameter(HttpServletRequest request, String name)
if (queryString == null) {
return null;
}
-List<NameValuePair> list = URLEncodedUtils.parse(queryString, UTF8_CHARSET);
+List<NameValuePair> list = URLEncodedUtils.parse(queryString, StandardCharsets.UTF_8);
if (list != null) {
for (NameValuePair nv : list) {
if (name.equals(nv.getName())) {

@@ -22,7 +22,7 @@
import java.io.InputStreamReader;
import java.io.IOException;
import java.math.BigInteger;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashMap;
@@ -247,7 +247,7 @@ private void readProcMemInfoFile(boolean readAgain) {
try {
fReader = new InputStreamReader(
Files.newInputStream(Paths.get(procfsMemFile)),
-Charset.forName("UTF-8"));
+StandardCharsets.UTF_8);
in = new BufferedReader(fReader);
} catch (IOException f) {
// shouldn't happen....
@@ -319,7 +319,7 @@ private void readProcCpuInfoFile() {
try {
fReader =
new InputStreamReader(Files.newInputStream(Paths.get(procfsCpuFile)),
-Charset.forName("UTF-8"));
+StandardCharsets.UTF_8);
in = new BufferedReader(fReader);
} catch (IOException f) {
// shouldn't happen....
@@ -380,7 +380,7 @@ private void readProcStatFile() {
try {
fReader = new InputStreamReader(
Files.newInputStream(Paths.get(procfsStatFile)),
-Charset.forName("UTF-8"));
+StandardCharsets.UTF_8);
in = new BufferedReader(fReader);
} catch (IOException f) {
// shouldn't happen....
@@ -435,7 +435,7 @@ private void readProcNetInfoFile() {
try {
fReader = new InputStreamReader(
Files.newInputStream(Paths.get(procfsNetFile)),
-Charset.forName("UTF-8"));
+StandardCharsets.UTF_8);
in = new BufferedReader(fReader);
} catch (IOException f) {
return;
@@ -490,7 +490,7 @@ private void readProcDisksInfoFile() {
try {
in = new BufferedReader(new InputStreamReader(
Files.newInputStream(Paths.get(procfsDisksFile)),
-Charset.forName("UTF-8")));
+StandardCharsets.UTF_8));
} catch (IOException f) {
return;
}
@@ -558,7 +558,7 @@ int readDiskBlockInformation(String diskName, int defSector) {
try {
in = new BufferedReader(new InputStreamReader(
Files.newInputStream(Paths.get(procfsDiskSectorFile)),
-Charset.forName("UTF-8")));
+StandardCharsets.UTF_8));
} catch (IOException f) {
return defSector;
}

@@ -19,6 +19,7 @@
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -27,7 +28,6 @@
import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Id;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Splitter;
import org.apache.hadoop.thirdparty.com.google.common.io.Files;
@@ -148,7 +148,7 @@ public static List<ZKAuthInfo> parseAuth(String authString) throws
"Auth '" + comp + "' not of expected form scheme:auth");
}
ret.add(new ZKAuthInfo(parts[0],
-parts[1].getBytes(Charsets.UTF_8)));
+parts[1].getBytes(StandardCharsets.UTF_8)));
}
return ret;
}
@@ -172,7 +172,7 @@ public static String resolveConfIndirection(String valInConf)
return valInConf;
}
String path = valInConf.substring(1).trim();
-return Files.asCharSource(new File(path), Charsets.UTF_8).read().trim();
+return Files.asCharSource(new File(path), StandardCharsets.UTF_8).read().trim();
}
/**

@@ -18,7 +18,7 @@
package org.apache.hadoop.util.curator;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
@@ -260,7 +260,7 @@ public byte[] getData(final String path, Stat stat) throws Exception {
public String getStringData(final String path) throws Exception {
byte[] bytes = getData(path);
if (bytes != null) {
-return new String(bytes, Charset.forName("UTF-8"));
+return new String(bytes, StandardCharsets.UTF_8);
}
return null;
}
@@ -275,7 +275,7 @@ public String getStringData(final String path) throws Exception {
public String getStringData(final String path, Stat stat) throws Exception {
byte[] bytes = getData(path, stat);
if (bytes != null) {
-return new String(bytes, Charset.forName("UTF-8"));
+return new String(bytes, StandardCharsets.UTF_8);
}
return null;
}
@@ -299,7 +299,7 @@ public void setData(String path, byte[] data, int version) throws Exception {
* @throws Exception If it cannot contact Zookeeper.
*/
public void setData(String path, String data, int version) throws Exception {
-byte[] bytes = data.getBytes(Charset.forName("UTF-8"));
+byte[] bytes = data.getBytes(StandardCharsets.UTF_8);
setData(path, bytes, version);
}

@@ -60,7 +60,7 @@ public class TestCommonConfigurationFields extends TestConfigurationFieldsBase {
@SuppressWarnings("deprecation")
@Override
public void initializeMemberVariables() {
-xmlFilename = new String("core-default.xml");
+xmlFilename = "core-default.xml";
configurationClasses = new Class[] {
CommonConfigurationKeys.class,
CommonConfigurationKeysPublic.class,

@@ -698,7 +698,7 @@ public void testUnTar() throws Exception {
OutputStream os = new FileOutputStream(simpleTar);
try (TarOutputStream tos = new TarOutputStream(os)) {
TarEntry te = new TarEntry("/bar/foo");
-byte[] data = "some-content".getBytes("UTF-8");
+byte[] data = "some-content".getBytes(StandardCharsets.UTF_8);
te.setSize(data.length);
tos.putNextEntry(te);
tos.write(data);
@@ -782,7 +782,7 @@ public void testUnZip() throws Exception {
ZipArchiveList.add(new ZipArchiveEntry("foo_" + i));
ZipArchiveEntry archiveEntry = ZipArchiveList.get(i);
archiveEntry.setUnixMode(count += 0100);
-byte[] data = "some-content".getBytes("UTF-8");
+byte[] data = "some-content".getBytes(StandardCharsets.UTF_8);
archiveEntry.setSize(data.length);
tos.putArchiveEntry(archiveEntry);
tos.write(data);

@@ -30,6 +30,7 @@
import java.io.File;
import java.io.IOException;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;
@@ -117,7 +118,7 @@ private void writeVersionToMasterIndexImpl(int version, Path masterIndexPath) th
final FSDataOutputStream fsdos = localFileSystem.create(masterIndexPath);
try {
String versionString = version + "\n";
-fsdos.write(versionString.getBytes("UTF-8"));
+fsdos.write(versionString.getBytes(StandardCharsets.UTF_8));
fsdos.flush();
} finally {
fsdos.close();

@@ -22,13 +22,13 @@
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.CompletableFuture;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.assertj.core.api.Assertions;
import org.junit.Assume;
import org.junit.Test;
@@ -596,8 +596,8 @@ public void testMultipartUploadAbort() throws Exception {
abortUpload(uploadHandle, file);
String contents = "ThisIsPart49\n";
-int len = contents.getBytes(Charsets.UTF_8).length;
-InputStream is = IOUtils.toInputStream(contents, "UTF-8");
+int len = contents.getBytes(StandardCharsets.UTF_8).length;
+InputStream is = IOUtils.toInputStream(contents, StandardCharsets.UTF_8);
intercept(IOException.class,
() -> awaitFuture(
@@ -624,7 +624,7 @@ public void testMultipartUploadAbort() throws Exception {
public void testAbortUnknownUpload() throws Exception {
Path file = methodPath();
ByteBuffer byteBuffer = ByteBuffer.wrap(
-"invalid-handle".getBytes(Charsets.UTF_8));
+"invalid-handle".getBytes(StandardCharsets.UTF_8));
intercept(FileNotFoundException.class,
() -> abortUpload(BBUploadHandle.from(byteBuffer), file));
}

@@ -45,6 +45,7 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -806,7 +807,7 @@ public static String readUTF8(FileSystem fs,
try (FSDataInputStream in = fs.open(path)) {
byte[] buf = new byte[length];
in.readFully(0, buf);
-return new String(buf, "UTF-8");
+return new String(buf, StandardCharsets.UTF_8);
}
}

@@ -23,6 +23,7 @@
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
@@ -30,7 +31,6 @@
import org.junit.Before;
import org.junit.Test;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -97,8 +97,8 @@ private Object runTool(String ... args) throws Exception {
outBytes.reset();
LOG.info("Running: HAAdmin " + Joiner.on(" ").join(args));
int ret = tool.run(args);
-errOutput = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
-output = new String(outBytes.toByteArray(), Charsets.UTF_8);
+errOutput = new String(errOutBytes.toByteArray(), StandardCharsets.UTF_8);
+output = new String(outBytes.toByteArray(), StandardCharsets.UTF_8);
LOG.info("Err_output:\n" + errOutput + "\nOutput:\n" + output);
return ret;
}

@@ -27,6 +27,7 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
+import java.nio.charset.StandardCharsets;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.anyInt;
@@ -90,6 +91,6 @@ protected boolean isActive() {
private String doGet() throws IOException {
servlet.doGet(req, resp);
-return new String(respOut.toByteArray(), "UTF-8");
+return new String(respOut.toByteArray(), StandardCharsets.UTF_8);
}
}

@@ -23,6 +23,7 @@
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -57,7 +58,7 @@ public static void makeTestFile() throws Exception {
for (File f : new File[] { testFilePathIs, testFilePathRaf,
testFilePathFadis }) {
FileOutputStream fos = new FileOutputStream(f);
-fos.write("hello".getBytes("UTF-8"));
+fos.write("hello".getBytes(StandardCharsets.UTF_8));
fos.close();
}

@@ -22,8 +22,8 @@
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
+import java.nio.charset.StandardCharsets;
import java.util.Random;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.primitives.Bytes;
import org.junit.Test;
@@ -105,7 +105,7 @@ public void testCoding() throws Exception {
ByteBuffer bb = Text.encode(before);
byte[] utf8Text = bb.array();
-byte[] utf8Java = before.getBytes("UTF-8");
+byte[] utf8Java = before.getBytes(StandardCharsets.UTF_8);
assertEquals(0, WritableComparator.compareBytes(
utf8Text, 0, bb.limit(),
utf8Java, 0, utf8Java.length));
@@ -392,7 +392,7 @@ public void testReadWriteOperations() {
@Test
public void testReadWithKnownLength() throws IOException {
String line = "hello world";
-byte[] inputBytes = line.getBytes(Charsets.UTF_8);
+byte[] inputBytes = line.getBytes(StandardCharsets.UTF_8);
DataInputBuffer in = new DataInputBuffer();
Text text = new Text();

@@ -23,6 +23,7 @@
import java.io.IOException;
import java.io.UTFDataFormatException;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.util.Random;
import org.apache.hadoop.test.GenericTestUtils;
@@ -110,7 +111,7 @@ public void testNullEncoding() throws Exception {
DataOutputBuffer dob = new DataOutputBuffer();
new UTF8(s).write(dob);
-assertEquals(s, new String(dob.getData(), 2, dob.getLength()-2, "UTF-8"));
+assertEquals(s, new String(dob.getData(), 2, dob.getLength()-2, StandardCharsets.UTF_8));
}
/**
@@ -125,7 +126,7 @@ public void testNonBasicMultilingualPlane() throws Exception {
String catFace = "\uD83D\uDC31";
// This encodes to 4 bytes in UTF-8:
-byte[] encoded = catFace.getBytes("UTF-8");
+byte[] encoded = catFace.getBytes(StandardCharsets.UTF_8);
assertEquals(4, encoded.length);
assertEquals("f09f90b1", StringUtils.byteToHexString(encoded));

@@ -23,6 +23,7 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
import org.apache.hadoop.io.IOUtils;
@@ -113,7 +114,7 @@ public void testFileSink() throws IOException {
is = new FileInputStream(outFile);
baos = new ByteArrayOutputStream((int)outFile.length());
IOUtils.copyBytes(is, baos, 1024, true);
-outFileContent = new String(baos.toByteArray(), "UTF-8");
+outFileContent = new String(baos.toByteArray(), StandardCharsets.UTF_8);
} finally {
IOUtils.cleanupWithLogger(null, baos, is);
}

@@ -25,7 +25,7 @@
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@@ -75,7 +75,7 @@ public void testPutMetrics() throws IOException, IllegalAccessException {
sock.receive(p);
String result =new String(p.getData(), 0, p.getLength(),
-Charset.forName("UTF-8"));
+StandardCharsets.UTF_8);
assertTrue(
"Received data did not match data sent",
result.equals("host.process.jvm.Context.foo1:1.25|c") ||
@@ -109,7 +109,7 @@ public void testPutMetrics2() throws IOException, IllegalAccessException {
sink.putMetrics(record);
sock.receive(p);
String result =
-new String(p.getData(), 0, p.getLength(), Charset.forName("UTF-8"));
+new String(p.getData(), 0, p.getLength(), StandardCharsets.UTF_8);
assertTrue("Received data did not match data sent",
result.equals("process.jvm.Context.foo1:1|c") ||

@@ -21,11 +21,11 @@
import static org.junit.Assert.assertEquals;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.io.Files;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
@@ -41,7 +41,7 @@ public class TestTableMapping {
public void testResolve() throws IOException {
File mapFile = File.createTempFile(getClass().getSimpleName() +
".testResolve", ".txt");
-Files.asCharSink(mapFile, Charsets.UTF_8).write(
+Files.asCharSink(mapFile, StandardCharsets.UTF_8).write(
hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
mapFile.deleteOnExit();
TableMapping mapping = new TableMapping();
@@ -64,7 +64,7 @@ public void testResolve() throws IOException {
public void testTableCaching() throws IOException {
File mapFile = File.createTempFile(getClass().getSimpleName() +
".testTableCaching", ".txt");
-Files.asCharSink(mapFile, Charsets.UTF_8).write(
+Files.asCharSink(mapFile, StandardCharsets.UTF_8).write(
hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
mapFile.deleteOnExit();
TableMapping mapping = new TableMapping();
@@ -128,7 +128,7 @@ public void testFileDoesNotExist() {
public void testClearingCachedMappings() throws IOException {
File mapFile = File.createTempFile(getClass().getSimpleName() +
".testClearingCachedMappings", ".txt");
-Files.asCharSink(mapFile, Charsets.UTF_8).write(
+Files.asCharSink(mapFile, StandardCharsets.UTF_8).write(
hostName1 + " /rack1\n" + hostName2 + "\t/rack2\n");
mapFile.deleteOnExit();
@@ -147,7 +147,7 @@ public void testClearingCachedMappings() throws IOException {
assertEquals("/rack1", result.get(0));
assertEquals("/rack2", result.get(1));
-Files.asCharSink(mapFile, Charsets.UTF_8).write("");
+Files.asCharSink(mapFile, StandardCharsets.UTF_8).write("");
mapping.reloadCachedMappings();
@@ -166,7 +166,7 @@ public void testClearingCachedMappings() throws IOException {
public void testBadFile() throws IOException {
File mapFile = File.createTempFile(getClass().getSimpleName() +
".testBadFile", ".txt");
-Files.asCharSink(mapFile, Charsets.UTF_8).write("bad contents");
+Files.asCharSink(mapFile, StandardCharsets.UTF_8).write("bad contents");
mapFile.deleteOnExit();
TableMapping mapping = new TableMapping();

@@ -19,6 +19,7 @@
package org.apache.hadoop.security.token.delegation;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -400,7 +401,7 @@ public List<ACL> getDefaultAcl() {
.connectString(connectString)
.retryPolicy(retryPolicy)
.aclProvider(digestAclProvider)
-.authorization("digest", userPass.getBytes("UTF-8"))
+.authorization("digest", userPass.getBytes(StandardCharsets.UTF_8))
.build();
curatorFramework.start();
ZKDelegationTokenSecretManager.setCurator(curatorFramework);

@@ -24,6 +24,7 @@
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
@@ -46,7 +47,7 @@ public class TestClasspath {
.class);
private static final File TEST_DIR = GenericTestUtils.getTestDir(
"TestClasspath");
-private static final Charset UTF8 = Charset.forName("UTF-8");
+private static final Charset UTF8 = StandardCharsets.UTF_8;
static {
ExitUtil.disableSystemExit();

@@ -21,6 +21,7 @@
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.lang.reflect.Constructor;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
@@ -49,7 +50,7 @@ public void testCorrectness() throws Exception {
checkOnBytes(new byte[] {40, 60, 97, -70}, false);
-checkOnBytes("hello world!".getBytes("UTF-8"), false);
+checkOnBytes("hello world!".getBytes(StandardCharsets.UTF_8), false);
for (int i = 0; i < 10000; i++) {
byte randomBytes[] = new byte[new Random().nextInt(2048)];

@@ -22,6 +22,7 @@
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.hadoop.test.GenericTestUtils;
@@ -31,7 +32,6 @@
import org.apache.zookeeper.data.ACL;
import org.junit.Test;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.io.Files;
public class TestZKUtil {
@@ -131,7 +131,7 @@ public void testConfIndirection() throws IOException {
assertEquals("x", ZKUtil.resolveConfIndirection("x"));
TEST_FILE.getParentFile().mkdirs();
-Files.asCharSink(TEST_FILE, Charsets.UTF_8).write("hello world");
+Files.asCharSink(TEST_FILE, StandardCharsets.UTF_8).write("hello world");
assertEquals("hello world", ZKUtil.resolveConfIndirection(
"@" + TEST_FILE.getAbsolutePath()));

@@ -22,6 +22,7 @@
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -117,7 +118,7 @@ public void testGetStringData() throws Exception {
curator.create(node1);
assertNull(curator.getStringData(node1));
-byte[] setData = "setData".getBytes("UTF-8");
+byte[] setData = "setData".getBytes(StandardCharsets.UTF_8);
curator.setData(node1, setData, -1);
assertEquals("setData", curator.getStringData(node1));
@@ -136,7 +137,7 @@ public void testTransaction() throws Exception {
String fencingNodePath = "/fencing";
String node1 = "/node1";
String node2 = "/node2";
-byte[] testData = "testData".getBytes("UTF-8");
+byte[] testData = "testData".getBytes(StandardCharsets.UTF_8);
assertFalse(curator.exists(fencingNodePath));
assertFalse(curator.exists(node1));
assertFalse(curator.exists(node2));
@@ -154,7 +155,7 @@ public void testTransaction() throws Exception {
assertTrue(Arrays.equals(testData, curator.getData(node1)));
assertTrue(Arrays.equals(testData, curator.getData(node2)));
-byte[] setData = "setData".getBytes("UTF-8");
+byte[] setData = "setData".getBytes(StandardCharsets.UTF_8);
txn = curator.createTransaction(zkAcl, fencingNodePath);
txn.setData(node1, setData, -1);
txn.delete(node2);

@@ -33,7 +33,7 @@
import java.io.Writer;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
@@ -64,8 +64,7 @@ public void writeTo(Object obj, Class<?> aClass, Type type,
Annotation[] annotations, MediaType mediaType,
MultivaluedMap<String, Object> stringObjectMultivaluedMap,
OutputStream outputStream) throws IOException, WebApplicationException {
-Writer writer = new OutputStreamWriter(outputStream, Charset
-.forName("UTF-8"));
+Writer writer = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8);
JsonSerialization.writer().writeValue(writer, obj);
}

@@ -28,6 +28,7 @@
import java.io.EOFException;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
/**
* Support for marshalling objects to and from JSON.
@@ -47,7 +48,6 @@
@InterfaceStability.Evolving
public class JsonSerDeser<T> extends JsonSerialization<T> {
-private static final String UTF_8 = "UTF-8";
public static final String E_NO_DATA = "No data at path";
public static final String E_DATA_TOO_SHORT = "Data at path too short";
public static final String E_MISSING_MARKER_STRING =
@@ -102,7 +102,7 @@ public T fromBytes(String path, byte[] bytes, String marker)
if (StringUtils.isNotEmpty(marker) && len < marker.length()) {
throw new NoRecordException(path, E_DATA_TOO_SHORT);
}
-String json = new String(bytes, 0, len, UTF_8);
+String json = new String(bytes, 0, len, StandardCharsets.UTF_8);
if (StringUtils.isNotEmpty(marker)
&& !json.contains(marker)) {
throw new NoRecordException(path, E_MISSING_MARKER_STRING + marker);

@@ -42,6 +42,7 @@
import javax.security.auth.login.AppConfigurationEntry;
import java.io.File;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Collections;
@@ -295,7 +296,7 @@ private void initSecurity() throws IOException {
digestAuthUser = id;
digestAuthPassword = pass;
String authPair = id + ":" + pass;
-digestAuthData = authPair.getBytes("UTF-8");
+digestAuthData = authPair.getBytes(StandardCharsets.UTF_8);
if (LOG.isDebugEnabled()) {
LOG.debug("Auth is Digest ACL: {}", aclToString(acl));
}

@@ -80,6 +80,7 @@
import java.nio.channels.DatagramChannel;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
+import java.nio.charset.StandardCharsets;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
@@ -628,7 +629,7 @@ private void enableDNSSECIfNecessary(Zone zone, Configuration conf,
Name zoneName = zone.getOrigin();
DNSKEYRecord dnskeyRecord = dnsKeyRecs.get(zoneName);
if (dnskeyRecord == null) {
-byte[] key = Base64.decodeBase64(publicKey.getBytes("UTF-8"));
+byte[] key = Base64.decodeBase64(publicKey.getBytes(StandardCharsets.UTF_8));
dnskeyRecord = new DNSKEYRecord(zoneName,
DClass.IN, ttl,
DNSKEYRecord.Flags.ZONE_KEY,

@@ -30,6 +30,7 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -59,7 +60,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
import org.apache.hadoop.thirdparty.com.google.common.collect.Maps;
import org.apache.hadoop.thirdparty.com.google.common.net.InetAddresses;
@@ -147,7 +147,7 @@ public static Map<String, String> createSaslPropertiesForEncryption(
* @return key encoded as SASL password
*/
public static char[] encryptionKeyToPassword(byte[] encryptionKey) {
-return new String(Base64.encodeBase64(encryptionKey, false), Charsets.UTF_8)
+return new String(Base64.encodeBase64(encryptionKey, false), StandardCharsets.UTF_8)
.toCharArray();
}

@@ -30,6 +30,7 @@
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.Socket;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -65,7 +66,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* Negotiates SASL for DataTransferProtocol on behalf of a client. There are
@@ -347,7 +347,7 @@ private static String getUserNameFromEncryptionKey(
return encryptionKey.keyId + NAME_DELIMITER +
encryptionKey.blockPoolId + NAME_DELIMITER +
new String(Base64.encodeBase64(encryptionKey.nonce, false),
-Charsets.UTF_8);
+StandardCharsets.UTF_8);
}
/**
@@ -450,7 +450,7 @@ private IOStreamPair getSaslStreams(InetAddress addr,
private void updateToken(Token<BlockTokenIdentifier> accessToken,
SecretKey secretKey, Map<String, String> saslProps)
throws IOException {
-byte[] newSecret = saslProps.get(Sasl.QOP).getBytes(Charsets.UTF_8);
+byte[] newSecret = saslProps.get(Sasl.QOP).getBytes(StandardCharsets.UTF_8);
BlockTokenIdentifier bkid = accessToken.decodeIdentifier();
bkid.setHandshakeMsg(newSecret);
byte[] bkidBytes = bkid.getBytes();
@@ -471,7 +471,7 @@ private void updateToken(Token<BlockTokenIdentifier> accessToken,
*/
private static String buildUserName(Token<BlockTokenIdentifier> blockToken) {
return new String(Base64.encodeBase64(blockToken.getIdentifier(), false),
-Charsets.UTF_8);
+StandardCharsets.UTF_8);
}
/**
@@ -483,7 +483,7 @@ private static String buildUserName(Token<BlockTokenIdentifier> blockToken) {
*/
private char[] buildClientPassword(Token<BlockTokenIdentifier> blockToken) {
return new String(Base64.encodeBase64(blockToken.getPassword(), false),
-Charsets.UTF_8).toCharArray();
+StandardCharsets.UTF_8).toCharArray();
}
/**

@@ -27,6 +27,7 @@
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.Reader;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
@@ -84,7 +85,7 @@ private CombinedHostsFileReader() {
if (hostFile.length() > 0) {
try (Reader input =
new InputStreamReader(
-Files.newInputStream(hostFile.toPath()), "UTF-8")) {
+Files.newInputStream(hostFile.toPath()), StandardCharsets.UTF_8)) {
allDNs = objectMapper.readValue(input, DatanodeAdminProperties[].class);
} catch (JsonMappingException jme) {
// The old format doesn't have json top-level token to enclose
@@ -103,7 +104,7 @@ private CombinedHostsFileReader() {
List<DatanodeAdminProperties> all = new ArrayList<>();
try (Reader input =
new InputStreamReader(Files.newInputStream(Paths.get(hostsFilePath)),
-"UTF-8")) {
+StandardCharsets.UTF_8)) {
Iterator<DatanodeAdminProperties> iterator =
objectReader.readValues(jsonFactory.createParser(input));
while (iterator.hasNext()) {

@@ -21,6 +21,7 @@
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Set;
@@ -62,7 +63,7 @@ public static void writeFile(final String hostsFile,
try (Writer output =
new OutputStreamWriter(Files.newOutputStream(Paths.get(hostsFile)),
-"UTF-8")) {
+StandardCharsets.UTF_8)) {
objectMapper.writeValue(output, allDNs);
}
}

@@ -137,7 +137,6 @@
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
/** A FileSystem for HDFS over the web. */
@@ -1792,7 +1791,7 @@ public DirectoryEntries listStatusBatch(Path f, byte[] token) throws
}
DirectoryListing listing = new FsPathResponseRunner<DirectoryListing>(
GetOpParam.Op.LISTSTATUS_BATCH,
-f, new StartAfterParam(new String(prevKey, Charsets.UTF_8))) {
+f, new StartAfterParam(new String(prevKey, StandardCharsets.UTF_8))) {
@Override
DirectoryListing decodeResponse(Map<?, ?> json) throws IOException {
return JsonUtilClient.toDirectoryListing(json);

@@ -22,6 +22,7 @@
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
+import java.nio.charset.StandardCharsets;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -204,7 +205,7 @@ public String call() throws Exception {
if (n <= 0) {
break;
}
-sb.append(new String(buf, 0, n, "UTF-8"));
+sb.append(new String(buf, 0, n, StandardCharsets.UTF_8));
}
return sb.toString();
} finally {

@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.fs.http.client;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -24,7 +25,6 @@
import java.util.List;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.MapType;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -796,7 +796,7 @@ public DirectoryEntries listStatusBatch(Path f, byte[] token) throws
Map<String, String> params = new HashMap<String, String>();
params.put(OP_PARAM, Operation.LISTSTATUS_BATCH.toString());
if (token != null) {
-params.put(START_AFTER_PARAM, new String(token, Charsets.UTF_8));
+params.put(START_AFTER_PARAM, new String(token, StandardCharsets.UTF_8));
}
HttpURLConnection conn = getConnection(
Operation.LISTSTATUS_BATCH.getMethod(),
@@ -811,7 +811,7 @@ public DirectoryEntries listStatusBatch(Path f, byte[] token) throws
byte[] newToken = null;
if (statuses.length > 0) {
newToken = statuses[statuses.length - 1].getPath().getName().toString()
-.getBytes(Charsets.UTF_8);
+.getBytes(StandardCharsets.UTF_8);
}
// Parse the remainingEntries boolean into hasMore
final long remainingEntries = (Long) listing.get(REMAINING_ENTRIES_JSON);

@@ -18,7 +18,6 @@
package org.apache.hadoop.fs.http.server;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -91,6 +90,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
+import java.nio.charset.StandardCharsets;
import java.security.AccessControlException;
import java.security.PrivilegedExceptionAction;
import java.text.MessageFormat;
@@ -422,7 +422,7 @@ public InputStream run() throws Exception {
HttpFSParametersProvider.StartAfterParam.class);
byte[] token = HttpFSUtils.EMPTY_BYTES;
if (startAfter != null) {
-token = startAfter.getBytes(Charsets.UTF_8);
+token = startAfter.getBytes(StandardCharsets.UTF_8);
}
FSOperations.FSListStatusBatch command = new FSOperations
.FSListStatusBatch(path, token);

@@ -25,7 +25,7 @@
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.EnumSet;
import io.netty.buffer.ByteBuf;
@@ -681,15 +681,15 @@ READLINK3Response readlink(XDR xdr, SecurityHandler securityHandler,
}
int rtmax = config.getInt(NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_KEY,
NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_DEFAULT);
-if (rtmax < target.getBytes(Charset.forName("UTF-8")).length) {
+if (rtmax < target.getBytes(StandardCharsets.UTF_8).length) {
LOG.error("Link size: {} is larger than max transfer size: {}",
-target.getBytes(Charset.forName("UTF-8")).length, rtmax);
+target.getBytes(StandardCharsets.UTF_8).length, rtmax);
return new READLINK3Response(Nfs3Status.NFS3ERR_IO, postOpAttr,
new byte[0]);
}
return new READLINK3Response(Nfs3Status.NFS3_OK, postOpAttr,
-target.getBytes(Charset.forName("UTF-8")));
+target.getBytes(StandardCharsets.UTF_8));
} catch (IOException e) {
LOG.warn("Readlink error", e);
@@ -1515,7 +1515,7 @@ private DirectoryListing listPaths(DFSClient dfsClient, String dirFileIdPath,
}
// This happens when startAfter was just deleted
LOG.info("Cookie couldn't be found: {}, do listing from beginning",
-new String(startAfter, Charset.forName("UTF-8")));
+new String(startAfter, StandardCharsets.UTF_8));
dlisting = dfsClient
.listPaths(dirFileIdPath, HdfsFileStatus.EMPTY_NAME);
}
@@ -1628,7 +1628,7 @@ public READDIR3Response readdir(XDR xdr, SecurityHandler securityHandler,
startAfter = HdfsFileStatus.EMPTY_NAME;
} else {
String inodeIdPath = Nfs3Utils.getFileIdPath(cookie);
-startAfter = inodeIdPath.getBytes(Charset.forName("UTF-8"));
+startAfter = inodeIdPath.getBytes(StandardCharsets.UTF_8);
}
dlisting = listPaths(dfsClient, dirFileIdPath, startAfter);
@@ -1800,7 +1800,7 @@ READDIRPLUS3Response readdirplus(XDR xdr, SecurityHandler securityHandler,
startAfter = HdfsFileStatus.EMPTY_NAME;
} else {
String inodeIdPath = Nfs3Utils.getFileIdPath(cookie);
-startAfter = inodeIdPath.getBytes(Charset.forName("UTF-8"));
+startAfter = inodeIdPath.getBytes(StandardCharsets.UTF_8);
}
dlisting = listPaths(dfsClient, dirFileIdPath, startAfter);

@@ -28,6 +28,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
@@ -62,7 +63,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* Negotiates SASL for DataTransferProtocol on behalf of a server. There are
@@ -326,7 +326,7 @@ private char[] buildServerPassword(String userName) throws IOException {
byte[] tokenPassword = blockPoolTokenSecretManager.retrievePassword(
identifier);
return (new String(Base64.encodeBase64(tokenPassword, false),
-Charsets.UTF_8)).toCharArray();
+StandardCharsets.UTF_8)).toCharArray();
}
/**
@@ -381,7 +381,7 @@ private IOStreamPair doSaslHandshake(Peer peer, OutputStream underlyingOut,
if (secret != null || bpid != null) {
// sanity check, if one is null, the other must also not be null
assert(secret != null && bpid != null);
-String qop = new String(secret, Charsets.UTF_8);
+String qop = new String(secret, StandardCharsets.UTF_8);
saslProps.put(Sasl.QOP, qop);
}
SaslParticipant sasl = SaslParticipant.createServerSaslParticipant(

@@ -25,6 +25,7 @@
import java.io.OutputStreamWriter;
import java.net.URL;
import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.security.PrivilegedExceptionAction;
@@ -72,7 +73,6 @@
import org.apache.hadoop.util.Time;
import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
import org.apache.hadoop.thirdparty.protobuf.TextFormat;
@@ -1105,7 +1105,7 @@ private void persistPaxosData(long segmentTxId,
// Write human-readable data after the protobuf. This is only
// to assist in debugging -- it's not parsed at all.
try(OutputStreamWriter writer =
-new OutputStreamWriter(fos, Charsets.UTF_8)) {
+new OutputStreamWriter(fos, StandardCharsets.UTF_8)) {
writer.write(String.valueOf(newData));
writer.write('\n');
writer.flush();

@@ -18,10 +18,10 @@
package org.apache.hadoop.hdfs.security.token.block;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.SecureRandom;
import java.util.Arrays;
@@ -293,7 +293,7 @@ public Token<BlockTokenIdentifier> generateToken(String userId,
if (shouldWrapQOP) {
String qop = Server.getAuxiliaryPortEstablishedQOP();
if (qop != null) {
-id.setHandshakeMsg(qop.getBytes(Charsets.UTF_8));
+id.setHandshakeMsg(qop.getBytes(StandardCharsets.UTF_8));
}
}
return new Token<BlockTokenIdentifier>(id, this);

@@ -25,6 +25,7 @@
import java.lang.management.ManagementFactory;
import java.nio.channels.FileLock;
import java.nio.channels.OverlappingFileLockException;
+import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.attribute.PosixFilePermission;
@@ -53,7 +54,6 @@
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -944,7 +944,7 @@ FileLock tryLock() throws IOException {
LOG.error("Unable to acquire file lock on path {}", lockF);
throw new OverlappingFileLockException();
}
-file.write(jvmName.getBytes(Charsets.UTF_8));
+file.write(jvmName.getBytes(StandardCharsets.UTF_8));
LOG.info("Lock on {} acquired by nodename {}", lockF, jvmName);
} catch(OverlappingFileLockException oe) {
// Cannot read from the locked file on Windows.

@@ -42,7 +42,7 @@
import org.slf4j.LoggerFactory;
import java.io.IOException;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
@@ -450,7 +450,7 @@ private NodePlan verifyPlanHash(String planID, String plan)
if ((planID == null) ||
(planID.length() != sha1Length) ||
-!DigestUtils.sha1Hex(plan.getBytes(Charset.forName("UTF-8")))
+!DigestUtils.sha1Hex(plan.getBytes(StandardCharsets.UTF_8))
.equalsIgnoreCase(planID)) {
LOG.error("Disk Balancer - Invalid plan hash.");
throw new DiskBalancerException("Invalid or mis-matched hash.",

@@ -35,6 +35,7 @@
import java.io.RandomAccessFile;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@@ -333,7 +334,7 @@ static File verifyIfValidPmemVolume(File pmemDir)
String uuidStr = UUID.randomUUID().toString();
String testFilePath = realPmemDir.getPath() + "/.verify.pmem." + uuidStr;
-byte[] contents = uuidStr.getBytes("UTF-8");
+byte[] contents = uuidStr.getBytes(StandardCharsets.UTF_8);
RandomAccessFile testFile = null;
MappedByteBuffer out = null;
try {

@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hdfs.server.datanode.web.webhdfs;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import com.sun.jersey.api.ParamException;
import com.sun.jersey.api.container.ContainerException;
import io.netty.buffer.Unpooled;
@@ -32,6 +31,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.nio.charset.StandardCharsets;
import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH;
import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_TYPE;
@@ -83,7 +83,7 @@ static DefaultFullHttpResponse exceptionCaught(Throwable cause) {
s = INTERNAL_SERVER_ERROR;
}
-final byte[] js = JsonUtil.toJsonString(e).getBytes(Charsets.UTF_8);
+final byte[] js = JsonUtil.toJsonString(e).getBytes(StandardCharsets.UTF_8);
DefaultFullHttpResponse resp =
new DefaultFullHttpResponse(HTTP_1_1, s, Unpooled.wrappedBuffer(js));

@@ -96,6 +96,7 @@
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_DIFF_LISTING_LIMIT_DEFAULT;
import static org.apache.hadoop.hdfs.DFSUtil.isParentEntry;
+import java.nio.charset.StandardCharsets;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.text.CaseUtils;
@@ -343,7 +344,6 @@
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -1982,7 +1982,7 @@ void metaSave(String filename) throws IOException {
File file = new File(System.getProperty("hadoop.log.dir"), filename);
PrintWriter out = new PrintWriter(new BufferedWriter(
new OutputStreamWriter(Files.newOutputStream(file.toPath()),
-Charsets.UTF_8)));
+StandardCharsets.UTF_8)));
metaSave(out);
out.flush();
out.close();
@@ -4217,7 +4217,7 @@ DirectoryListing getListing(String src, byte[] startAfter,
public byte[] getSrcPathsHash(String[] srcs) {
synchronized (digest) {
for (String src : srcs) {
-digest.update(src.getBytes(Charsets.UTF_8));
+digest.update(src.getBytes(StandardCharsets.UTF_8));
}
byte[] result = digest.digest();
digest.reset();

@@ -26,6 +26,7 @@
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
import java.security.Principal;
import java.security.PrivilegedExceptionAction;
import java.util.Base64;
@@ -124,7 +125,6 @@
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import com.sun.jersey.spi.container.ResourceFilters;
/** Web-hdfs NameNode implementation. */
@@ -1339,7 +1339,7 @@ protected Response get(
{
byte[] start = HdfsFileStatus.EMPTY_NAME;
if (startAfter != null && startAfter.getValue() != null) {
-start = startAfter.getValue().getBytes(Charsets.UTF_8);
+start = startAfter.getValue().getBytes(StandardCharsets.UTF_8);
}
final DirectoryListing listing = getDirectoryListing(cp, fullpath, start);
final String js = JsonUtil.toJsonString(listing);
@@ -1532,7 +1532,7 @@ private static StreamingOutput getListingStream(final ClientProtocol cp,
@Override
public void write(final OutputStream outstream) throws IOException {
final PrintWriter out = new PrintWriter(new OutputStreamWriter(
-outstream, Charsets.UTF_8));
+outstream, StandardCharsets.UTF_8));
out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\""
+ FileStatus.class.getSimpleName() + "\":[");

View File

@ -22,6 +22,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Stack;
import org.apache.hadoop.classification.InterfaceAudience;
@ -41,7 +42,6 @@
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* OfflineEditsXmlLoader walks an EditsVisitor over an OEV XML file
@ -75,7 +75,7 @@ public OfflineEditsXmlLoader(OfflineEditsVisitor visitor,
File inputFile, OfflineEditsViewer.Flags flags) throws FileNotFoundException {
this.visitor = visitor;
this.fileReader =
new InputStreamReader(new FileInputStream(inputFile), Charsets.UTF_8);
new InputStreamReader(new FileInputStream(inputFile), StandardCharsets.UTF_8);
this.fixTxIds = flags.getFixTxIds();
}

View File

@ -21,6 +21,7 @@
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.HashMap;
@ -30,7 +31,6 @@
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* StatisticsEditsVisitor implements text version of EditsVisitor
@ -53,7 +53,7 @@ public class StatisticsEditsVisitor implements OfflineEditsVisitor {
* @param out Name of file to write output to
*/
public StatisticsEditsVisitor(OutputStream out) throws IOException {
this.out = new PrintWriter(new OutputStreamWriter(out, Charsets.UTF_8));
this.out = new PrintWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));
}
/** Start the visitor */

View File

@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hdfs.tools.offlineImageViewer;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFutureListener;
@ -37,6 +36,7 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
@ -124,7 +124,7 @@ public void channelRead0(ChannelHandlerContext ctx, HttpRequest request)
DefaultFullHttpResponse resp = new DefaultFullHttpResponse(HTTP_1_1,
HttpResponseStatus.OK, Unpooled.wrappedBuffer(content
.getBytes(Charsets.UTF_8)));
.getBytes(StandardCharsets.UTF_8)));
resp.headers().set(CONTENT_TYPE, APPLICATION_JSON_UTF8);
resp.headers().set(CONTENT_LENGTH, resp.content().readableBytes());
resp.headers().set(CONNECTION, CLOSE);
@ -142,7 +142,7 @@ public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause)
Exception e = cause instanceof Exception ? (Exception) cause : new
Exception(cause);
final String output = JsonUtil.toJsonString(e);
ByteBuf content = Unpooled.wrappedBuffer(output.getBytes(Charsets.UTF_8));
ByteBuf content = Unpooled.wrappedBuffer(output.getBytes(StandardCharsets.UTF_8));
final DefaultFullHttpResponse resp = new DefaultFullHttpResponse(
HTTP_1_1, INTERNAL_SERVER_ERROR, content);

View File

@ -19,6 +19,7 @@
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
@ -320,7 +321,7 @@ private void processINodesUC(DataInputStream in, ImageVisitor v,
for(int i = 0; i < numINUC; i++) {
v.visitEnclosingElement(ImageElement.INODE_UNDER_CONSTRUCTION);
byte [] name = FSImageSerialization.readBytes(in);
String n = new String(name, "UTF8");
String n = new String(name, StandardCharsets.UTF_8);
v.visit(ImageElement.INODE_PATH, n);
if (NameNodeLayoutVersion.supports(Feature.ADD_INODE_ID, imageVersion)) {

View File

@ -36,7 +36,7 @@
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.DigestOutputStream;
@ -1840,7 +1840,7 @@ public static void run(String inputPath, String outputPath)
Files.deleteIfExists(Paths.get(outputPath));
fout = Files.newOutputStream(Paths.get(outputPath));
fis = Files.newInputStream(Paths.get(inputPath));
reader = new InputStreamReader(fis, Charset.forName("UTF-8"));
reader = new InputStreamReader(fis, StandardCharsets.UTF_8);
out = new CountingOutputStream(
new DigestOutputStream(
new BufferedOutputStream(fout), digester));

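The reader side follows suit: Charset.forName("UTF-8") does a runtime lookup by name (and a mistyped name would fail only at runtime), whereas StandardCharsets.UTF_8 is resolved at compile time. A minimal, self-contained decoding sketch (the in-memory data stands in for the image file the real code reads):

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class ReadUtf8 {
    public static void main(String[] args) throws IOException {
        byte[] data = "first line\nsecond line".getBytes(StandardCharsets.UTF_8);
        // Decode with an explicit charset instead of the platform default.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                new ByteArrayInputStream(data), StandardCharsets.UTF_8))) {
            System.out.println(reader.readLine()); // first line
        }
    }
}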
View File

@ -26,11 +26,11 @@
import java.io.InputStream;
import java.io.PrintStream;
import java.io.RandomAccessFile;
import java.io.UnsupportedEncodingException;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
@ -419,9 +419,8 @@ private static byte[] toBytes(long value) {
return ByteBuffer.allocate(8).putLong(value).array();
}
private static byte[] toBytes(String value)
throws UnsupportedEncodingException {
return value.getBytes("UTF-8");
private static byte[] toBytes(String value) {
return value.getBytes(StandardCharsets.UTF_8);
}
private static long toLong(byte[] bytes) {
@ -430,11 +429,7 @@ private static long toLong(byte[] bytes) {
}
private static String toString(byte[] bytes) throws IOException {
try {
return new String(bytes, "UTF-8");
} catch (UnsupportedEncodingException e) {
throw new IOException(e);
}
return new String(bytes, StandardCharsets.UTF_8);
}
@Override

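The toBytes/toString helpers above show the knock-on simplification: with a Charset argument there is no UnsupportedEncodingException to declare or rewrap, so the throws clause and the try/catch both disappear. A self-contained sketch of the post-patch shape (helper names mirror the diff; the wrapper class is illustrative):

import java.nio.charset.StandardCharsets;

public class BytesHelpers {
    // After the patch: no throws clause, no try/catch needed.
    static byte[] toBytes(String value) {
        return value.getBytes(StandardCharsets.UTF_8);
    }

    static String toString(byte[] bytes) {
        return new String(bytes, StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        System.out.println(toString(toBytes("round-trip"))); // round-trip
    }
}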
View File

@ -19,10 +19,10 @@
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* TextWriterImageProcessor mixes in the ability for ImageVisitor
@ -61,7 +61,7 @@ public TextWriterImageVisitor(String filename, boolean printToScreen)
super();
this.printToScreen = printToScreen;
fw = new OutputStreamWriter(Files.newOutputStream(Paths.get(filename)),
Charsets.UTF_8);
StandardCharsets.UTF_8);
okToWrite = true;
}

View File

@ -23,6 +23,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.security.DigestInputStream;
import java.security.MessageDigest;
@ -35,7 +36,6 @@
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* Static functions for dealing with files of the same format
@ -75,7 +75,7 @@ public static void verifySavedMD5(File dataFile, MD5Hash expectedMD5)
private static Matcher readStoredMd5(File md5File) throws IOException {
BufferedReader reader =
new BufferedReader(new InputStreamReader(
Files.newInputStream(md5File.toPath()), Charsets.UTF_8));
Files.newInputStream(md5File.toPath()), StandardCharsets.UTF_8));
String md5Line;
try {
md5Line = reader.readLine();
@ -155,7 +155,7 @@ private static void saveMD5File(File dataFile, String digestString)
String md5Line = digestString + " *" + dataFile.getName() + "\n";
AtomicFileOutputStream afos = new AtomicFileOutputStream(md5File);
afos.write(md5Line.getBytes(Charsets.UTF_8));
afos.write(md5Line.getBytes(StandardCharsets.UTF_8));
afos.close();
if (LOG.isDebugEnabled()) {

View File

@ -22,14 +22,13 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* Class that represents a file on disk which persistently stores
* a single <code>long</code> value. The file is updated atomically
@ -77,7 +76,7 @@ public void set(long newVal) throws IOException {
public static void writeFile(File file, long val) throws IOException {
AtomicFileOutputStream fos = new AtomicFileOutputStream(file);
try {
fos.write(String.valueOf(val).getBytes(Charsets.UTF_8));
fos.write(String.valueOf(val).getBytes(StandardCharsets.UTF_8));
fos.write('\n');
fos.close();
fos = null;
@ -93,7 +92,7 @@ public static long readFile(File file, long defaultVal) throws IOException {
if (file.exists()) {
BufferedReader br =
new BufferedReader(new InputStreamReader(new FileInputStream(
file), Charsets.UTF_8));
file), StandardCharsets.UTF_8));
try {
val = Long.parseLong(br.readLine());
br.close();

View File

@ -53,6 +53,7 @@
import java.net.URL;
import java.net.URLConnection;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
@ -70,7 +71,6 @@
import java.util.UUID;
import java.util.concurrent.TimeoutException;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.base.Strings;
@ -985,7 +985,7 @@ public static void appendFileNewBlock(DistributedFileSystem fs,
* @return url content as string (UTF-8 encoding assumed)
*/
public static String urlGet(URL url) throws IOException {
return new String(urlGetBytes(url), Charsets.UTF_8);
return new String(urlGetBytes(url), StandardCharsets.UTF_8);
}
/**
@ -1438,7 +1438,7 @@ public static void runOperations(MiniDFSCluster cluster,
Short permission = 0777;
filesystem.setPermission(pathFileCreate, new FsPermission(permission));
// OP_SET_OWNER 8
filesystem.setOwner(pathFileCreate, new String("newOwner"), null);
filesystem.setOwner(pathFileCreate, "newOwner", null);
// OP_CLOSE 9 see above
// OP_SET_GENSTAMP 10 see above
// OP_SET_NS_QUOTA 11 obsolete

View File

@ -23,6 +23,7 @@
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.concurrent.TimeoutException;
@ -46,7 +47,7 @@ public class TestBalancerBandwidth {
final static private int DEFAULT_BANDWIDTH = 1024*1024;
public static final Logger LOG =
LoggerFactory.getLogger(TestBalancerBandwidth.class);
private static final Charset UTF8 = Charset.forName("UTF-8");
private static final Charset UTF8 = StandardCharsets.UTF_8;
private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
private final PrintStream outStream = new PrintStream(outContent);

View File

@ -23,6 +23,7 @@
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
@ -41,7 +42,6 @@
import org.junit.After;
import org.junit.Test;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* This test ensures the appropriate response (successful or failure) from
@ -312,8 +312,8 @@ public void testRollback() throws Exception {
for (File f : baseDirs) {
UpgradeUtilities.corruptFile(
new File(f,"VERSION"),
"layoutVersion".getBytes(Charsets.UTF_8),
"xxxxxxxxxxxxx".getBytes(Charsets.UTF_8));
"layoutVersion".getBytes(StandardCharsets.UTF_8),
"xxxxxxxxxxxxx".getBytes(StandardCharsets.UTF_8));
}
startNameNodeShouldFail("file VERSION has layoutVersion missing");

View File

@ -29,6 +29,7 @@
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
import org.slf4j.Logger;
@ -49,7 +50,6 @@
import org.junit.Ignore;
import org.junit.Test;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
/**
@ -335,8 +335,8 @@ public void testUpgrade() throws Exception {
for (File f : baseDirs) {
UpgradeUtilities.corruptFile(
new File(f,"VERSION"),
"layoutVersion".getBytes(Charsets.UTF_8),
"xxxxxxxxxxxxx".getBytes(Charsets.UTF_8));
"layoutVersion".getBytes(StandardCharsets.UTF_8),
"xxxxxxxxxxxxx".getBytes(StandardCharsets.UTF_8));
}
startNameNodeShouldFail(StartupOption.UPGRADE);
UpgradeUtilities.createEmptyDirs(nameNodeDirs);

View File

@ -160,7 +160,7 @@ public void testDatanodeReportMissingBlock() throws Exception {
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
Path p = new Path("/testDatanodeReportMissingBlock");
DFSTestUtil.writeFile(fs, p, new String("testdata"));
DFSTestUtil.writeFile(fs, p, "testdata");
LocatedBlock lb = fs.getClient().getLocatedBlocks(p.toString(), 0).get(0);
assertEquals(3, lb.getLocations().length);
ExtendedBlock b = lb.getBlock();

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.hdfs;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.CommonConfigurationKeys;
@ -287,7 +288,7 @@ public void testMultipleNNPortOverwriteDownStream() throws Exception {
private void doTest(FileSystem fs, Path path) throws Exception {
FileSystemTestHelper.createFile(fs, path, NUM_BLOCKS, BLOCK_SIZE);
assertArrayEquals(FileSystemTestHelper.getFileData(NUM_BLOCKS, BLOCK_SIZE),
DFSTestUtil.readFile(fs, path).getBytes("UTF-8"));
DFSTestUtil.readFile(fs, path).getBytes(StandardCharsets.UTF_8));
BlockLocation[] blockLocations = fs.getFileBlockLocations(path, 0,
Long.MAX_VALUE);
assertNotNull(blockLocations);

View File

@ -32,6 +32,7 @@
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.List;
import java.util.Scanner;
@ -66,7 +67,6 @@
import org.junit.Rule;
import org.junit.Test;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.junit.rules.Timeout;
import org.slf4j.Logger;
import org.slf4j.event.Level;
@ -1216,7 +1216,7 @@ public void testSetSpaceQuotaWhenStorageTypeIsWrong() throws Exception {
String[] args =
{ "-setSpaceQuota", "100", "-storageType", "COLD", "/testDir" };
admin.run(args);
String errOutput = new String(err.toByteArray(), Charsets.UTF_8);
String errOutput = new String(err.toByteArray(), StandardCharsets.UTF_8);
assertTrue(
errOutput.contains(StorageType.getTypesSupportingQuota().toString()));
} finally {

View File

@ -32,6 +32,7 @@
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.atomic.AtomicBoolean;
import org.slf4j.LoggerFactory;
@ -200,7 +201,7 @@ private void doTest(HdfsConfiguration conf) throws IOException {
fs = FileSystem.get(cluster.getURI(), conf);
FileSystemTestHelper.createFile(fs, PATH, NUM_BLOCKS, BLOCK_SIZE);
assertArrayEquals(FileSystemTestHelper.getFileData(NUM_BLOCKS, BLOCK_SIZE),
DFSTestUtil.readFile(fs, PATH).getBytes("UTF-8"));
DFSTestUtil.readFile(fs, PATH).getBytes(StandardCharsets.UTF_8));
BlockLocation[] blockLocations = fs.getFileBlockLocations(PATH, 0,
Long.MAX_VALUE);
assertNotNull(blockLocations);

View File

@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hdfs.qjournal.server;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.primitives.Bytes;
import org.apache.hadoop.thirdparty.com.google.common.primitives.Ints;
import org.apache.hadoop.conf.Configuration;
@ -54,6 +53,7 @@
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
@ -278,7 +278,7 @@ public void testJournal() throws Exception {
ch.newEpoch(1).get();
ch.setEpoch(1);
ch.startLogSegment(1, NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION).get();
ch.sendEdits(1L, 1, 1, "hello".getBytes(Charsets.UTF_8)).get();
ch.sendEdits(1L, 1, 1, "hello".getBytes(StandardCharsets.UTF_8)).get();
metrics = MetricsAsserts.getMetrics(
journal.getMetrics().getName());
@ -291,7 +291,7 @@ public void testJournal() throws Exception {
beginTimestamp = lastJournalTimestamp;
ch.setCommittedTxId(100L);
ch.sendEdits(1L, 2, 1, "goodbye".getBytes(Charsets.UTF_8)).get();
ch.sendEdits(1L, 2, 1, "goodbye".getBytes(StandardCharsets.UTF_8)).get();
metrics = MetricsAsserts.getMetrics(
journal.getMetrics().getName());

View File

@ -553,7 +553,7 @@ public void testDNShouldNotDeleteBlockONTooManyOpenFiles()
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
Path p = new Path("/testShouldThrowTMP");
DFSTestUtil.writeFile(fs, p, new String("testdata"));
DFSTestUtil.writeFile(fs, p, "testdata");
//Before DN throws too many open files
verifyBlockLocations(fs, p, 1);
Mockito.doThrow(new FileNotFoundException("Too many open files")).

View File

@ -60,7 +60,7 @@ public class TestFavoredNodesEndToEnd {
private static Configuration conf;
private final static int NUM_DATA_NODES = 10;
private final static int NUM_FILES = 10;
private final static byte[] SOME_BYTES = new String("foo").getBytes();
private final static byte[] SOME_BYTES = "foo".getBytes();
private static DistributedFileSystem dfs;
private static ArrayList<DataNode> datanodes;

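SOME_BYTES above also drops a redundant new String("foo") wrapper: a string literal is already an interned String, so the copy constructor only creates garbage. Note that the remaining "foo".getBytes() still encodes with the platform default charset; a fully pinned-down version would pass StandardCharsets.UTF_8 as well. A sketch of the distinction:

import java.nio.charset.StandardCharsets;

public class LiteralBytes {
    public static void main(String[] args) {
        byte[] defaultCharset = "foo".getBytes();               // platform-dependent
        byte[] pinned = "foo".getBytes(StandardCharsets.UTF_8); // always UTF-8
        System.out.println(defaultCharset.length + " " + pinned.length);
    }
}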
View File

@ -919,7 +919,7 @@ public void testFsckReplicaDetails() throws Exception {
dfs = cluster.getFileSystem();
// create files
final String testFile = new String("/testfile");
final String testFile = "/testfile";
final Path path = new Path(testFile);
DFSTestUtil.createFile(dfs, path, fileSize, replFactor, 1000L);
DFSTestUtil.waitReplication(dfs, path, replFactor);
@ -1202,7 +1202,7 @@ public void testFsckMissingReplicas() throws IOException {
assertNotNull("Failed to get FileSystem", dfs);
// Create a file that will be intentionally under-replicated
final String pathString = new String("/testfile");
final String pathString = "/testfile";
final Path path = new Path(pathString);
long fileLen = blockSize * numBlocks;
DFSTestUtil.createFile(dfs, path, fileLen, replFactor, 1);
@ -1263,7 +1263,7 @@ public void testFsckMisPlacedReplicas() throws IOException {
assertNotNull("Failed to get FileSystem", dfs);
// Create a file that will be intentionally under-replicated
final String pathString = new String("/testfile");
final String pathString = "/testfile";
final Path path = new Path(pathString);
long fileLen = blockSize * numBlocks;
DFSTestUtil.createFile(dfs, path, fileLen, replFactor, 1);
@ -1436,7 +1436,7 @@ public void testBlockIdCK() throws Exception {
DFSTestUtil util = new DFSTestUtil.Builder().
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
final String pathString = new String("/testfile");
final String pathString = "/testfile";
final Path path = new Path(pathString);
util.createFile(dfs, path, 1024, replFactor, 1000L);
util.waitReplication(dfs, path, replFactor);
@ -1490,7 +1490,7 @@ public void testBlockIdCKDecommission() throws Exception {
DFSTestUtil util = new DFSTestUtil.Builder().
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
final String pathString = new String("/testfile");
final String pathString = "/testfile";
final Path path = new Path(pathString);
util.createFile(dfs, path, 1024, replFactor, 1000L);
util.waitReplication(dfs, path, replFactor);
@ -1577,7 +1577,7 @@ public void testBlockIdCKMaintenance() throws Exception {
DFSTestUtil util = new DFSTestUtil.Builder().
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
final String pathString = new String("/testfile");
final String pathString = "/testfile";
final Path path = new Path(pathString);
util.createFile(dfs, path, 1024, replFactor, 1000L);
util.waitReplication(dfs, path, replFactor);
@ -1694,7 +1694,7 @@ public void testBlockIdCKStaleness() throws Exception {
setName(getClass().getSimpleName()).setNumFiles(1).build();
// Create one file.
final String pathString = new String("/testfile");
final String pathString = "/testfile";
final Path path = new Path(pathString);
util.createFile(fs, path, 1024L, replFactor, 1024L);
util.waitReplication(fs, path, replFactor);
@ -1780,7 +1780,7 @@ public void testBlockIdCKCorruption() throws Exception {
DFSTestUtil util = new DFSTestUtil.Builder().
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
final String pathString = new String("/testfile");
final String pathString = "/testfile";
final Path path = new Path(pathString);
util.createFile(dfs, path, 1024, repFactor, 1000L);
util.waitReplication(dfs, path, repFactor);
@ -1937,7 +1937,7 @@ public void testFsckWithDecommissionedReplicas() throws Exception {
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
final String testFile = new String("/testfile");
final String testFile = "/testfile";
final Path path = new Path(testFile);
util.createFile(dfs, path, fileSize, replFactor, 1000L);
util.waitReplication(dfs, path, replFactor);
@ -2020,7 +2020,7 @@ public void testFsckWithMaintenanceReplicas() throws Exception {
DFSTestUtil util = new DFSTestUtil.Builder().
setName(getClass().getSimpleName()).setNumFiles(1).build();
//create files
final String testFile = new String("/testfile");
final String testFile = "/testfile";
final Path path = new Path(testFile);
util.createFile(dfs, path, 1024, replFactor, 1000L);
util.waitReplication(dfs, path, replFactor);
@ -2394,7 +2394,7 @@ private void testUpgradeDomain(boolean defineUpgradeDomain,
}
// create files
final String testFile = new String("/testfile");
final String testFile = "/testfile";
final Path path = new Path(testFile);
DFSTestUtil.createFile(dfs, path, fileSize, replFactor, 1000L);
DFSTestUtil.waitReplication(dfs, path, replFactor);

View File

@ -1163,7 +1163,7 @@ public void testFilesInGetListingOps() throws Exception {
HdfsFileStatus.EMPTY_NAME, false);
assertTrue(dl.getPartialListing().length == 3);
String f2 = new String("f2");
String f2 = "f2";
dl = cluster.getNameNodeRpc().getListing("/tmp", f2.getBytes(), false);
assertTrue(dl.getPartialListing().length == 1);

View File

@ -24,6 +24,7 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
@ -245,7 +246,7 @@ public void testFinalState() throws Exception {
*/
private String doGetAndReturnResponseBody() throws IOException {
servlet.doGet(req, resp);
return new String(respOut.toByteArray(), "UTF-8");
return new String(respOut.toByteArray(), StandardCharsets.UTF_8);
}
/**

View File

@ -19,8 +19,8 @@
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
@ -56,8 +56,8 @@ public class TestDFSAdminWithHA {
private static String newLine = System.getProperty("line.separator");
private void assertOutputMatches(String string) {
String errOutput = new String(err.toByteArray(), Charsets.UTF_8);
String output = new String(out.toByteArray(), Charsets.UTF_8);
String errOutput = new String(err.toByteArray(), StandardCharsets.UTF_8);
String output = new String(out.toByteArray(), StandardCharsets.UTF_8);
if (!errOutput.matches(string) && !output.matches(string)) {
fail("Expected output to match '" + string +
@ -70,8 +70,8 @@ private void assertOutputMatches(String string) {
}
private void assertOutputMatches(String outMessage, String errMessage) {
String errOutput = new String(err.toByteArray(), Charsets.UTF_8);
String output = new String(out.toByteArray(), Charsets.UTF_8);
String errOutput = new String(err.toByteArray(), StandardCharsets.UTF_8);
String output = new String(out.toByteArray(), StandardCharsets.UTF_8);
if (!errOutput.matches(errMessage) || !output.matches(outMessage)) {
fail("Expected output to match '" + outMessage + " and " + errMessage +

View File

@ -26,6 +26,7 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -48,7 +49,6 @@
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
public class TestDFSHAAdmin {
@ -435,8 +435,8 @@ private Object runTool(String ... args) throws Exception {
outBytes.reset();
LOG.info("Running: DFSHAAdmin " + Joiner.on(" ").join(args));
int ret = tool.run(args);
errOutput = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
output = new String(outBytes.toByteArray(), Charsets.UTF_8);
errOutput = new String(errOutBytes.toByteArray(), StandardCharsets.UTF_8);
output = new String(outBytes.toByteArray(), StandardCharsets.UTF_8);
LOG.info("Err_output:\n" + errOutput + "\nOutput:\n" + output);
return ret;
}

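The test-harness changes here and in TestDFSAdminWithHA all follow one pattern: command output is captured in a ByteArrayOutputStream and decoded back with an explicit charset, so assertions do not depend on the platform default. A condensed sketch of that capture idiom (the printed message is illustrative, not real tool output):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

public class CaptureOutput {
    public static void main(String[] args) throws UnsupportedEncodingException {
        ByteArrayOutputStream outBytes = new ByteArrayOutputStream();
        // Encode with an explicit charset...
        PrintStream out = new PrintStream(outBytes, true, "UTF-8");
        out.println("Balancer bandwidth is 1048576 bytes per second.");
        // ...and decode with the matching one before asserting.
        String output = new String(outBytes.toByteArray(), StandardCharsets.UTF_8);
        System.out.print(output);
    }
}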
View File

@ -27,6 +27,7 @@
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -45,7 +46,6 @@
import org.junit.Before;
import org.junit.Test;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
import org.apache.hadoop.thirdparty.com.google.common.io.Files;
@ -232,7 +232,7 @@ public void testFencer() throws Exception {
assertEquals(0, runTool("-ns", "minidfs-ns", "-failover", "nn2", "nn1"));
// Fencer has not run yet, since none of the above required fencing
assertEquals("", Files.asCharSource(tmpFile, Charsets.UTF_8).read());
assertEquals("", Files.asCharSource(tmpFile, StandardCharsets.UTF_8).read());
// Test failover with fencer and forcefence option
assertEquals(0, runTool("-failover", "nn1", "nn2", "--forcefence"));
@ -240,7 +240,7 @@ public void testFencer() throws Exception {
// The fence script should run with the configuration from the target
// node, rather than the configuration from the fencing node. Strip
// out any trailing spaces and CR/LFs which may be present on Windows.
String fenceCommandOutput = Files.asCharSource(tmpFile, Charsets.UTF_8)
String fenceCommandOutput = Files.asCharSource(tmpFile, StandardCharsets.UTF_8)
.read().replaceAll(" *[\r\n]+", "");
assertEquals("minidfs-ns.nn1 " + nn1Port + " nn1", fenceCommandOutput);
tmpFile.delete();
@ -325,7 +325,7 @@ private int runTool(String ... args) throws Exception {
errOutBytes.reset();
LOG.info("Running: DFSHAAdmin " + Joiner.on(" ").join(args));
int ret = tool.run(args);
errOutput = new String(errOutBytes.toByteArray(), Charsets.UTF_8);
errOutput = new String(errOutBytes.toByteArray(), StandardCharsets.UTF_8);
LOG.info("Output:\n" + errOutput);
return ret;
}

View File

@ -27,6 +27,7 @@
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Map;
@ -341,7 +342,7 @@ public void testLengthParamLongerThanFile() throws IOException {
byte[] respBody = new byte[content.length()];
is = conn.getInputStream();
IOUtils.readFully(is, respBody, 0, content.length());
assertEquals(content, new String(respBody, "US-ASCII"));
assertEquals(content, new String(respBody, StandardCharsets.US_ASCII));
} finally {
IOUtils.closeStream(is);
if (conn != null) {
@ -392,7 +393,7 @@ public void testOffsetPlusLengthParamsLongerThanFile() throws IOException {
byte[] respBody = new byte[content.length() - 1];
is = conn.getInputStream();
IOUtils.readFully(is, respBody, 0, content.length() - 1);
assertEquals(content.substring(1), new String(respBody, "US-ASCII"));
assertEquals(content.substring(1), new String(respBody, StandardCharsets.US_ASCII));
} finally {
IOUtils.closeStream(is);
if (conn != null) {

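This test decodes HTTP response bytes with StandardCharsets.US_ASCII rather than the string name "US-ASCII". StandardCharsets guarantees six constants on every JVM: US_ASCII, ISO_8859_1, UTF_8, UTF_16, UTF_16BE and UTF_16LE. A one-line sketch:

import java.nio.charset.StandardCharsets;

public class AsciiDecode {
    public static void main(String[] args) {
        byte[] respBody = {'H', 'T', 'T', 'P'};
        System.out.println(new String(respBody, StandardCharsets.US_ASCII)); // HTTP
    }
}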
View File

@ -31,6 +31,7 @@
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.nio.channels.SocketChannel;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -332,7 +333,7 @@ public void run() {
// Write response.
out = clientSocket.getOutputStream();
out.write(temporaryRedirect().getBytes("UTF-8"));
out.write(temporaryRedirect().getBytes(StandardCharsets.UTF_8));
} catch (IOException e) {
// Fail the test on any I/O error in the server thread.
LOG.error("unexpected IOException in server thread", e);

View File

@ -39,7 +39,7 @@ public class TestHdfsConfigFields extends TestConfigurationFieldsBase {
@Override
public void initializeMemberVariables() {
xmlFilename = new String("hdfs-default.xml");
xmlFilename = "hdfs-default.xml";
configurationClasses = new Class[] { HdfsClientConfigKeys.class,
HdfsClientConfigKeys.Failover.class,
HdfsClientConfigKeys.StripedRead.class, DFSConfigKeys.class,

View File

@ -28,6 +28,7 @@
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.nio.charset.StandardCharsets;
import java.util.function.Supplier;
import java.io.File;
import java.io.FileInputStream;
@ -2097,7 +2098,7 @@ public static String slurp(File f) throws IOException {
String contents = null;
try {
in.read(buf, 0, len);
contents = new String(buf, "UTF-8");
contents = new String(buf, StandardCharsets.UTF_8);
} finally {
in.close();
}

View File

@ -21,6 +21,7 @@
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.ArrayList;
import java.util.Arrays;
@ -31,7 +32,6 @@
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* <code>JobQueueClient</code> is interface provided to the user in order to get
@ -148,7 +148,7 @@ private void displayQueueList() throws IOException {
JobQueueInfo[] rootQueues = jc.getRootQueues();
for (JobQueueInfo queue : rootQueues) {
printJobQueueInfo(queue, new PrintWriter(new OutputStreamWriter(
System.out, Charsets.UTF_8)));
System.out, StandardCharsets.UTF_8)));
}
}
@ -187,7 +187,7 @@ private void displayQueueInfo(String queue, boolean showJobs)
return;
}
printJobQueueInfo(jobQueueInfo, new PrintWriter(new OutputStreamWriter(
System.out, Charsets.UTF_8)));
System.out, StandardCharsets.UTF_8)));
if (showJobs && (jobQueueInfo.getChildren() == null ||
jobQueueInfo.getChildren().size() == 0)) {
JobStatus[] jobs = jobQueueInfo.getJobStatuses();

View File

@ -27,6 +27,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
@ -56,7 +57,6 @@
import org.apache.log4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* A simple logger to handle the task-specific user logs.
@ -114,7 +114,7 @@ private static LogFileDetail getLogFileDetail(TaskAttemptID taskid,
File indexFile = getIndexFile(taskid, isCleanup);
BufferedReader fis = new BufferedReader(new InputStreamReader(
SecureIOUtils.openForRead(indexFile, obtainLogDirOwner(taskid), null),
Charsets.UTF_8));
StandardCharsets.UTF_8));
//the format of the index file is
//LOG_DIR: <the dir where the task logs are really stored>
//stdout:<start-offset in the stdout file> <length>

View File

@ -19,6 +19,7 @@
package org.apache.hadoop.mapred;
import java.io.*;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@ -27,7 +28,6 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
* An {@link InputFormat} for plain text files. Files are broken into lines.
@ -62,7 +62,7 @@ public RecordReader<LongWritable, Text> getRecordReader(
String delimiter = job.get("textinputformat.record.delimiter");
byte[] recordDelimiterBytes = null;
if (null != delimiter) {
recordDelimiterBytes = delimiter.getBytes(Charsets.UTF_8);
recordDelimiterBytes = delimiter.getBytes(StandardCharsets.UTF_8);
}
return new LineRecordReader(job, (FileSplit) genericSplit,
recordDelimiterBytes);

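TextInputFormat encodes the configured record delimiter to bytes once, up front, and hands the raw bytes to LineRecordReader; an explicit charset guarantees every mapper splits records identically. A hedged sketch of the delimiter-scanning idea (greatly simplified, not the real LineRecordReader):

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

public class DelimiterSplit {
    // Split data on a multi-byte delimiter, comparing raw bytes.
    static List<String> split(byte[] data, byte[] delim) {
        List<String> records = new ArrayList<>();
        int start = 0;
        for (int i = 0; i + delim.length <= data.length; i++) {
            boolean match = true;
            for (int j = 0; j < delim.length; j++) {
                if (data[i + j] != delim[j]) { match = false; break; }
            }
            if (match) {
                records.add(new String(data, start, i - start, StandardCharsets.UTF_8));
                start = i + delim.length;
                i = start - 1; // resume scanning after the delimiter
            }
        }
        records.add(new String(data, start, data.length - start, StandardCharsets.UTF_8));
        return records;
    }

    public static void main(String[] args) {
        byte[] data = "a||b||c".getBytes(StandardCharsets.UTF_8);
        byte[] delim = "||".getBytes(StandardCharsets.UTF_8);
        System.out.println(split(data, delim)); // [a, b, c]
    }
}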
View File

@ -22,6 +22,7 @@
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
@ -63,7 +64,6 @@
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.yarn.api.records.ReservationId;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
@InterfaceAudience.Private
@InterfaceStability.Unstable
@ -409,7 +409,7 @@ private void readTokensFromFiles(Configuration conf, Credentials credentials)
for(Map.Entry<String, String> ent: nm.entrySet()) {
credentials.addSecretKey(new Text(ent.getKey()), ent.getValue()
.getBytes(Charsets.UTF_8));
.getBytes(StandardCharsets.UTF_8));
}
} catch (JsonMappingException | JsonParseException e) {
LOG.warn("couldn't parse Token Cache JSON file with user secret keys");

View File

@ -33,6 +33,7 @@
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.Map;
@ -72,7 +73,7 @@ public void print(PrintStream ps) throws IOException {
printTaskSummary();
printTasks();
writer = new OutputStreamWriter(ps, "UTF-8");
writer = new OutputStreamWriter(ps, StandardCharsets.UTF_8);
json.write(writer);
writer.flush();
} catch (JSONException je) {

View File

@ -32,7 +32,8 @@
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
import java.nio.charset.StandardCharsets;
/** An {@link InputFormat} for plain text files. Files are broken into lines.
* Either linefeed or carriage-return are used to signal end of line. Keys are
@ -49,7 +50,7 @@ public class TextInputFormat extends FileInputFormat<LongWritable, Text> {
"textinputformat.record.delimiter");
byte[] recordDelimiterBytes = null;
if (null != delimiter)
recordDelimiterBytes = delimiter.getBytes(Charsets.UTF_8);
recordDelimiterBytes = delimiter.getBytes(StandardCharsets.UTF_8);
return new LineRecordReader(recordDelimiterBytes);
}

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.mapreduce.lib.partition;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
@ -90,12 +90,7 @@ public int getPartition(K2 key, V2 value, int numReduceTasks) {
return getPartition(key.toString().hashCode(), numReduceTasks);
}
try {
keyBytes = key.toString().getBytes("UTF-8");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException("The current system does not " +
"support UTF-8 encoding!", e);
}
keyBytes = key.toString().getBytes(StandardCharsets.UTF_8);
// return 0 if the key is empty
if (keyBytes.length == 0) {
return 0;

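getPartition above hashes the key's UTF-8 bytes, so the encoding must be deterministic across JVMs: if two nodes disagreed on the charset, identical keys could land in different partitions. A toy sketch of byte-based partitioning under that assumption (the hash function is simplified relative to the real partitioner):

import java.nio.charset.StandardCharsets;

public class BytePartitioner {
    static int getPartition(String key, int numReduceTasks) {
        byte[] keyBytes = key.getBytes(StandardCharsets.UTF_8);
        if (keyBytes.length == 0) {
            return 0;
        }
        int hash = 0;
        for (byte b : keyBytes) {
            hash = 31 * hash + b;
        }
        // Mask off the sign bit so the modulus is non-negative.
        return (hash & Integer.MAX_VALUE) % numReduceTasks;
    }

    public static void main(String[] args) {
        System.out.println(getPartition("key-42", 10));
    }
}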
View File

@ -18,7 +18,7 @@
package org.apache.hadoop.mapreduce.lib.partition;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.ArrayList;
import java.util.StringTokenizer;
@ -61,13 +61,8 @@ public String toString() {
private boolean keySpecSeen = false;
public void setKeyFieldSeparator(String keyFieldSeparator) {
try {
this.keyFieldSeparator =
keyFieldSeparator.getBytes("UTF-8");
} catch (UnsupportedEncodingException e) {
throw new RuntimeException("The current system does not " +
"support UTF-8 encoding!", e);
}
this.keyFieldSeparator =
keyFieldSeparator.getBytes(StandardCharsets.UTF_8);
}
/** Required for backcompatibility with num.key.fields.for.partition in

View File

@ -23,6 +23,7 @@
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import javax.crypto.SecretKey;
import javax.servlet.http.HttpServletRequest;
@ -34,7 +35,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
/**
*
@ -56,7 +56,7 @@ public class SecureShuffleUtils {
*/
public static String generateHash(byte[] msg, SecretKey key) {
return new String(Base64.encodeBase64(generateByteHash(msg, key)),
Charsets.UTF_8);
StandardCharsets.UTF_8);
}
/**
@ -70,7 +70,6 @@ private static byte[] generateByteHash(byte[] msg, SecretKey key) {
/**
* verify that hash equals to HMacHash(msg)
* @param newHash
* @return true if is the same
*/
private static boolean verifyHash(byte[] hash, byte[] msg, SecretKey key) {
@ -87,7 +86,7 @@ private static boolean verifyHash(byte[] hash, byte[] msg, SecretKey key) {
*/
public static String hashFromString(String enc_str, SecretKey key)
throws IOException {
return generateHash(enc_str.getBytes(Charsets.UTF_8), key);
return generateHash(enc_str.getBytes(StandardCharsets.UTF_8), key);
}
/**
@ -98,9 +97,9 @@ public static String hashFromString(String enc_str, SecretKey key)
*/
public static void verifyReply(String base64Hash, String msg, SecretKey key)
throws IOException {
byte[] hash = Base64.decodeBase64(base64Hash.getBytes(Charsets.UTF_8));
byte[] hash = Base64.decodeBase64(base64Hash.getBytes(StandardCharsets.UTF_8));
boolean res = verifyHash(hash, msg.getBytes(Charsets.UTF_8), key);
boolean res = verifyHash(hash, msg.getBytes(StandardCharsets.UTF_8), key);
if(res != true) {
throw new IOException("Verification of the hashReply failed");
@ -148,7 +147,7 @@ public static String toHex(byte[] ba) {
for (byte b : ba) {
ps.printf("%x", b);
}
strHex = baos.toString("UTF-8");
strHex = new String(baos.toByteArray(), StandardCharsets.UTF_8);
} catch (UnsupportedEncodingException e) {
}
return strHex;

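toHex above keeps its try/catch because the PrintStream(OutputStream, boolean, String) constructor still declares UnsupportedEncodingException; only the final decode loses its checked exception by switching to new String(baos.toByteArray(), StandardCharsets.UTF_8). On Java 10+, ByteArrayOutputStream.toString(Charset) would be tidier still, but the byte-array form stays Java 8 compatible. A runnable sketch of the idiom (the wrapper class is illustrative):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

public class HexDemo {
    static String toHex(byte[] ba) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (PrintStream ps = new PrintStream(baos, false, "UTF-8")) {
            for (byte b : ba) {
                ps.printf("%x", b);
            }
        } catch (UnsupportedEncodingException e) {
            // Cannot happen: "UTF-8" is always supported.
        }
        // No checked exception here, unlike baos.toString("UTF-8").
        return new String(baos.toByteArray(), StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        System.out.println(toHex(new byte[] {10, 60, -1})); // a3cff
    }
}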
Some files were not shown because too many files have changed in this diff.