HADOOP-19041. Use StandardCharsets in more places (#6449)
Parent: 347521c95d
Commit: 97c5a6efba
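The change below is mechanical throughout the patch: every `"UTF-8"`/`"UTF8"`/`"utf-8"` charset-name string passed to `String.getBytes`, `new String(...)`, `InputStreamReader`, or `OutputStreamWriter` is replaced with the `java.nio.charset.StandardCharsets.UTF_8` constant. A minimal sketch of the before/after pattern, with illustrative class and method names not taken from the patch:

```java
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

class CharsetExample {
  // Old style: the charset is named by a String, so the compiler forces
  // handling of UnsupportedEncodingException even though UTF-8 always exists.
  static byte[] oldStyle(String s) {
    try {
      return s.getBytes("UTF-8");
    } catch (UnsupportedEncodingException e) {
      throw new IllegalStateException(e); // can never happen for UTF-8
    }
  }

  // New style: the Charset overload never throws and skips the by-name
  // charset lookup on every call.
  static byte[] newStyle(String s) {
    return s.getBytes(StandardCharsets.UTF_8);
  }
}
```

The first hunk below shows the same effect in context: because the `Charset` overloads cannot throw `UnsupportedEncodingException`, the surrounding try/catch blocks disappear along with the change.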
@@ -21,12 +21,11 @@
 import java.io.File;
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.lang.reflect.InvocationTargetException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.nio.ByteBuffer;
-import java.nio.charset.IllegalCharsetNameException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.Iterator;

@@ -426,12 +425,8 @@ DER get(int... tags) {
 }
 String getAsString() {
-try {
 return new String(bb.array(), bb.arrayOffset() + bb.position(),
-bb.remaining(), "UTF-8");
-} catch (UnsupportedEncodingException e) {
-throw new IllegalCharsetNameException("UTF-8"); // won't happen.
-}
+bb.remaining(), StandardCharsets.UTF_8);
 }
 @Override

@@ -3565,7 +3565,7 @@ private void checkForOverride(Properties properties, String name, String attr, S
 * @throws IOException raised on errors performing I/O.
 */
 public void writeXml(OutputStream out) throws IOException {
-writeXml(new OutputStreamWriter(out, "UTF-8"));
+writeXml(new OutputStreamWriter(out, StandardCharsets.UTF_8));
 }
 public void writeXml(Writer out) throws IOException {
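The same substitution applies to stream readers and writers, as in the `writeXml` hunk above: the `OutputStreamWriter(OutputStream, Charset)` and `InputStreamReader(InputStream, Charset)` constructors do not declare `UnsupportedEncodingException`. A small sketch of the pattern; the file name is made up for illustration:

```java
import java.io.*;
import java.nio.charset.StandardCharsets;

public class StreamCharsets {
  public static void main(String[] args) throws IOException {
    File f = new File("example.txt"); // illustrative path
    try (Writer w = new OutputStreamWriter(
        new FileOutputStream(f), StandardCharsets.UTF_8)) {
      w.write("héllo\n"); // written as UTF-8 regardless of the platform default
    }
    try (BufferedReader r = new BufferedReader(new InputStreamReader(
        new FileInputStream(f), StandardCharsets.UTF_8))) {
      System.out.println(r.readLine());
    }
  }
}
```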
@@ -18,6 +18,7 @@
 package org.apache.hadoop.fs;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.binary.Base64;

@@ -76,7 +77,7 @@ public static byte[] decodeValue(String value) throws IOException {
 String en = value.substring(0, 2);
 if (value.startsWith("\"") && value.endsWith("\"")) {
 value = value.substring(1, value.length()-1);
-result = value.getBytes("utf-8");
+result = value.getBytes(StandardCharsets.UTF_8);
 } else if (en.equalsIgnoreCase(HEX_PREFIX)) {
 value = value.substring(2, value.length());
 try {

@@ -90,7 +91,7 @@ public static byte[] decodeValue(String value) throws IOException {
 }
 }
 if (result == null) {
-result = value.getBytes("utf-8");
+result = value.getBytes(StandardCharsets.UTF_8);
 }
 }
 return result;

@@ -114,7 +115,7 @@ public static String encodeValue(byte[] value, XAttrCodec encoding)
 } else if (encoding == BASE64) {
 return BASE64_PREFIX + base64.encodeToString(value);
 } else {
-return "\"" + new String(value, "utf-8") + "\"";
+return "\"" + new String(value, StandardCharsets.UTF_8) + "\"";
 }
 }
 }

@@ -387,7 +387,7 @@ public void testMultiByteCharacters() throws IOException {
 String name = "multi_byte_\u611b_name";
 String value = "multi_byte_\u0641_value";
 out = new BufferedWriter(new OutputStreamWriter(
-new FileOutputStream(CONFIG_MULTI_BYTE), "UTF-8"));
+new FileOutputStream(CONFIG_MULTI_BYTE), StandardCharsets.UTF_8));
 startConfig();
 declareProperty(name, value, value);
 endConfig();

@@ -32,6 +32,7 @@
 import java.io.*;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumSet;

@@ -673,7 +674,7 @@ public void testFSOutputStreamBuilder() throws Exception {
 fileSys.createFile(path).recursive();
 FSDataOutputStream out = builder.build();
 String content = "Create with a generic type of createFile!";
-byte[] contentOrigin = content.getBytes("UTF8");
+byte[] contentOrigin = content.getBytes(StandardCharsets.UTF_8);
 out.write(contentOrigin);
 out.close();

@@ -106,7 +106,7 @@ static URL createURL(Path path, Map<String, String> params, Map<String,
 if (multiValuedParams != null) {
 for (Map.Entry<String, List<String>> multiValuedEntry :
 multiValuedParams.entrySet()) {
-String name = URLEncoder.encode(multiValuedEntry.getKey(), "UTF8");
+String name = URLEncoder.encode(multiValuedEntry.getKey(), "UTF-8");
 List<String> values = multiValuedEntry.getValue();
 for (String value : values) {
 sb.append(separator).append(name).append("=").
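Note that the `createURL` hunk above keeps a string argument and only normalizes the alias "UTF8" to the canonical name "UTF-8": `URLEncoder.encode(String, String)` takes a charset *name*, and the `Charset`-typed overload only exists on Java 10+, which this code presumably still cannot assume. A hedged sketch of both options:

```java
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

class UrlEncoding {
  // Works on Java 8: the charset is passed by its canonical name.
  static String encodeJava8(String s) throws UnsupportedEncodingException {
    return URLEncoder.encode(s, StandardCharsets.UTF_8.name());
  }

  // Java 10+ only: Charset overload, no checked exception to handle.
  static String encodeJava10(String s) {
    return URLEncoder.encode(s, StandardCharsets.UTF_8);
  }
}
```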
@@ -26,6 +26,7 @@
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.Iterator;

@@ -148,7 +149,7 @@ TextWriter createWriter(Path file, CompressionCodec codec, String delim,
 }
 OutputStream tmp = fs.create(file);
 java.io.Writer out = new BufferedWriter(new OutputStreamWriter(
-(null == codec) ? tmp : codec.createOutputStream(tmp), "UTF-8"));
+(null == codec) ? tmp : codec.createOutputStream(tmp), StandardCharsets.UTF_8));
 return new TextWriter(out, delim);
 }

@@ -379,7 +380,7 @@ public Iterator<FileRegion> iterator() {
 FRIterator i = new FRIterator();
 try {
 BufferedReader r =
-new BufferedReader(new InputStreamReader(createStream(), "UTF-8"));
+new BufferedReader(new InputStreamReader(createStream(), StandardCharsets.UTF_8));
 iterators.put(i, r);
 i.pending = nextInternal(i);
 } catch (IOException e) {

@@ -29,6 +29,7 @@
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayDeque;
 import java.util.Arrays;
 import java.util.Queue;

@@ -1063,7 +1064,7 @@ void receiveBlock(
 // send a special ack upstream.
 if (datanode.isRestarting() && isClient && !isTransfer) {
 try (Writer out = new OutputStreamWriter(
-replicaInfo.createRestartMetaStream(), "UTF-8")) {
+replicaInfo.createRestartMetaStream(), StandardCharsets.UTF_8)) {
 // write out the current time.
 out.write(Long.toString(Time.now() + restartBudget));
 out.flush();

@@ -28,6 +28,7 @@
 import java.io.OutputStreamWriter;
 import java.io.RandomAccessFile;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.ArrayList;
 import java.util.Arrays;

@@ -399,7 +400,7 @@ void saveDfsUsed() {
 try {
 long used = getDfsUsed();
 try (Writer out = new OutputStreamWriter(
-Files.newOutputStream(outFile.toPath()), "UTF-8")) {
+Files.newOutputStream(outFile.toPath()), StandardCharsets.UTF_8)) {
 // mtime is written last, so that truncated writes won't be valid.
 out.write(Long.toString(used) + " " + Long.toString(timer.now()));
 // This is only called as part of the volume shutdown.

@@ -27,6 +27,7 @@
 import java.io.RandomAccessFile;
 import java.net.URI;
 import java.nio.channels.ClosedChannelException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Paths;
 import java.nio.file.StandardCopyOption;
 import java.util.Collection;

@@ -929,7 +930,7 @@ public void save() throws IOException {
 boolean success = false;
 try (BufferedWriter writer = new BufferedWriter(
 new OutputStreamWriter(fileIoProvider.getFileOutputStream(
-FsVolumeImpl.this, getTempSaveFile()), "UTF-8"))) {
+FsVolumeImpl.this, getTempSaveFile()), StandardCharsets.UTF_8))) {
 WRITER.writeValue(writer, state);
 success = true;
 } finally {

@@ -66,6 +66,7 @@
 import java.io.InputStreamReader;
 import java.net.InetSocketAddress;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.text.DateFormat;
 import java.util.*;
 import java.util.concurrent.TimeUnit;

@@ -740,7 +741,7 @@ private static Options buildCliOptions() {
 private static String[] readPathFile(String file) throws IOException {
 List<String> list = Lists.newArrayList();
 BufferedReader reader = new BufferedReader(
-new InputStreamReader(new FileInputStream(file), "UTF-8"));
+new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8));
 try {
 String line;
 while ((line = reader.readLine()) != null) {

@@ -26,6 +26,7 @@
 import java.net.URL;
 import java.net.URLConnection;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.concurrent.TimeUnit;

@@ -207,7 +208,7 @@ private Integer listCorruptFileBlocks(String dir, String baseUrl)
 }
 InputStream stream = connection.getInputStream();
 BufferedReader input = new BufferedReader(new InputStreamReader(
-stream, "UTF-8"));
+stream, StandardCharsets.UTF_8));
 try {
 String line = null;
 while ((line = input.readLine()) != null) {

@@ -376,7 +377,7 @@ else if (args[idx].equals("-replicaDetails")) {
 }
 InputStream stream = connection.getInputStream();
 BufferedReader input = new BufferedReader(new InputStreamReader(
-stream, "UTF-8"));
+stream, StandardCharsets.UTF_8));
 String line = null;
 String lastLine = NamenodeFsck.CORRUPT_STATUS;
 int errCode = -1;

@@ -622,7 +622,7 @@ private INodeSection.INode.Builder processINodeXml(Node node)
 inodeBld.setId(id);
 String name = node.removeChildStr(SECTION_NAME);
 if (name != null) {
-inodeBld.setName(ByteString.copyFrom(name, "UTF8"));
+inodeBld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
 }
 switch (type) {
 case "FILE":

@@ -838,7 +838,7 @@ private void processSymlinkXml(Node node,
 }
 String target = node.removeChildStr(INODE_SECTION_TARGET);
 if (target != null) {
-bld.setTarget(ByteString.copyFrom(target, "UTF8"));
+bld.setTarget(ByteString.copyFrom(target, StandardCharsets.UTF_8));
 }
 Long lval = node.removeChildLong(INODE_SECTION_MTIME);
 if (lval != null) {

@@ -900,7 +900,7 @@ private INodeSection.XAttrFeatureProto.Builder xattrsXmlToProto(Node xattrs)
 }
 val = new HexBinaryAdapter().unmarshal(valHex);
 } else {
-val = valStr.getBytes("UTF8");
+val = valStr.getBytes(StandardCharsets.UTF_8);
 }
 b.setValue(ByteString.copyFrom(val));

@@ -1232,7 +1232,7 @@ public void process() throws IOException {
 }
 String name = inodeRef.removeChildStr("name");
 if (name != null) {
-bld.setName(ByteString.copyFrom(name, "UTF8"));
+bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
 }
 Integer dstSnapshotId = inodeRef.removeChildInt(
 INODE_REFERENCE_SECTION_DST_SNAPSHOT_ID);

@@ -1468,7 +1468,7 @@ private void processDirDiffEntry() throws IOException {
 bld.setChildrenSize(childrenSize);
 String name = dirDiff.removeChildStr(SECTION_NAME);
 if (name != null) {
-bld.setName(ByteString.copyFrom(name, "UTF8"));
+bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
 }
 Node snapshotCopy = dirDiff.removeChild(
 SNAPSHOT_DIFF_SECTION_SNAPSHOT_COPY);

@@ -1514,7 +1514,7 @@ private void processDirDiffEntry() throws IOException {
 }
 created.verifyNoRemainingKeys("created");
 FsImageProto.SnapshotDiffSection.CreatedListEntry.newBuilder().
-setName(ByteString.copyFrom(cleName, "UTF8")).
+setName(ByteString.copyFrom(cleName, StandardCharsets.UTF_8)).
 build().writeDelimitedTo(out);
 actualCreatedListSize++;
 }

@@ -1571,7 +1571,7 @@ private void processFileDiffEntry() throws IOException {
 }
 String name = fileDiff.removeChildStr(SECTION_NAME);
 if (name != null) {
-bld.setName(ByteString.copyFrom(name, "UTF8"));
+bld.setName(ByteString.copyFrom(name, StandardCharsets.UTF_8));
 }
 Node snapshotCopy = fileDiff.removeChild(
 SNAPSHOT_DIFF_SECTION_SNAPSHOT_COPY);
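The fsimage XML-reconstruction hunks above rely on protobuf's `ByteString.copyFrom(String, Charset)` overload (its existence is implied by the new code), which avoids the checked exception of the name-based variant. A small sketch with an illustrative method name:

```java
import com.google.protobuf.ByteString;
import java.nio.charset.StandardCharsets;

class ByteStringExample {
  static ByteString nameBytes(String name) {
    // Charset overload: no UnsupportedEncodingException to handle.
    return ByteString.copyFrom(name, StandardCharsets.UTF_8);
    // ByteString.copyFromUtf8(name) would be equivalent here.
  }
}
```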
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs;
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.security.Permission;
 import java.security.PrivilegedExceptionAction;
 import java.text.SimpleDateFormat;

@@ -1917,7 +1918,7 @@ private static void corrupt(
 char c = content.charAt(0);
 sb.setCharAt(0, ++c);
 for(MaterializedReplica replica : replicas) {
-replica.corruptData(sb.toString().getBytes("UTF8"));
+replica.corruptData(sb.toString().getBytes(StandardCharsets.UTF_8));
 }
 }

@@ -45,6 +45,7 @@
 import java.net.ServerSocket;
 import java.net.SocketTimeoutException;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;

@@ -1885,7 +1886,7 @@ public void testDFSDataOutputStreamBuilderForCreation() throws Exception {
 .replication((short) 1)
 .blockSize(4096)
 .build()) {
-byte[] contentOrigin = content.getBytes("UTF8");
+byte[] contentOrigin = content.getBytes(StandardCharsets.UTF_8);
 out1.write(contentOrigin);
 }

@@ -194,10 +194,10 @@ private static void writeFile(FileSystem fs, Path path, byte[] buffer,
 */
 public static Configuration initializeStorageStateConf(int numDirs,
 Configuration conf) {
-StringBuffer nameNodeDirs =
-new StringBuffer(new File(TEST_ROOT_DIR, "name1").toString());
-StringBuffer dataNodeDirs =
-new StringBuffer(new File(TEST_ROOT_DIR, "data1").toString());
+StringBuilder nameNodeDirs =
+new StringBuilder(new File(TEST_ROOT_DIR, "name1").toString());
+StringBuilder dataNodeDirs =
+new StringBuilder(new File(TEST_ROOT_DIR, "data1").toString());
 for (int i = 2; i <= numDirs; i++) {
 nameNodeDirs.append("," + new File(TEST_ROOT_DIR, "name"+i));
 dataNodeDirs.append("," + new File(TEST_ROOT_DIR, "data"+i));
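Several hunks, like the one just above, also swap `StringBuffer` for `StringBuilder`. The two classes share the same API, but `StringBuffer` synchronizes every call, which is wasted cost when the buffer never leaves a single thread, as in all of the code touched here. A minimal sketch with made-up names:

```java
class JoinDirs {
  // StringBuilder: same append/toString API as StringBuffer, minus the
  // per-call synchronization that a method-local buffer never needs.
  static String joinDirs(String first, String... rest) {
    StringBuilder dirs = new StringBuilder(first);
    for (String d : rest) {
      dirs.append(',').append(d);
    }
    return dirs.toString();
  }
}
```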
@@ -33,6 +33,7 @@
 import java.io.Writer;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;

@@ -147,7 +148,7 @@ public FileRegion next() {
 newFile.getAbsolutePath());
 newFile.createNewFile();
 Writer writer = new OutputStreamWriter(
-new FileOutputStream(newFile.getAbsolutePath()), "utf-8");
+new FileOutputStream(newFile.getAbsolutePath()), StandardCharsets.UTF_8);
 for(int i=0; i< BLK_LEN/(Integer.SIZE/8); i++) {
 writer.write(currentCount);
 }

@@ -39,6 +39,7 @@
 import static org.mockito.Mockito.mock;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.function.LongFunction;

@@ -433,7 +434,7 @@ private static INodeDirectory createINodeDirectory(INodeDirectory parent,
 PermissionStatus permStatus = PermissionStatus.createImmutable(owner, group,
 FsPermission.createImmutable(perm));
 INodeDirectory inodeDirectory = new INodeDirectory(
-HdfsConstants.GRANDFATHER_INODE_ID, name.getBytes("UTF-8"), permStatus, 0L);
+HdfsConstants.GRANDFATHER_INODE_ID, name.getBytes(StandardCharsets.UTF_8), permStatus, 0L);
 parent.addChild(inodeDirectory);
 return inodeDirectory;
 }

@@ -443,8 +444,8 @@ private static INodeFile createINodeFile(INodeDirectory parent, String name,
 PermissionStatus permStatus = PermissionStatus.createImmutable(owner, group,
 FsPermission.createImmutable(perm));
 INodeFile inodeFile = new INodeFile(HdfsConstants.GRANDFATHER_INODE_ID,
-name.getBytes("UTF-8"), permStatus, 0L, 0L, null, REPLICATION,
-PREFERRED_BLOCK_SIZE);
+name.getBytes(StandardCharsets.UTF_8), permStatus, 0L, 0L, null,
+REPLICATION, PREFERRED_BLOCK_SIZE);
 parent.addChild(inodeFile);
 return inodeFile;
 }

@@ -316,7 +316,7 @@ public void testLengthParamLongerThanFile() throws IOException {
 String content = "testLengthParamLongerThanFile";
 FSDataOutputStream testFileOut = webhdfs.create(testFile);
 try {
-testFileOut.write(content.getBytes("US-ASCII"));
+testFileOut.write(content.getBytes(StandardCharsets.US_ASCII));
 } finally {
 IOUtils.closeStream(testFileOut);
 }

@@ -366,7 +366,7 @@ public void testOffsetPlusLengthParamsLongerThanFile() throws IOException {
 String content = "testOffsetPlusLengthParamsLongerThanFile";
 FSDataOutputStream testFileOut = webhdfs.create(testFile);
 try {
-testFileOut.write(content.getBytes("US-ASCII"));
+testFileOut.write(content.getBytes(StandardCharsets.US_ASCII));
 } finally {
 IOUtils.closeStream(testFileOut);
 }

@@ -210,7 +210,7 @@ protected void doUnregistration()
 || jobImpl.getInternalState() == JobStateInternal.ERROR) {
 finishState = FinalApplicationStatus.FAILED;
 }
-StringBuffer sb = new StringBuffer();
+StringBuilder sb = new StringBuilder();
 for (String s : job.getDiagnostics()) {
 sb.append(s).append("\n");
 }

@@ -120,7 +120,7 @@ public JobInfo(Job job, Boolean hasAccess) {
 List<String> diagnostics = job.getDiagnostics();
 if (diagnostics != null && !diagnostics.isEmpty()) {
-StringBuffer b = new StringBuffer();
+StringBuilder b = new StringBuilder();
 for (String diag : diagnostics) {
 b.append(diag);
 }

@@ -32,6 +32,7 @@
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.jar.JarOutputStream;
 import java.util.zip.ZipEntry;

@@ -339,12 +340,12 @@ private Path makeJar(Path p, int index) throws FileNotFoundException,
 IOException {
 FileOutputStream fos =
 new FileOutputStream(new File(p.toUri().getPath()));
-JarOutputStream jos = new JarOutputStream(fos);
+try (JarOutputStream jos = new JarOutputStream(fos)) {
 ZipEntry ze = new ZipEntry("distributed.jar.inside" + index);
 jos.putNextEntry(ze);
 jos.write(("inside the jar!" + index).getBytes());
 jos.closeEntry();
-jos.close();
+}
 localFs.setPermission(p, new FsPermission("700"));
 return p;
 }

@@ -354,12 +355,12 @@ private Path makeArchive(String archiveFile, String filename)
 Path archive = new Path(testRootDir, archiveFile);
 Path file = new Path(testRootDir, filename);
 DataOutputStream out = localFs.create(archive);
-ZipOutputStream zos = new ZipOutputStream(out);
+try (ZipOutputStream zos = new ZipOutputStream(out)) {
 ZipEntry ze = new ZipEntry(file.toString());
 zos.putNextEntry(ze);
-zos.write(input.getBytes("UTF-8"));
+zos.write(input.getBytes(StandardCharsets.UTF_8));
 zos.closeEntry();
-zos.close();
+}
 return archive;
 }
 }
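The `makeJar`/`makeArchive` hunks above also convert manual `close()` calls into try-with-resources, so the archive stream is closed even if writing an entry throws. A sketch of the combined pattern; the path and entry names are illustrative:

```java
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

class ZipExample {
  static void writeEntry(String path, String entryName, String content)
      throws IOException {
    // try-with-resources closes zos (and the underlying stream) on all paths.
    try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(path))) {
      zos.putNextEntry(new ZipEntry(entryName));
      zos.write(content.getBytes(StandardCharsets.UTF_8));
      zos.closeEntry();
    }
  }
}
```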
@@ -92,7 +92,7 @@ public void after() {
 private static LineReader makeStream(String str) throws IOException {
 return new LineReader(new ByteArrayInputStream(
-str.getBytes("UTF-8")), defaultConf);
+str.getBytes(StandardCharsets.UTF_8)), defaultConf);
 }
 private static void writeFile(FileSystem fs, Path name,

@@ -26,6 +26,7 @@
 import org.junit.Test;
 import static org.junit.Assert.assertEquals;
+import java.nio.charset.StandardCharsets;
 import java.text.NumberFormat;
 public class TestFieldSelection {

@@ -60,7 +61,7 @@ public static void launch() throws Exception {
 TestMRFieldSelection.constructInputOutputData(inputData,
 expectedOutput, numOfInputLines);
 FSDataOutputStream fileOut = fs.create(new Path(INPUT_DIR, inputFile));
-fileOut.write(inputData.toString().getBytes("utf-8"));
+fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8));
 fileOut.close();
 System.out.println("inputData:");

@@ -30,6 +30,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;

@@ -134,9 +135,7 @@ public void testFormat() throws Exception {
 }
 }
 private LineReader makeStream(String str) throws IOException {
-return new LineReader(new ByteArrayInputStream
-(str.getBytes("UTF-8")),
-defaultConf);
+return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf);
 }
 @Test
 public void testUTF8() throws Exception {

@@ -42,6 +42,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.*;
 public class TestTextInputFormat {

@@ -330,14 +331,10 @@ private void verifyPartitions(int length, int numSplits, Path file,
 }
 private static LineReader makeStream(String str) throws IOException {
-return new LineReader(new ByteArrayInputStream
-(str.getBytes("UTF-8")),
-defaultConf);
+return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf);
 }
 private static LineReader makeStream(String str, int bufsz) throws IOException {
-return new LineReader(new ByteArrayInputStream
-(str.getBytes("UTF-8")),
-bufsz);
+return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), bufsz);
 }
 @Test (timeout=5000)
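In the line-reader tests above the patch statically imports `java.nio.charset.StandardCharsets.UTF_8`, which lets the previously wrapped constructor calls collapse to a single line. A sketch of the resulting style, reduced to plain JDK types so it stands alone:

```java
import static java.nio.charset.StandardCharsets.UTF_8;

import java.io.ByteArrayInputStream;
import java.io.InputStream;

class MakeStream {
  // The static import keeps call sites short: getBytes(UTF_8).
  static InputStream makeStream(String str) {
    return new ByteArrayInputStream(str.getBytes(UTF_8));
  }
}
```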
@@ -26,6 +26,7 @@
 import static org.junit.Assert.assertEquals;
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.text.NumberFormat;

@@ -55,8 +56,8 @@ public static void launch() throws Exception {
 fs.mkdirs(INPUT_DIR);
 fs.delete(OUTPUT_DIR, true);
-StringBuffer inputData = new StringBuffer();
-StringBuffer expectedOutput = new StringBuffer();
+StringBuilder inputData = new StringBuilder();
+StringBuilder expectedOutput = new StringBuilder();
 expectedOutput.append("max\t19\n");
 expectedOutput.append("min\t1\n");

@@ -76,7 +77,7 @@ public static void launch() throws Exception {
 expectedOutput.append("uniq_count\t15\n");
-fileOut.write(inputData.toString().getBytes("utf-8"));
+fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8));
 fileOut.close();
 System.out.println("inputData:");

@@ -30,6 +30,7 @@
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
 import org.junit.Test;
+import java.nio.charset.StandardCharsets;
 import java.text.NumberFormat;
 import static org.junit.Assert.assertEquals;

@@ -61,8 +62,8 @@ public static void launch() throws Exception {
 fs.mkdirs(INPUT_DIR);
 fs.delete(OUTPUT_DIR, true);
-StringBuffer inputData = new StringBuffer();
-StringBuffer expectedOutput = new StringBuffer();
+StringBuilder inputData = new StringBuilder();
+StringBuilder expectedOutput = new StringBuilder();
 expectedOutput.append("max\t19\n");
 expectedOutput.append("min\t1\n");

@@ -82,7 +83,7 @@ public static void launch() throws Exception {
 expectedOutput.append("uniq_count\t15\n");
-fileOut.write(inputData.toString().getBytes("utf-8"));
+fileOut.write(inputData.toString().getBytes(StandardCharsets.UTF_8));
 fileOut.close();
 System.out.println("inputData:");

@@ -45,6 +45,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.junit.Assert.*;
 public class TestMRKeyValueTextInputFormat {

@@ -253,9 +254,7 @@ public void testSplitableCodecs() throws Exception {
 }
 private LineReader makeStream(String str) throws IOException {
-return new LineReader(new ByteArrayInputStream
-(str.getBytes("UTF-8")),
-defaultConf);
+return new LineReader(new ByteArrayInputStream(str.getBytes(UTF_8)), defaultConf);
 }
 @Test

@@ -14,6 +14,7 @@
 package org.apache.hadoop.maven.plugin.cmakebuilder;
+import java.nio.charset.StandardCharsets;
 import java.util.Locale;
 import org.apache.hadoop.maven.plugin.util.Exec;
 import org.apache.maven.execution.MavenSession;

@@ -165,7 +166,7 @@ private void writeStatusFile(String status) throws IOException {
 testName + ".pstatus"));
 BufferedWriter out = null;
 try {
-out = new BufferedWriter(new OutputStreamWriter(fos, "UTF8"));
+out = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8));
 out.write(status + "\n");
 } finally {
 if (out != null) {

@@ -691,7 +691,7 @@ public void map(LongWritable key, HarEntry value,
 if (value.isDir()) {
 towrite = encodeName(relPath.toString())
 + " dir " + propStr + " 0 0 ";
-StringBuffer sbuff = new StringBuffer();
+StringBuilder sbuff = new StringBuilder();
 sbuff.append(towrite);
 for (String child: value.children) {
 sbuff.append(encodeName(child) + " ");

@@ -24,6 +24,7 @@
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;

@@ -98,12 +99,7 @@ public void signRequest(HttpURLConnection connection, final long contentLength)
 }
 private String computeHmac256(final String stringToSign) {
-byte[] utf8Bytes;
-try {
-utf8Bytes = stringToSign.getBytes(AbfsHttpConstants.UTF_8);
-} catch (final UnsupportedEncodingException e) {
-throw new IllegalArgumentException(e);
-}
+byte[] utf8Bytes = stringToSign.getBytes(StandardCharsets.UTF_8);
 byte[] hmac;
 synchronized (this) {
 hmac = hmacSha256.doFinal(utf8Bytes);

@@ -390,7 +390,7 @@ public void testProperties() throws Exception {
 fs.create(reqPath).close();
 final String propertyName = "user.mime_type";
-final byte[] propertyValue = "text/plain".getBytes("utf-8");
+final byte[] propertyValue = "text/plain".getBytes(StandardCharsets.UTF_8);
 fs.setXAttr(reqPath, propertyName, propertyValue);
 assertArrayEquals(propertyValue, fs.getXAttr(reqPath, propertyName));

@@ -28,6 +28,7 @@
 import java.nio.ByteBuffer;
 import java.nio.channels.Channels;
 import java.nio.channels.ReadableByteChannel;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;

@@ -180,7 +181,7 @@ public void setSeed() throws Exception {
 LOG.info("Creating " + newFile.toString());
 newFile.createNewFile();
 Writer writer = new OutputStreamWriter(
-new FileOutputStream(newFile.getAbsolutePath()), "utf-8");
+new FileOutputStream(newFile.getAbsolutePath()), StandardCharsets.UTF_8);
 for(int j=0; j < baseFileLen*i; j++) {
 writer.write("0");
 }

@@ -222,7 +222,7 @@ public String toString() {
 // Gets the stringified version of DataStatistics
 static String stringifyDataStatistics(DataStatistics stats) {
 if (stats != null) {
-StringBuffer buffer = new StringBuffer();
+StringBuilder buffer = new StringBuilder();
 String compressionStatus = stats.isDataCompressed()
 ? "Compressed"
 : "Uncompressed";

@@ -128,7 +128,7 @@ static URL qualifyHost(URL url) {
 static final String regexpSpecials = "[]()?*+|.!^-\\~@";
 public static String regexpEscape(String plain) {
-StringBuffer buf = new StringBuffer();
+StringBuilder buf = new StringBuilder();
 char[] ch = plain.toCharArray();
 int csup = ch.length;
 for (int c = 0; c < csup; c++) {
|
||||
package org.apache.hadoop.streaming;
|
||||
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import org.apache.hadoop.fs.FileUtil;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;
|
||||
|
||||
import org.junit.Test;
|
||||
@ -59,7 +56,7 @@ protected void createInput() throws IOException
|
||||
{
|
||||
DataOutputStream out = new DataOutputStream(
|
||||
new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
|
||||
out.write(input.getBytes("UTF-8"));
|
||||
out.write(input.getBytes(StandardCharsets.UTF_8));
|
||||
out.close();
|
||||
}
|
||||
|
||||
|
@ -19,11 +19,8 @@
|
||||
package org.apache.hadoop.streaming;
|
||||
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import org.apache.hadoop.fs.FileUtil;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import org.junit.Test;
|
||||
@ -56,7 +53,7 @@ protected void createInput() throws IOException
|
||||
{
|
||||
DataOutputStream out = new DataOutputStream(
|
||||
new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
|
||||
out.write(input.getBytes("UTF-8"));
|
||||
out.write(input.getBytes(StandardCharsets.UTF_8));
|
||||
out.close();
|
||||
}
|
||||
|
||||
|
@ -21,6 +21,7 @@
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
/**
|
||||
* This class tests StreamXmlRecordReader
|
||||
@ -44,9 +45,9 @@ protected void createInput() throws IOException
|
||||
FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile());
|
||||
String dummyXmlStartTag = "<PATTERN>\n";
|
||||
String dummyXmlEndTag = "</PATTERN>\n";
|
||||
out.write(dummyXmlStartTag.getBytes("UTF-8"));
|
||||
out.write(input.getBytes("UTF-8"));
|
||||
out.write(dummyXmlEndTag.getBytes("UTF-8"));
|
||||
out.write(dummyXmlStartTag.getBytes(StandardCharsets.UTF_8));
|
||||
out.write(input.getBytes(StandardCharsets.UTF_8));
|
||||
out.write(dummyXmlEndTag.getBytes(StandardCharsets.UTF_8));
|
||||
out.close();
|
||||
}
|
||||
|
||||
|
@ -19,6 +19,7 @@
|
||||
package org.apache.hadoop.streaming;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
@ -120,7 +121,7 @@ protected void createInput() throws IOException
|
||||
{
|
||||
DataOutputStream out = getFileSystem().create(new Path(
|
||||
INPUT_FILE.getPath()));
|
||||
out.write(getInputData().getBytes("UTF-8"));
|
||||
out.write(getInputData().getBytes(StandardCharsets.UTF_8));
|
||||
out.close();
|
||||
}
|
||||
|
||||
|
@ -22,6 +22,7 @@
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import org.apache.hadoop.fs.FileUtil;
|
||||
import org.apache.hadoop.mapreduce.MRJobConfig;
|
||||
@ -68,7 +69,7 @@ protected void createInput() throws IOException
|
||||
{
|
||||
DataOutputStream out = new DataOutputStream(
|
||||
new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
|
||||
out.write(input.getBytes("UTF-8"));
|
||||
out.write(input.getBytes(StandardCharsets.UTF_8));
|
||||
out.close();
|
||||
}
|
||||
|
||||
|
@ -22,11 +22,8 @@
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import org.apache.hadoop.fs.FileUtil;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
|
||||
/**
|
||||
* This class tests hadoopStreaming with customized separator in MapReduce local mode.
|
||||
@ -64,7 +61,7 @@ protected void createInput() throws IOException
|
||||
{
|
||||
DataOutputStream out = new DataOutputStream(
|
||||
new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
|
||||
out.write(input.getBytes("UTF-8"));
|
||||
out.write(input.getBytes(StandardCharsets.UTF_8));
|
||||
out.close();
|
||||
}
|
||||
|
||||
|
@ -22,6 +22,7 @@
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileUtil;
|
||||
@ -48,7 +49,7 @@ public TestTypedBytesStreaming() throws IOException {
|
||||
|
||||
protected void createInput() throws IOException {
|
||||
DataOutputStream out = new DataOutputStream(new FileOutputStream(INPUT_FILE.getAbsoluteFile()));
|
||||
out.write(input.getBytes("UTF-8"));
|
||||
out.write(input.getBytes(StandardCharsets.UTF_8));
|
||||
out.close();
|
||||
}
|
||||
|
||||
|
@ -101,9 +101,9 @@ public void createInput() throws IOException {
|
||||
FileOutputStream out = new FileOutputStream(INPUT_FILE.getAbsoluteFile());
|
||||
String dummyXmlStartTag = "<PATTERN>\n";
|
||||
String dummyXmlEndTag = "</PATTERN>\n";
|
||||
out.write(dummyXmlStartTag.getBytes("UTF-8"));
|
||||
out.write(input.getBytes("UTF-8"));
|
||||
out.write(dummyXmlEndTag.getBytes("UTF-8"));
|
||||
out.write(dummyXmlStartTag.getBytes(StandardCharsets.UTF_8));
|
||||
out.write(input.getBytes(StandardCharsets.UTF_8));
|
||||
out.write(dummyXmlEndTag.getBytes(StandardCharsets.UTF_8));
|
||||
out.close();
|
||||
}
|
||||
|
||||
|
@ -25,6 +25,7 @@
|
||||
import java.io.IOException;
|
||||
import java.io.PrintStream;
|
||||
import java.net.URL;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
@ -157,7 +158,7 @@ public void testHeaderNodeManagers() throws Exception {
|
||||
System.setErr(out);
|
||||
topcli.showTopScreen();
|
||||
out.flush();
|
||||
actual = outStream.toString("UTF-8");
|
||||
actual = outStream.toString(StandardCharsets.UTF_8.name());
|
||||
}
|
||||
|
||||
String expected = "NodeManager(s)"
|
||||
|
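In the TopCLI test hunk above the target is `ByteArrayOutputStream.toString(String)`, which takes a charset *name*, so the patch passes `StandardCharsets.UTF_8.name()` rather than the constant itself; the `toString(Charset)` overload only exists on Java 10+. A sketch, with an illustrative method name:

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;

class CaptureOutput {
  static String capture() throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    try (PrintStream out = new PrintStream(buf, true, StandardCharsets.UTF_8.name())) {
      out.println("hello");
    }
    // toString(String charsetName) works on Java 8; toString(Charset) needs Java 10+.
    return buf.toString(StandardCharsets.UTF_8.name());
  }
}
```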
@@ -38,6 +38,7 @@
 import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
 import java.text.DecimalFormat;
 import java.util.ArrayList;
 import java.util.Arrays;

@@ -364,7 +365,7 @@ public void testGetContainers() throws Exception {
 verify(client).getContainers(attemptId);
 ByteArrayOutputStream baos = new ByteArrayOutputStream();
 OutputStreamWriter stream =
-new OutputStreamWriter(baos, "UTF-8");
+new OutputStreamWriter(baos, StandardCharsets.UTF_8);
 PrintWriter pw = new PrintWriter(stream);
 pw.println("Total number of containers :3");
 pw.printf(ApplicationCLI.CONTAINER_PATTERN, "Container-Id", "Start Time",

@@ -30,6 +30,7 @@
 import java.io.UnsupportedEncodingException;
 import java.io.Writer;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;

@@ -159,7 +160,7 @@ private void writeSrcFileAndALog(Path srcFilePath, String fileName, final long l
 File outputFile = new File(new File(srcFilePath.toString()), fileName);
 FileOutputStream os = new FileOutputStream(outputFile);
-final OutputStreamWriter osw = new OutputStreamWriter(os, "UTF8");
+final OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8);
 final int ch = filler;
 UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

@@ -473,7 +474,7 @@ private OutputStreamWriter getOutputStreamWriter(Path srcFilePath,
 }
 File outputFile = new File(new File(srcFilePath.toString()), fileName);
 FileOutputStream os = new FileOutputStream(outputFile);
-OutputStreamWriter osw = new OutputStreamWriter(os, "UTF8");
+OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8);
 return osw;
 }
 }

@@ -24,6 +24,7 @@
 import java.io.InputStream;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;

@@ -227,6 +228,6 @@ private String readInputStream(InputStream input) throws Exception {
 while ((read = input.read(buffer)) >= 0) {
 data.write(buffer, 0, read);
 }
-return new String(data.toByteArray(), "UTF-8");
+return new String(data.toByteArray(), StandardCharsets.UTF_8);
 }
 }

@@ -21,6 +21,7 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;

@@ -585,7 +586,7 @@ private Map<String, Token<AMRMTokenIdentifier>> recoverSubClusterAMRMTokenIdenti
 // entry for subClusterId -> UAM AMRMTokenIdentifier
 String scId = key.substring(NMSS_SECONDARY_SC_PREFIX.length());
 Token<AMRMTokenIdentifier> aMRMTokenIdentifier = new Token<>();
-aMRMTokenIdentifier.decodeFromUrlString(new String(value, STRING_TO_BYTE_FORMAT));
+aMRMTokenIdentifier.decodeFromUrlString(new String(value, StandardCharsets.UTF_8));
 uamMap.put(scId, aMRMTokenIdentifier);
 LOG.debug("Recovered UAM in {} from NMSS.", scId);
 }

@@ -1345,7 +1346,7 @@ private List<SubClusterId> registerAndAllocateWithNewSubClusters(
 } else if (getNMStateStore() != null) {
 getNMStateStore().storeAMRMProxyAppContextEntry(attemptId,
 NMSS_SECONDARY_SC_PREFIX + subClusterId,
-token.encodeToUrlString().getBytes(STRING_TO_BYTE_FORMAT));
+token.encodeToUrlString().getBytes(StandardCharsets.UTF_8));
 }
 } catch (Throwable e) {
 LOG.error("Failed to persist UAM token from {} Application {}",

@@ -1884,7 +1885,7 @@ public void callback(AllocateResponse response) {
 try {
 getNMStateStore().storeAMRMProxyAppContextEntry(attemptId,
 NMSS_SECONDARY_SC_PREFIX + subClusterId.getId(),
-newToken.encodeToUrlString().getBytes(STRING_TO_BYTE_FORMAT));
+newToken.encodeToUrlString().getBytes(StandardCharsets.UTF_8));
 } catch (IOException e) {
 LOG.error("Error storing UAM token as AMRMProxy "
 + "context entry in NMSS for {}.", attemptId, e);

@@ -31,6 +31,7 @@
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.ArrayList;

@@ -73,7 +74,7 @@ private void checkDiskScheduler() {
 // are using the CFQ scheduler. If they aren't print a warning
 try {
 byte[] contents = Files.readAllBytes(Paths.get(PARTITIONS_FILE));
-data = new String(contents, "UTF-8").trim();
+data = new String(contents, StandardCharsets.UTF_8).trim();
 } catch (IOException e) {
 String msg = "Couldn't read " + PARTITIONS_FILE +
 "; can't determine disk scheduler type";

@@ -96,7 +97,7 @@ private void checkDiskScheduler() {
 if (schedulerFile.exists()) {
 try {
 byte[] contents = Files.readAllBytes(Paths.get(schedulerPath));
-String schedulerString = new String(contents, "UTF-8").trim();
+String schedulerString = new String(contents, StandardCharsets.UTF_8).trim();
 if (!schedulerString.contains("[cfq]")) {
 LOG.warn("Device " + partition + " does not use the CFQ"
 + " scheduler; disk isolation using "
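Reading a whole small file as UTF-8, as in the disk-scheduler checks above, follows the same pattern. A sketch; the path is illustrative:

```java
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

class ReadSmallFile {
  static String readTrimmed(String path) throws IOException {
    byte[] contents = Files.readAllBytes(Paths.get(path));
    // new String(byte[], Charset) never throws UnsupportedEncodingException.
    return new String(contents, StandardCharsets.UTF_8).trim();
    // On Java 11+, Files.readString(Paths.get(path)) would do the same.
  }
}
```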
@@ -37,6 +37,7 @@
 import java.io.File;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;

@@ -132,7 +133,7 @@ public static boolean cpuLimitsExist(String path)
 File quotaFile = new File(path,
 CPU.getName() + "." + CGroupsHandler.CGROUP_CPU_QUOTA_US);
 if (quotaFile.exists()) {
-String contents = FileUtils.readFileToString(quotaFile, "UTF-8");
+String contents = FileUtils.readFileToString(quotaFile, StandardCharsets.UTF_8);
 int quotaUS = Integer.parseInt(contents.trim());
 if (quotaUS != -1) {
 return true;
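commons-io has matching overloads, so the same swap works there too; the hunk above uses `FileUtils.readFileToString(File, Charset)` instead of the name-based variant. A sketch, assuming commons-io is on the classpath:

```java
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.FileUtils;

class QuotaFileReader {
  static int readQuota(File quotaFile) throws IOException {
    // Charset-typed overload of commons-io's FileUtils.readFileToString.
    String contents = FileUtils.readFileToString(quotaFile, StandardCharsets.UTF_8);
    return Integer.parseInt(contents.trim());
  }
}
```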
@@ -39,6 +39,7 @@
 import org.apache.hadoop.yarn.util.SystemClock;
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.util.Arrays;

@@ -216,7 +217,7 @@ static Map<String, Set<String>> parseMtab(String mtab)
 try {
 FileInputStream fis = new FileInputStream(new File(mtab));
-in = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+in = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 for (String str = in.readLine(); str != null;
 str = in.readLine()) {

@@ -474,7 +475,7 @@ private void logLineFromTasksFile(File cgf) {
 if (LOG.isDebugEnabled()) {
 try (BufferedReader inl =
 new BufferedReader(new InputStreamReader(new FileInputStream(cgf
-+ "/tasks"), "UTF-8"))) {
++ "/tasks"), StandardCharsets.UTF_8))) {
 str = inl.readLine();
 if (str != null) {
 LOG.debug("First line in cgroup tasks file: {} {}", cgf, str);

@@ -559,7 +560,7 @@ public void updateCGroupParam(CGroupController controller, String cGroupId,
 try {
 File file = new File(cGroupParamPath);
-Writer w = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
+Writer w = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8);
 pw = new PrintWriter(w);
 pw.write(value);
 } catch (IOException e) {

@@ -595,7 +596,7 @@ public String getCGroupParam(CGroupController controller, String cGroupId,
 try {
 byte[] contents = Files.readAllBytes(Paths.get(cGroupParamPath));
-return new String(contents, "UTF-8").trim();
+return new String(contents, StandardCharsets.UTF_8).trim();
 } catch (IOException e) {
 throw new ResourceHandlerException(
 "Unable to read from " + cGroupParamPath);

@@ -31,6 +31,7 @@
 import org.slf4j.LoggerFactory;
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.HashMap;

@@ -620,7 +621,7 @@ public PrivilegedOperation commitBatchToTempFile()
 try (
 Writer writer = new OutputStreamWriter(new FileOutputStream(tcCmds),
-"UTF-8");
+StandardCharsets.UTF_8);
 PrintWriter printWriter = new PrintWriter(writer)) {
 for (String command : commands) {
 printWriter.println(command);

@@ -48,6 +48,7 @@
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;

@@ -258,7 +259,7 @@ private String writeCommandToTempFile(ContainerExecContext ctx)
 TMP_FILE_SUFFIX, cmdDir);
 try (
 Writer writer = new OutputStreamWriter(
-new FileOutputStream(commandFile.toString()), "UTF-8");
+new FileOutputStream(commandFile.toString()), StandardCharsets.UTF_8);
 PrintWriter printWriter = new PrintWriter(writer);
 ) {
 Map<String, List<String>> cmd = new HashMap<String, List<String>>();

@@ -38,6 +38,7 @@
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 import java.util.Map;

@@ -56,7 +57,7 @@ private String writeEnvFile(DockerRunCommand cmd, String filePrefix,
 TMP_ENV_FILE_SUFFIX, cmdDir);
 try (
 Writer envWriter = new OutputStreamWriter(
-new FileOutputStream(dockerEnvFile), "UTF-8");
+new FileOutputStream(dockerEnvFile), StandardCharsets.UTF_8);
 PrintWriter envPrintWriter = new PrintWriter(envWriter);
 ) {
 for (Map.Entry<String, String> entry : cmd.getEnv()

@@ -94,7 +95,7 @@ public String writeCommandToTempFile(DockerCommand cmd,
 TMP_FILE_SUFFIX, cmdDir);
 try (
 Writer writer = new OutputStreamWriter(
-new FileOutputStream(dockerCommandFile.toString()), "UTF-8");
+new FileOutputStream(dockerCommandFile.toString()), StandardCharsets.UTF_8);
 PrintWriter printWriter = new PrintWriter(writer);
 ) {
 printWriter.println("[docker-command-execution]");

@@ -38,6 +38,7 @@
 import java.io.StringWriter;
 import java.net.URL;
 import java.net.URLConnection;
+import java.nio.charset.StandardCharsets;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;

@@ -104,7 +105,7 @@ private void init() throws ContainerExecutionException {
 uc.setRequestProperty("X-Requested-With", "Curl");
 StringWriter writer = new StringWriter();
-IOUtils.copy(uc.getInputStream(), writer, "utf-8");
+IOUtils.copy(uc.getInputStream(), writer, StandardCharsets.UTF_8);
 cliOptions = writer.toString();
 LOG.info("Additional docker CLI options from plugin to run GPU "

@@ -27,6 +27,7 @@
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.Writer;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.Collections;

@@ -220,7 +221,7 @@ private void updateCgroup(String controller, String groupName, String param,
 PrintWriter pw = null;
 try {
 File file = new File(path + "/" + param);
-Writer w = new OutputStreamWriter(new FileOutputStream(file), "UTF-8");
+Writer w = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8);
 pw = new PrintWriter(w);
 pw.write(value);
 } catch (IOException e) {

@@ -249,7 +250,7 @@ private void logLineFromTasksFile(File cgf) {
 if (LOG.isDebugEnabled()) {
 try (BufferedReader inl =
 new BufferedReader(new InputStreamReader(new FileInputStream(cgf
-+ "/tasks"), "UTF-8"))) {
++ "/tasks"), StandardCharsets.UTF_8))) {
 str = inl.readLine();
 if (str != null) {
 LOG.debug("First line in cgroup tasks file: {} {}", cgf, str);

@@ -403,7 +404,7 @@ private Map<String, Set<String>> parseMtab() throws IOException {
 try {
 FileInputStream fis = new FileInputStream(new File(getMtabFileName()));
-in = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+in = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 for (String str = in.readLine(); str != null;
 str = in.readLine()) {

@@ -22,13 +22,14 @@
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 /**
 * Helper functionality to read the pid from a file.

@@ -57,7 +58,7 @@ public static String getProcessId(Path path) throws IOException {
 File file = new File(path.toString());
 if (file.exists()) {
 FileInputStream fis = new FileInputStream(file);
-bufReader = new BufferedReader(new InputStreamReader(fis, "UTF-8"));
+bufReader = new BufferedReader(new InputStreamReader(fis, StandardCharsets.UTF_8));
 while (true) {
 String line = bufReader.readLine();

@@ -346,7 +346,7 @@ private void performRuleConversion(FairScheduler fs)
 if (!rulesToFile) {
 String json =
 ((ByteArrayOutputStream)mappingRulesOutputStream)
-.toString(StandardCharsets.UTF_8.displayName());
+.toString(StandardCharsets.UTF_8.name());
 capacitySchedulerConfig.setMappingRuleJson(json);
 }
 } else {
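The converter hunk above is a small correctness cleanup rather than a pure charset swap: `Charset.displayName()` is a human-readable label that may vary by locale, while `Charset.name()` is the canonical registry name that APIs expecting a charset-name string should receive. Sketch:

```java
import java.io.ByteArrayOutputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

class CharsetNames {
  static String dump(ByteArrayOutputStream out) throws UnsupportedEncodingException {
    // name() is the canonical IANA name ("UTF-8"); displayName() may differ by locale.
    return out.toString(StandardCharsets.UTF_8.name());
  }
}
```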
@@ -817,7 +817,7 @@ public void testAppSubmit(String acceptMedia, String contentMedia)
 HashMap<String, String> tokens = new HashMap<>();
 HashMap<String, String> secrets = new HashMap<>();
 secrets.put("secret1", Base64.encodeBase64String(
-"mysecret".getBytes("UTF8")));
+"mysecret".getBytes(StandardCharsets.UTF_8)));
 credentials.setSecrets(secrets);
 credentials.setTokens(tokens);
 ApplicationSubmissionContextInfo appInfo = new ApplicationSubmissionContextInfo();

@@ -840,7 +840,7 @@ public void testAppSubmit(String acceptMedia, String contentMedia)
 appInfo.getContainerLaunchContextInfo().setEnvironment(environment);
 appInfo.getContainerLaunchContextInfo().setAcls(acls);
 appInfo.getContainerLaunchContextInfo().getAuxillaryServiceData()
-.put("test", Base64.encodeBase64URLSafeString("value12".getBytes("UTF8")));
+.put("test", Base64.encodeBase64URLSafeString("value12".getBytes(StandardCharsets.UTF_8)));
 appInfo.getContainerLaunchContextInfo().setCredentials(credentials);
 appInfo.getResource().setMemory(1024);
 appInfo.getResource().setvCores(1);

@@ -31,6 +31,7 @@
 import java.io.StringWriter;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.concurrent.Callable;

@@ -250,7 +251,7 @@ public void testDelegationTokenAuth() throws Exception {
 InputStream errorStream = conn.getErrorStream();
 String error = "";
 BufferedReader reader = null;
-reader = new BufferedReader(new InputStreamReader(errorStream, "UTF8"));
+reader = new BufferedReader(new InputStreamReader(errorStream, StandardCharsets.UTF_8));
 for (String line; (line = reader.readLine()) != null;) {
 error += line;
 }

@@ -356,7 +357,7 @@ public Void call() throws Exception {
 assertEquals(Status.OK.getStatusCode(), conn.getResponseCode());
 BufferedReader reader = null;
 try {
-reader = new BufferedReader(new InputStreamReader(response, "UTF8"));
+reader = new BufferedReader(new InputStreamReader(response, StandardCharsets.UTF_8));
 for (String line; (line = reader.readLine()) != null;) {
 JSONObject obj = new JSONObject(line);
 if (obj.has("token")) {

@@ -432,7 +433,7 @@ public String call() throws Exception {
 InputStream response = conn.getInputStream();
 assertEquals(Status.OK.getStatusCode(), conn.getResponseCode());
 try (BufferedReader reader = new BufferedReader(new InputStreamReader(
-response, "UTF8"))) {
+response, StandardCharsets.UTF_8))) {
 String line;
 while ((line = reader.readLine()) != null) {
 JSONObject obj = new JSONObject(line);

@@ -490,7 +491,7 @@ static void setupConn(HttpURLConnection conn, String method,
 conn.setRequestProperty("Content-Type", contentType + ";charset=UTF8");
 if (body != null && !body.isEmpty()) {
 OutputStream stream = conn.getOutputStream();
-stream.write(body.getBytes("UTF8"));
+stream.write(body.getBytes(StandardCharsets.UTF_8));
 stream.close();
 }
 }

@@ -28,6 +28,7 @@
 import java.io.InputStreamReader;
 import java.net.HttpURLConnection;
 import java.net.URL;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.HashMap;

@@ -181,7 +182,7 @@ public void testWebServiceAccess() throws Exception {
 InputStream errorStream = conn.getErrorStream();
 String error = "";
 BufferedReader reader = new BufferedReader(
-new InputStreamReader(errorStream, "UTF8"));
+new InputStreamReader(errorStream, StandardCharsets.UTF_8));
 for (String line; (line = reader.readLine()) != null;) {
 error += line;
 }