HDFS-10707. Replace org.apache.commons.io.Charsets with java.nio.charset.StandardCharsets. Contributed by Vincent Poon.

Akira Ajisaka 2016-08-02 17:07:59 +09:00
parent 6890d5b472
commit a5fb298e56
12 changed files with 34 additions and 27 deletions
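The change is a mechanical one-for-one substitution: every call site that referenced org.apache.commons.io.Charsets.UTF_8 now uses java.nio.charset.StandardCharsets.UTF_8, a constant available in the JDK since Java 7, so commons-io is no longer needed for charset lookups. A minimal, self-contained sketch of the pattern (illustrative only; the class and string literal below are not part of the patch):

import java.nio.charset.StandardCharsets;

public class CharsetsMigrationExample {
  public static void main(String[] args) {
    // Before this patch: "example".getBytes(org.apache.commons.io.Charsets.UTF_8)
    // After this patch: the JDK constant is a drop-in replacement.
    byte[] utf8 = "example".getBytes(StandardCharsets.UTF_8);
    String roundTrip = new String(utf8, StandardCharsets.UTF_8);
    System.out.println(roundTrip); // prints "example"
  }
}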


@@ -20,7 +20,6 @@
 import com.google.common.base.Joiner;
 import com.google.common.collect.Maps;
 import com.google.common.primitives.SignedBytes;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.fs.BlockLocation;
@@ -71,6 +70,7 @@
 import java.net.Socket;
 import java.net.URI;
 import java.nio.channels.SocketChannel;
+import java.nio.charset.StandardCharsets;
 import java.text.SimpleDateFormat;
 import java.util.Collection;
 import java.util.Collections;
@@ -95,7 +95,7 @@ public class DFSUtilClient {
    * Converts a string to a byte array using UTF8 encoding.
    */
   public static byte[] string2Bytes(String str) {
-    return str.getBytes(Charsets.UTF_8);
+    return str.getBytes(StandardCharsets.UTF_8);
   }

   /**


@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.fs.http.client;

-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
 import org.json.simple.parser.JSONParser;
@@ -29,6 +28,7 @@
 import java.net.URI;
 import java.net.URL;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
 import java.util.List;
 import java.util.Map;
@@ -128,7 +128,8 @@ static URL createURL(Path path, Map<String, String> params, Map<String,
   static Object jsonParse(HttpURLConnection conn) throws IOException {
     try {
       JSONParser parser = new JSONParser();
-      return parser.parse(new InputStreamReader(conn.getInputStream(), Charsets.UTF_8));
+      return parser.parse(
+          new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8));
     } catch (ParseException ex) {
       throw new IOException("JSON parser error, " + ex.getMessage(), ex);
     }


@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.fs.http.server;

-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.web.WebHdfsConstants;
@@ -32,6 +31,7 @@
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.Reader;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 import java.util.Properties;
@@ -83,7 +83,7 @@ protected Properties getConfiguration(String configPrefix,
     try {
       StringBuilder secret = new StringBuilder();
       Reader reader = new InputStreamReader(new FileInputStream(
-          signatureSecretFile), Charsets.UTF_8);
+          signatureSecretFile), StandardCharsets.UTF_8);
       int c = reader.read();
       while (c > -1) {
         secret.append((char)c);


@@ -18,7 +18,6 @@
 package org.apache.hadoop.lib.wsrs;

-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.json.simple.JSONObject;
@@ -34,6 +33,7 @@
 import java.io.Writer;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Type;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;

 @Provider
@@ -56,7 +56,8 @@ public long getSize(Map map, Class<?> aClass, Type type, Annotation[] annotation
   public void writeTo(Map map, Class<?> aClass, Type type, Annotation[] annotations,
                       MediaType mediaType, MultivaluedMap<String, Object> stringObjectMultivaluedMap,
                       OutputStream outputStream) throws IOException, WebApplicationException {
-    Writer writer = new OutputStreamWriter(outputStream, Charsets.UTF_8);
+    Writer writer =
+        new OutputStreamWriter(outputStream, StandardCharsets.UTF_8);
     JSONObject.writeJSONString(map, writer);
     writer.write(ENTER);
     writer.flush();


@@ -18,7 +18,6 @@
 package org.apache.hadoop.lib.wsrs;

-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.json.simple.JSONStreamAware;
@@ -34,6 +33,7 @@
 import java.io.Writer;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Type;
+import java.nio.charset.StandardCharsets;

 @Provider
 @Produces(MediaType.APPLICATION_JSON)
@@ -56,7 +56,8 @@ public long getSize(JSONStreamAware jsonStreamAware, Class<?> aClass, Type type,
   public void writeTo(JSONStreamAware jsonStreamAware, Class<?> aClass, Type type, Annotation[] annotations,
                       MediaType mediaType, MultivaluedMap<String, Object> stringObjectMultivaluedMap,
                       OutputStream outputStream) throws IOException, WebApplicationException {
-    Writer writer = new OutputStreamWriter(outputStream, Charsets.UTF_8);
+    Writer writer =
+        new OutputStreamWriter(outputStream, StandardCharsets.UTF_8);
     jsonStreamAware.writeJSONString(writer);
     writer.write(ENTER);
     writer.flush();


@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.datanode.web.webhdfs;

 import io.netty.handler.codec.http.QueryStringDecoder;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -45,6 +44,7 @@
 import java.io.IOException;
 import java.net.URI;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
@@ -58,8 +58,8 @@ class ParameterParser {
   private final Map<String, List<String>> params;

   ParameterParser(QueryStringDecoder decoder, Configuration conf) {
-    this.path = decodeComponent(decoder.path().substring
-        (WEBHDFS_PREFIX_LENGTH), Charsets.UTF_8);
+    this.path = decodeComponent(decoder.path().substring(WEBHDFS_PREFIX_LENGTH),
+        StandardCharsets.UTF_8);
     this.params = decoder.parameters();
     this.conf = conf;
   }
@@ -137,7 +137,8 @@ public boolean createParent() {
   }

   public EnumSet<CreateFlag> createFlag() {
-    String cf = decodeComponent(param(CreateFlagParam.NAME), Charsets.UTF_8);
+    String cf =
+        decodeComponent(param(CreateFlagParam.NAME), StandardCharsets.UTF_8);
     return new CreateFlagParam(cf).getValue();
   }


@@ -57,10 +57,10 @@
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.EnumSet;

-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -283,7 +283,8 @@ private void onGetFileChecksum(ChannelHandlerContext ctx) throws IOException {
     } finally {
       IOUtils.cleanup(LOG, dfsclient);
     }
-    final byte[] js = JsonUtil.toJsonString(checksum).getBytes(Charsets.UTF_8);
+    final byte[] js =
+        JsonUtil.toJsonString(checksum).getBytes(StandardCharsets.UTF_8);
     resp =
         new DefaultFullHttpResponse(HTTP_1_1, OK, Unpooled.wrappedBuffer(js));


@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.namenode;

 import com.google.common.base.Preconditions;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.InvalidPathException;
 import org.apache.hadoop.fs.UnresolvedLinkException;
@@ -33,6 +32,7 @@
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;

 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.AbstractMap;
 import java.util.List;
 import java.util.Map;
@@ -121,7 +121,8 @@ static HdfsFileStatus mkdirs(FSNamesystem fsn, String src,
   static Map.Entry<INodesInPath, String> createAncestorDirectories(
       FSDirectory fsd, INodesInPath iip, PermissionStatus permission)
       throws IOException {
-    final String last = new String(iip.getLastLocalName(), Charsets.UTF_8);
+    final String last =
+        new String(iip.getLastLocalName(), StandardCharsets.UTF_8);
     INodesInPath existing = iip.getExistingINodes();
     List<String> children = iip.getPath(existing.length(),
         iip.length() - existing.length());
@@ -189,7 +190,7 @@ private static INodesInPath createSingleDirectory(FSDirectory fsd,
       throws IOException {
     assert fsd.hasWriteLock();
     existing = unprotectedMkdir(fsd, fsd.allocateNewInodeId(), existing,
-        localName.getBytes(Charsets.UTF_8), perm, null, now());
+        localName.getBytes(StandardCharsets.UTF_8), perm, null, now());
     if (existing == null) {
       return null;
     }


@@ -20,7 +20,6 @@
 import com.google.common.base.Preconditions;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.DirectoryListingStartAfterNotFoundException;
 import org.apache.hadoop.fs.FileEncryptionInfo;
@@ -46,6 +45,7 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;

 import static org.apache.hadoop.util.Time.now;
@@ -55,7 +55,8 @@ static DirectoryListing getListingInt(FSDirectory fsd, final String srcArg,
       byte[] startAfter, boolean needLocation) throws IOException {
     byte[][] pathComponents = FSDirectory
         .getPathComponentsForReservedPath(srcArg);
-    final String startAfterString = new String(startAfter, Charsets.UTF_8);
+    final String startAfterString =
+        new String(startAfter, StandardCharsets.UTF_8);
     String src = null;

     if (fsd.isPermissionEnabled()) {


@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.namenode;

 import com.google.common.base.Preconditions;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.crypto.CipherSuite;
 import org.apache.hadoop.crypto.CryptoProtocolVersion;
@@ -60,6 +59,7 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -610,7 +610,7 @@ private static INodesInPath addFile(
       }
       INodeFile newNode = newINodeFile(fsd.allocateNewInodeId(), permissions,
           modTime, modTime, replication, preferredBlockSize, ecPolicy != null);
-      newNode.setLocalName(localName.getBytes(Charsets.UTF_8));
+      newNode.setLocalName(localName.getBytes(StandardCharsets.UTF_8));
       newNode.toUnderConstruction(clientName, clientMachine);
       newiip = fsd.addINode(existing, newNode);
     } finally {


@@ -21,9 +21,9 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.RandomAccessFile;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;

-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature;
@@ -34,7 +34,7 @@
 @InterfaceAudience.Private
 public final class FSImageUtil {
   public static final byte[] MAGIC_HEADER =
-      "HDFSIMG1".getBytes(Charsets.UTF_8);
+      "HDFSIMG1".getBytes(StandardCharsets.UTF_8);
   public static final int FILE_VERSION = 1;

   public static boolean checkFileFormat(RandomAccessFile file)


@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;

-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.PermissionStatus;
@@ -27,6 +26,7 @@
 import org.mockito.stubbing.Answer;

 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;

 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT;
@@ -124,7 +124,7 @@ private static FSNamesystem setupFileSystem() throws IOException {
         "hdfs", "supergroup",
         FsPermission.createImmutable((short) 0x1ff));
     final INodeFile file = new INodeFile(
-        MOCK_INODE_ID, FILE_NAME.getBytes(Charsets.UTF_8),
+        MOCK_INODE_ID, FILE_NAME.getBytes(StandardCharsets.UTF_8),
         perm, 1, 1, new BlockInfo[] {}, (short) 1,
         DFS_BLOCK_SIZE_DEFAULT);
     fsn.getFSDirectory().addINode(iip, file);