diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
index d7faa68160..111c8c4cc6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
@@ -20,7 +20,6 @@
 import com.google.common.base.Joiner;
 import com.google.common.collect.Maps;
 import com.google.common.primitives.SignedBytes;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.fs.BlockLocation;
@@ -71,6 +70,7 @@
 import java.net.Socket;
 import java.net.URI;
 import java.nio.channels.SocketChannel;
+import java.nio.charset.StandardCharsets;
 import java.text.SimpleDateFormat;
 import java.util.Collection;
 import java.util.Collections;
@@ -95,7 +95,7 @@ public class DFSUtilClient {
    * Converts a string to a byte array using UTF8 encoding.
    */
   public static byte[] string2Bytes(String str) {
-    return str.getBytes(Charsets.UTF_8);
+    return str.getBytes(StandardCharsets.UTF_8);
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
index 45d9805d91..95e26d799f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSUtils.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.fs.http.client;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
 import org.json.simple.parser.JSONParser;
@@ -29,6 +28,7 @@
 import java.net.URI;
 import java.net.URL;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
 import java.util.List;
 import java.util.Map;
@@ -128,7 +128,8 @@ static URL createURL(Path path, Map params, Map -1) { secret.append((char)c);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java
index 0809a85705..05bb9a1cad 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONMapProvider.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.lib.wsrs;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.json.simple.JSONObject;
 
@@ -34,6 +33,7 @@
 import java.io.Writer;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Type;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;
 
 @Provider
@@ -56,7 +56,8 @@ public long getSize(Map map, Class aClass, Type type, Annotation[] annotation
   public void writeTo(Map map, Class aClass, Type type, Annotation[] annotations,
                       MediaType mediaType, MultivaluedMap<String, Object> stringObjectMultivaluedMap,
                       OutputStream outputStream) throws IOException, WebApplicationException {
-    Writer writer = new OutputStreamWriter(outputStream, Charsets.UTF_8);
+    Writer writer =
+        new OutputStreamWriter(outputStream, StandardCharsets.UTF_8);
     JSONObject.writeJSONString(map, writer);
     writer.write(ENTER);
     writer.flush();
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java
index 15ea8e6bc8..7e1f98ba68 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/JSONProvider.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.lib.wsrs;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.json.simple.JSONStreamAware;
 
@@ -34,6 +33,7 @@
 import java.io.Writer;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Type;
+import java.nio.charset.StandardCharsets;
 
 @Provider
 @Produces(MediaType.APPLICATION_JSON)
@@ -56,7 +56,8 @@ public long getSize(JSONStreamAware jsonStreamAware, Class aClass, Type type,
   public void writeTo(JSONStreamAware jsonStreamAware, Class aClass, Type type,
                       Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> stringObjectMultivaluedMap,
                       OutputStream outputStream) throws IOException, WebApplicationException {
-    Writer writer = new OutputStreamWriter(outputStream, Charsets.UTF_8);
+    Writer writer =
+        new OutputStreamWriter(outputStream, StandardCharsets.UTF_8);
     jsonStreamAware.writeJSONString(writer);
     writer.write(ENTER);
     writer.flush();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java
index 440a5322f8..3c7016e43b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.datanode.web.webhdfs;
 
 import io.netty.handler.codec.http.QueryStringDecoder;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -45,6 +44,7 @@
 import java.io.IOException;
 import java.net.URI;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
@@ -58,8 +58,8 @@ class ParameterParser {
   private final Map<String, List<String>> params;
 
   ParameterParser(QueryStringDecoder decoder, Configuration conf) {
-    this.path = decodeComponent(decoder.path().substring
-        (WEBHDFS_PREFIX_LENGTH), Charsets.UTF_8);
+    this.path = decodeComponent(decoder.path().substring(WEBHDFS_PREFIX_LENGTH),
+        StandardCharsets.UTF_8);
     this.params = decoder.parameters();
     this.conf = conf;
   }
@@ -137,7 +137,8 @@ public boolean createParent() {
   }
 
   public EnumSet<CreateFlag> createFlag() {
-    String cf = decodeComponent(param(CreateFlagParam.NAME), Charsets.UTF_8);
+    String cf =
+        decodeComponent(param(CreateFlagParam.NAME), StandardCharsets.UTF_8);
     return new CreateFlagParam(cf).getValue();
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
index 0a8f40d6e8..d354612ddc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
@@ -57,10 +57,10 @@
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.EnumSet;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -283,7 +283,8 @@ private void onGetFileChecksum(ChannelHandlerContext ctx) throws IOException {
     } finally {
       IOUtils.cleanup(LOG, dfsclient);
     }
-    final byte[] js = JsonUtil.toJsonString(checksum).getBytes(Charsets.UTF_8);
+    final byte[] js =
+        JsonUtil.toJsonString(checksum).getBytes(StandardCharsets.UTF_8);
     resp = new DefaultFullHttpResponse(HTTP_1_1, OK,
         Unpooled.wrappedBuffer(js));
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirMkdirOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirMkdirOp.java
index f51427f2b3..ae73f9ce37 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirMkdirOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirMkdirOp.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.namenode;
 
 import com.google.common.base.Preconditions;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.InvalidPathException;
 import org.apache.hadoop.fs.UnresolvedLinkException;
@@ -33,6 +32,7 @@
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.AbstractMap;
 import java.util.List;
 import java.util.Map;
@@ -121,7 +121,8 @@ static HdfsFileStatus mkdirs(FSNamesystem fsn, String src,
   static Map.Entry<INodesInPath, String> createAncestorDirectories(
       FSDirectory fsd, INodesInPath iip, PermissionStatus permission)
       throws IOException {
-    final String last = new String(iip.getLastLocalName(), Charsets.UTF_8);
+    final String last =
+        new String(iip.getLastLocalName(), StandardCharsets.UTF_8);
     INodesInPath existing = iip.getExistingINodes();
     List<String> children = iip.getPath(existing.length(),
         iip.length() - existing.length());
@@ -189,7 +190,7 @@ private static INodesInPath createSingleDirectory(FSDirectory fsd,
       throws IOException {
     assert fsd.hasWriteLock();
     existing = unprotectedMkdir(fsd, fsd.allocateNewInodeId(), existing,
-        localName.getBytes(Charsets.UTF_8), perm, null, now());
+        localName.getBytes(StandardCharsets.UTF_8), perm, null, now());
     if (existing == null) {
       return null;
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
index 3acfcc9aa9..0604b9ae82 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java
@@ -20,7 +20,6 @@
 
 import com.google.common.base.Preconditions;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.DirectoryListingStartAfterNotFoundException;
 import org.apache.hadoop.fs.FileEncryptionInfo;
@@ -46,6 +45,7 @@
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 
 import static org.apache.hadoop.util.Time.now;
@@ -55,7 +55,8 @@ static DirectoryListing getListingInt(FSDirectory fsd, final String srcArg,
       byte[] startAfter, boolean needLocation) throws IOException {
     byte[][] pathComponents = FSDirectory
         .getPathComponentsForReservedPath(srcArg);
-    final String startAfterString = new String(startAfter, Charsets.UTF_8);
+    final String startAfterString =
+        new String(startAfter, StandardCharsets.UTF_8);
     String src = null;
 
     if (fsd.isPermissionEnabled()) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirWriteFileOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirWriteFileOp.java
index 01743ba6c1..d428c95d4e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirWriteFileOp.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirWriteFileOp.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.namenode;
 
 import com.google.common.base.Preconditions;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.crypto.CipherSuite;
 import org.apache.hadoop.crypto.CryptoProtocolVersion;
@@ -60,6 +59,7 @@
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -610,7 +610,7 @@ private static INodesInPath addFile(
       }
       INodeFile newNode = newINodeFile(fsd.allocateNewInodeId(), permissions,
           modTime, modTime, replication, preferredBlockSize, ecPolicy != null);
-      newNode.setLocalName(localName.getBytes(Charsets.UTF_8));
+      newNode.setLocalName(localName.getBytes(StandardCharsets.UTF_8));
       newNode.toUnderConstruction(clientName, clientMachine);
       newiip = fsd.addINode(existing, newNode);
     } finally {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageUtil.java
index 388a1bf0cc..5dbc80c3f0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageUtil.java
@@ -21,9 +21,9 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.RandomAccessFile;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature;
@@ -34,7 +34,7 @@
 @InterfaceAudience.Private
 public final class FSImageUtil {
   public static final byte[] MAGIC_HEADER =
-      "HDFSIMG1".getBytes(Charsets.UTF_8);
+      "HDFSIMG1".getBytes(StandardCharsets.UTF_8);
   public static final int FILE_VERSION = 1;
 
   public static boolean checkFileFormat(RandomAccessFile file)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetBlockLocations.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetBlockLocations.java
index 7a3a8d6e8c..eec5c98b9e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetBlockLocations.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetBlockLocations.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.PermissionStatus;
@@ -27,6 +26,7 @@
 import org.mockito.stubbing.Answer;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT;
@@ -124,7 +124,7 @@ private static FSNamesystem setupFileSystem() throws IOException {
         "hdfs", "supergroup",
         FsPermission.createImmutable((short) 0x1ff));
     final INodeFile file = new INodeFile(
-        MOCK_INODE_ID, FILE_NAME.getBytes(Charsets.UTF_8),
+        MOCK_INODE_ID, FILE_NAME.getBytes(StandardCharsets.UTF_8),
        perm, 1, 1, new BlockInfo[] {}, (short) 1,
        DFS_BLOCK_SIZE_DEFAULT);
     fsn.getFSDirectory().addINode(iip, file);
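Note: every hunk above makes the same mechanical substitution, replacing the commons-io Charsets constants with the JDK's java.nio.charset.StandardCharsets (available since Java 7). Both refer to the same UTF-8 charset, so runtime behavior is unchanged; the JDK constants simply drop a third-party import and keep the exception-free Charset-based overloads. The sketch below illustrates the pattern in isolation; the class name Utf8MigrationExample is illustrative only and not part of the patch.

import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

public class Utf8MigrationExample {
  public static void main(String[] args) throws Exception {
    // String <-> byte[]: the Charset overloads used throughout the patch
    // never throw UnsupportedEncodingException, unlike the String-based
    // overloads such as getBytes("UTF-8").
    byte[] bytes = "HDFSIMG1".getBytes(StandardCharsets.UTF_8);
    String decoded = new String(bytes, StandardCharsets.UTF_8);

    // Writer construction, matching the JSONProvider/JSONMapProvider hunks.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (Writer writer = new OutputStreamWriter(out, StandardCharsets.UTF_8)) {
      writer.write(decoded);
    }
    System.out.println(out.size() + " bytes written"); // prints "8 bytes written"
  }
}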