diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java index ee5bf14c83..ebc12ff04b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java @@ -478,7 +478,8 @@ private Path makeAbsolute(Path f) { return f.isAbsolute()? f: new Path(workingDir, f); } - static Map jsonParse(final HttpURLConnection c, + @VisibleForTesting + public static Map jsonParse(final HttpURLConnection c, final boolean useErrorStream) throws IOException { if (c.getContentLength() == 0) { return null; diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java index a812f198f5..accec4627e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java @@ -19,6 +19,8 @@ import static org.apache.hadoop.util.StringUtils.getTrimmedStringCollection; +import org.apache.hadoop.fs.InvalidPathException; +import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; @@ -419,6 +421,9 @@ private URI redirectURI(final Router router, final UserGroupInformation ugi, final DoAsParam doAsUser, final String path, final HttpOpParam.Op op, final long openOffset, final String excludeDatanodes, final Param... 
parameters) throws URISyntaxException, IOException { + if (!DFSUtil.isValidName(path)) { + throw new InvalidPathException(path); + } final DatanodeInfo dn = chooseDatanode(router, path, op, openOffset, excludeDatanodes); diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterWebHdfsMethods.java index 8e82d44c4d..7c3643f5a5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterWebHdfsMethods.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterWebHdfsMethods.java @@ -23,9 +23,12 @@ import static org.junit.Assert.fail; import java.io.FileNotFoundException; +import java.io.IOException; import java.net.HttpURLConnection; import java.net.URL; import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -34,6 +37,7 @@ import org.apache.hadoop.hdfs.server.federation.RouterConfigBuilder; import org.apache.hadoop.hdfs.server.federation.StateStoreDFSCluster; import org.apache.hadoop.hdfs.server.federation.resolver.order.DestinationOrder; +import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -145,4 +149,24 @@ public void testGetNsFromDataNodeNetworkLocation() { assertEquals("", RouterWebHdfsMethods .getNsFromDataNodeNetworkLocation("whatever-rack-info1")); } + + @Test + public void testWebHdfsCreateWithInvalidPath() throws Exception { + // A path name includes duplicated slashes. 
+ String path = "//tmp//file"; + assertResponse(path); + } + + private void assertResponse(String path) throws IOException { + URL url = new URL(getUri(path)); + HttpURLConnection conn = (HttpURLConnection) url.openConnection(); + conn.setRequestMethod("PUT"); + // Assert response code. + assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, conn.getResponseCode()); + // Assert exception. + Map response = WebHdfsFileSystem.jsonParse(conn, true); + assertEquals("InvalidPathException", + ((LinkedHashMap) response.get("RemoteException")).get("exception")); + conn.disconnect(); + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java index 5910a80700..a3250c213c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java @@ -55,6 +55,7 @@ import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.Response.Status; +import org.apache.hadoop.fs.InvalidPathException; import org.apache.hadoop.fs.QuotaUsage; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.protocol.HdfsConstants; @@ -401,6 +402,9 @@ private URI redirectURI(ResponseBuilder rb, final NameNode namenode, final String path, final HttpOpParam.Op op, final long openOffset, final long blocksize, final String excludeDatanodes, final Param... 
parameters) throws URISyntaxException, IOException { + if (!DFSUtil.isValidName(path)) { + throw new InvalidPathException(path); + } final DatanodeInfo dn; final NamenodeProtocols np = getRPCServer(namenode); HdfsFileStatus status = null; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java index 2461d22dde..c4f53b0561 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java @@ -55,6 +55,7 @@ import java.util.Collection; import java.util.EnumSet; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.Map; import java.util.NoSuchElementException; import java.util.Random; @@ -522,6 +523,38 @@ public void testCreateWithNoDN() throws Exception { } } + + @Test + public void testWebHdfsCreateWithInvalidPath() throws Exception { + final Configuration conf = WebHdfsTestUtil.createConf(); + cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build(); + // A path name includes duplicated slashes. + String path = "//tmp//file"; + assertResponse(path); + } + + private String getUri(String path) { + final String user = System.getProperty("user.name"); + final StringBuilder uri = new StringBuilder(cluster.getHttpUri(0)); + uri.append("/webhdfs/v1"). + append(path). + append("?op=CREATE"). + append("&user.name=" + user); + return uri.toString(); + } + + private void assertResponse(String path) throws IOException { + URL url = new URL(getUri(path)); + HttpURLConnection conn = (HttpURLConnection) url.openConnection(); + conn.setRequestMethod("PUT"); + // Assert response code. + assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, conn.getResponseCode()); + // Assert exception. 
+ Map response = WebHdfsFileSystem.jsonParse(conn, true); + assertEquals("InvalidPathException", + ((LinkedHashMap) response.get("RemoteException")).get("exception")); + conn.disconnect(); + } + /** * Test allow and disallow snapshot through WebHdfs. Verifying webhdfs with * Distributed filesystem methods.