diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java
index 530fd3f3a1..42ff69db73 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java
@@ -28,11 +28,25 @@ public class PermissionParam extends ShortParam {
 
   private static final Domain DOMAIN = new Domain(NAME, 8);
 
-  private static final short DEFAULT_PERMISSION = 0755;
+  private static final short DEFAULT_DIR_PERMISSION = 0755;
 
-  /** @return the default FsPermission. */
-  public static FsPermission getDefaultFsPermission() {
-    return new FsPermission(DEFAULT_PERMISSION);
+  private static final short DEFAULT_FILE_PERMISSION = 0644;
+
+  private static final short DEFAULT_SYMLINK_PERMISSION = 0777;
+
+  /** @return the default FsPermission for directory. */
+  public static FsPermission getDefaultDirFsPermission() {
+    return new FsPermission(DEFAULT_DIR_PERMISSION);
+  }
+
+  /** @return the default FsPermission for file. */
+  public static FsPermission getDefaultFileFsPermission() {
+    return new FsPermission(DEFAULT_FILE_PERMISSION);
+  }
+
+  /** @return the default FsPermission for symlink. */
+  public static FsPermission getDefaultSymLinkFsPermission() {
+    return new FsPermission(DEFAULT_SYMLINK_PERMISSION);
   }
 
   /**
@@ -57,8 +71,18 @@ public String getName() {
   }
 
   /** @return the represented FsPermission. */
-  public FsPermission getFsPermission() {
-    final Short v = getValue();
-    return new FsPermission(v != null? v: DEFAULT_PERMISSION);
+  public FsPermission getFileFsPermission() {
+    return this.getFsPermission(DEFAULT_FILE_PERMISSION);
   }
+
+  /** @return the represented FsPermission. */
+  public FsPermission getDirFsPermission() {
+    return this.getFsPermission(DEFAULT_DIR_PERMISSION);
+  }
+
+  private FsPermission getFsPermission(short defaultPermission){
+    final Short v = getValue();
+    return new FsPermission(v != null? v: defaultPermission);
+  }
+
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java
index 5d85dc4211..440a5322f8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/ParameterParser.java
@@ -104,7 +104,8 @@ short replication() {
   }
 
   FsPermission permission() {
-    return new PermissionParam(param(PermissionParam.NAME)).getFsPermission();
+    return new PermissionParam(param(PermissionParam.NAME)).
+        getFileFsPermission();
   }
 
   boolean overwrite() {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index b80d473895..7104d25344 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -507,14 +507,16 @@ private Response put(
     }
     case MKDIRS:
     {
-      final boolean b = np.mkdirs(fullpath, permission.getFsPermission(), true);
+      final boolean b = np.mkdirs(fullpath,
+          permission.getDirFsPermission(), true);
       final String js = JsonUtil.toJsonString("boolean", b);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
     case CREATESYMLINK:
     {
       np.createSymlink(destination.getValue(), fullpath,
-          PermissionParam.getDefaultFsPermission(), createParent.getValue());
+          PermissionParam.getDefaultSymLinkFsPermission(),
+          createParent.getValue());
       return Response.ok().type(MediaType.APPLICATION_OCTET_STREAM).build();
     }
     case RENAME:
@@ -547,7 +549,7 @@ private Response put(
     }
     case SETPERMISSION:
     {
-      np.setPermission(fullpath, permission.getFsPermission());
+      np.setPermission(fullpath, permission.getDirFsPermission());
       return Response.ok().type(MediaType.APPLICATION_OCTET_STREAM).build();
     }
     case SETTIMES:
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsCreatePermissions.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsCreatePermissions.java
new file mode 100644
index 0000000000..68fc26f99e
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/web/resources/TestWebHdfsCreatePermissions.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.web.resources;
+
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
+import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
+import org.apache.log4j.Level;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Test WebHDFS files/directories creation to make sure it follows same rules
+ * from dfs CLI for specifying files/directories permissions.
+ */
+public class TestWebHdfsCreatePermissions {
+  static final Log LOG = LogFactory.getLog(TestWebHdfsCreatePermissions.class);
+  {
+    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
+  }
+
+  private MiniDFSCluster cluster;
+
+  @Before
+  public void initializeMiniDFSCluster() throws Exception {
+    final Configuration conf = WebHdfsTestUtil.createConf();
+    this.cluster = new MiniDFSCluster.Builder(conf).build();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
+  private void testPermissions(int expectedResponse,
+      String expectedPermission,
+      String path,
+      String... params) throws Exception {
+    final String user = System.getProperty("user.name");
+    final StringBuilder uri = new StringBuilder(cluster.getHttpUri(0));
+    uri.append("/webhdfs/v1").
+        append(path).
+        append("?user.name=").
+        append(user).
+        append("&");
+    for (String param : params) {
+      uri.append(param).append("&");
+    }
+    LOG.info(uri.toString());
+    try {
+      URL url = new URL(uri.toString());
+      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      conn.setRequestMethod("PUT");
+      Assert.assertEquals(expectedResponse, conn.getResponseCode());
+
+      NamenodeProtocols namenode = cluster.getNameNode().getRpcServer();
+      FsPermission resultingPermission = namenode.getFileInfo(path).
+          getPermission();
+      Assert.assertEquals(expectedPermission, resultingPermission.toString());
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testCreateDirNoPermissions() throws Exception {
+    testPermissions(HttpURLConnection.HTTP_OK,
+        "rwxr-xr-x",
+        "/path",
+        "op=MKDIRS");
+  }
+
+  @Test
+  public void testCreateDir777Permissions() throws Exception {
+    testPermissions(HttpURLConnection.HTTP_OK,
+        "rwxrwxrwx",
+        "/test777",
+        "op=MKDIRS&permission=777");
+  }
+
+  @Test
+  public void testCreateFileNoPermissions() throws Exception {
+    testPermissions(HttpURLConnection.HTTP_CREATED,
+        "rw-r--r--",
+        "/test-file",
+        "op=CREATE");
+  }
+
+  @Test
+  public void testCreateFile666Permissions() throws Exception {
+    testPermissions(HttpURLConnection.HTTP_CREATED,
+        "rw-rw-rw-",
+        "/test-file",
+        "op=CREATE&permission=666");
+  }
+
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java
index 3728df0ec7..aebec890c4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java
@@ -164,7 +164,8 @@ public void testOwnerParam() {
 
   @Test
   public void testPermissionParam() {
     final PermissionParam p = new PermissionParam(PermissionParam.DEFAULT);
-    Assert.assertEquals(new FsPermission((short)0755), p.getFsPermission());
+    Assert.assertEquals(new FsPermission((short)0755), p.getDirFsPermission());
+    Assert.assertEquals(new FsPermission((short)0644), p.getFileFsPermission());
 
     new PermissionParam("0");