HDFS-5035. getFileLinkStatus and rename do not correctly check permissions of symlinks (Andrew Wang via Colin Patrick McCabe)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1507525 13f79535-47bb-0310-9956-ffa450edef68
Colin McCabe 2013-07-26 23:54:17 +00:00
parent 16fe97bebf
commit 02d5830a00
4 changed files with 280 additions and 98 deletions

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -267,6 +267,9 @@ Release 2.3.0 - UNRELEASED
HDFS-5034. Remove debug prints from GetFileLinkInfo (Andrew Wang via Colin
Patrick McCabe)
HDFS-5035. getFileLinkStatus and rename do not correctly check permissions
of symlinks. (Andrew Wang via Colin Patrick McCabe)
Release 2.1.1-beta - UNRELEASED
INCOMPATIBLE CHANGES

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java

@@ -2950,8 +2950,13 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
// of rewriting the dst
String actualdst = dir.isDir(dst)?
dst + Path.SEPARATOR + new Path(src).getName(): dst;
checkParentAccess(pc, src, FsAction.WRITE);
checkAncestorAccess(pc, actualdst, FsAction.WRITE);
// Rename does not operate on link targets
// Do not resolveLink when checking permissions of src and dst
// Check write access to parent of src
checkPermission(pc, src, false, null, FsAction.WRITE, null, null, false);
// Check write access to ancestor of dst
checkPermission(pc, actualdst, false, FsAction.WRITE, null, null, null,
false);
}
if (dir.renameTo(src, dst)) {
@@ -3010,8 +3015,12 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
Options.Rename... options) throws IOException {
assert hasWriteLock();
if (isPermissionEnabled) {
checkParentAccess(pc, src, FsAction.WRITE);
checkAncestorAccess(pc, dst, FsAction.WRITE);
// Rename does not operate on link targets
// Do not resolveLink when checking permissions of src and dst
// Check write access to parent of src
checkPermission(pc, src, false, null, FsAction.WRITE, null, null, false);
// Check write access to ancestor of dst
checkPermission(pc, dst, false, FsAction.WRITE, null, null, null, false);
}
dir.renameTo(src, dst, options);
@@ -3244,7 +3253,7 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
checkOperation(OperationCategory.READ);
src = FSDirectory.resolvePath(src, pathComponents, dir);
if (isPermissionEnabled) {
checkTraverse(pc, src);
checkPermission(pc, src, false, null, null, null, null, resolveLink);
}
stat = dir.getFileInfo(src, resolveLink);
} catch (AccessControlException e) {
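
A note on the new checks above. The argument order visible in the diff appears to be checkPermission(pc, path, doCheckOwner, ancestorAccess, parentAccess, access, subAccess, resolveLink); the two FsAction.WRITE positions and the trailing resolveLink flag are confirmed by the comments in the hunks, the rest is my reading of FSPermissionChecker. The sketch below is illustrative only (the class and method names are mine, not HDFS code); it just spells out what each position asks for, using the paths from the new test:

public class CheckPermissionSketch {

  enum FsAction { READ, WRITE, EXECUTE }

  // Mirrors the argument order used in the checkPermission(...) calls above.
  static void describe(String caller, String path, boolean doCheckOwner,
      FsAction ancestorAccess, FsAction parentAccess, FsAction access,
      FsAction subAccess, boolean resolveLink) {
    System.out.println(caller + " -> path=" + path
        + ", doCheckOwner=" + doCheckOwner     // must the caller own the inode?
        + ", ancestorAccess=" + ancestorAccess // required on the deepest existing ancestor dir
        + ", parentAccess=" + parentAccess     // required on the immediate parent dir
        + ", access=" + access                 // required on the path's own inode
        + ", subAccess=" + subAccess           // required recursively below the path
        + ", resolveLink=" + resolveLink);     // false: check the symlink itself, not its target
  }

  public static void main(String[] args) {
    // rename, src side: write on the parent of src, symlink not resolved
    describe("rename(src)", "/symtest1/link", false, null, FsAction.WRITE, null, null, false);
    // rename, dst side: write on the deepest existing ancestor of dst, symlink not resolved
    describe("rename(dst)", "/symtest2/newlink", false, FsAction.WRITE, null, null, null, false);
    // getFileInfo: traverse-only check; resolveLink is whatever the RPC requested
    describe("getFileInfo", "/symtest1/link", false, null, null, null, null, false);
  }
}

As I read the diff, the behavioral change is the last argument: the old checkParentAccess, checkAncestorAccess, and checkTraverse helpers left link resolution at its default, so the checks could land on the link target instead of the link itself; rename now passes resolveLink=false explicitly, and getFileInfo passes through whatever the caller asked for.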

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/security/TestPermission.java

@@ -20,10 +20,8 @@ package org.apache.hadoop.security;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.Random;
import org.apache.commons.logging.Log;
@@ -32,17 +30,14 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.StringUtils;
import org.junit.Test;
@@ -315,92 +310,4 @@ public class TestPermission {
return false;
}
}
@Test(timeout = 30000)
public void testSymlinkPermissions() throws Exception {
final Configuration conf = new HdfsConfiguration();
conf.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, true);
conf.set(FsPermission.UMASK_LABEL, "000");
MiniDFSCluster cluster = null;
FileSystem fs = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
cluster.waitActive();
FileContext fc = FileContext.getFileContext(conf);
fs = FileSystem.get(conf);
// Create initial test files
final Path testDir = new Path("/symtest");
final Path target = new Path(testDir, "target");
final Path link = new Path(testDir, "link");
fs.mkdirs(testDir);
DFSTestUtil.createFile(fs, target, 1024, (short)3, 0xBEEFl);
fc.createSymlink(target, link, false);
// Non-super user to run commands with
final UserGroupInformation user = UserGroupInformation
.createRemoteUser("myuser");
// Case 1: parent directory is read-only
fs.setPermission(testDir, new FsPermission((short)0555));
try {
user.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws IOException {
FileContext myfc = FileContext.getFileContext(conf);
myfc.delete(link, false);
return null;
}
});
fail("Deleted symlink without write permissions on parent!");
} catch (AccessControlException e) {
GenericTestUtils.assertExceptionContains("Permission denied", e);
}
// Case 2: target is not readable
fs.setPermission(target, new FsPermission((short)0000));
try {
user.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws IOException {
FileContext myfc = FileContext.getFileContext(conf);
myfc.open(link).read();
return null;
}
});
fail("Read link target even though target does not have" +
" read permissions!");
} catch (IOException e) {
GenericTestUtils.assertExceptionContains("Permission denied", e);
}
// Case 3: parent directory is read/write
fs.setPermission(testDir, new FsPermission((short)0777));
user.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws IOException {
FileContext myfc = FileContext.getFileContext(conf);
myfc.delete(link, false);
return null;
}
});
// Make sure only the link was deleted
assertTrue("Target should not have been deleted!",
fc.util().exists(target));
assertFalse("Link should have been deleted!",
fc.util().exists(link));
} finally {
try {
if(fs != null) fs.close();
} catch(Exception e) {
LOG.error(StringUtils.stringifyException(e));
}
try {
if(cluster != null) cluster.shutdown();
} catch(Exception e) {
LOG.error(StringUtils.stringifyException(e));
}
}
}
}

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/security/TestPermissionSymlinks.java

@@ -0,0 +1,263 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystemTestWrapper;
import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestPermissionSymlinks {
private static final Log LOG = LogFactory.getLog(TestPermissionSymlinks.class);
private static final Configuration conf = new HdfsConfiguration();
// Non-super user to run commands with
private static final UserGroupInformation user = UserGroupInformation
.createRemoteUser("myuser");
private static final Path linkParent = new Path("/symtest1");
private static final Path targetParent = new Path("/symtest2");
private static final Path link = new Path(linkParent, "link");
private static final Path target = new Path(targetParent, "target");
private static MiniDFSCluster cluster;
private static FileSystem fs;
private static FileSystemTestWrapper wrapper;
@BeforeClass
public static void beforeClassSetUp() throws Exception {
conf.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, true);
conf.set(FsPermission.UMASK_LABEL, "000");
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
cluster.waitActive();
fs = cluster.getFileSystem();
wrapper = new FileSystemTestWrapper(fs);
}
@AfterClass
public static void afterClassTearDown() throws Exception {
if (fs != null) {
fs.close();
}
if (cluster != null) {
cluster.shutdown();
}
}
@Before
public void setUp() throws Exception {
// Create initial test files
fs.mkdirs(linkParent);
fs.mkdirs(targetParent);
DFSTestUtil.createFile(fs, target, 1024, (short)3, 0xBEEFL);
wrapper.createSymlink(target, link, false);
}
@After
public void tearDown() throws Exception {
// Wipe out everything
fs.delete(linkParent, true);
fs.delete(targetParent, true);
}
@Test(timeout = 5000)
public void testDelete() throws Exception {
// Try to delete where the symlink's parent dir is not writable
fs.setPermission(linkParent, new FsPermission((short) 0555));
try {
user.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws IOException {
FileContext myfc = FileContext.getFileContext(conf);
myfc.delete(link, false);
return null;
}
});
fail("Deleted symlink without write permissions on parent!");
} catch (AccessControlException e) {
GenericTestUtils.assertExceptionContains("Permission denied", e);
}
// Try a delete where the symlink parent dir is writable,
// but the target's parent and target are not
fs.setPermission(linkParent, new FsPermission((short) 0777));
fs.setPermission(targetParent, new FsPermission((short) 0555));
fs.setPermission(target, new FsPermission((short) 0555));
user.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws IOException {
FileContext myfc = FileContext.getFileContext(conf);
myfc.delete(link, false);
return null;
}
});
// Make sure only the link was deleted
assertTrue("Target should not have been deleted!",
wrapper.exists(target));
assertFalse("Link should have been deleted!",
wrapper.exists(link));
}
@Test(timeout = 5000)
public void testReadWhenTargetNotReadable() throws Exception {
fs.setPermission(target, new FsPermission((short) 0000));
try {
user.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws IOException {
FileContext myfc = FileContext.getFileContext(conf);
myfc.open(link).read();
return null;
}
});
fail("Read link target even though target does not have"
+ " read permissions!");
} catch (IOException e) {
GenericTestUtils.assertExceptionContains("Permission denied", e);
}
}
@Test(timeout = 5000)
public void testFileStatus() throws Exception {
// Try to getFileLinkStatus the link when the target is not readable
fs.setPermission(target, new FsPermission((short) 0000));
user.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws IOException {
FileContext myfc = FileContext.getFileContext(conf);
FileStatus stat = myfc.getFileLinkStatus(link);
assertEquals("Expected link's FileStatus path to match link!",
link.makeQualified(fs.getUri(), fs.getWorkingDirectory()), stat.getPath());
Path linkTarget = myfc.getLinkTarget(link);
assertEquals("Expected link's target to match target!",
target, linkTarget);
return null;
}
});
}
@Test(timeout = 5000)
public void testRenameLinkTargetNotWritableFC() throws Exception {
// Rename the link when the target and the target's parent are not writable
fs.setPermission(target, new FsPermission((short) 0555));
fs.setPermission(targetParent, new FsPermission((short) 0555));
user.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws IOException {
// Rename via FileContext
FileContext myfc = FileContext.getFileContext(conf);
Path newlink = new Path(linkParent, "newlink");
myfc.rename(link, newlink, Rename.NONE);
Path linkTarget = myfc.getLinkTarget(newlink);
assertEquals("Expected link's target to match target!",
target, linkTarget);
return null;
}
});
assertTrue("Expected target to exist", wrapper.exists(target));
}
@Test(timeout = 5000)
public void testRenameSrcNotWritableFC() throws Exception {
// Rename the link when the link's parent is not writable
fs.setPermission(linkParent, new FsPermission((short) 0555));
try {
user.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws IOException {
FileContext myfc = FileContext.getFileContext(conf);
Path newlink = new Path(targetParent, "newlink");
myfc.rename(link, newlink, Rename.NONE);
return null;
}
});
fail("Renamed link even though link's parent is not writable!");
} catch (IOException e) {
GenericTestUtils.assertExceptionContains("Permission denied", e);
}
}
// Need separate FileSystem tests since the server-side impl is different
// See {@link ClientProtocol#rename} and {@link ClientProtocol#rename2}.
@Test(timeout = 5000)
public void testRenameLinkTargetNotWritableFS() throws Exception {
// Rename the link when the target and the target's parent are not writable
fs.setPermission(target, new FsPermission((short) 0555));
fs.setPermission(targetParent, new FsPermission((short) 0555));
user.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws IOException {
// Rename via FileSystem
FileSystem myfs = FileSystem.get(conf);
Path newlink = new Path(linkParent, "newlink");
myfs.rename(link, newlink);
Path linkTarget = myfs.getLinkTarget(newlink);
assertEquals("Expected link's target to match target!",
target, linkTarget);
return null;
}
});
assertTrue("Expected target to exist", wrapper.exists(target));
}
@Test(timeout = 5000)
public void testRenameSrcNotWritableFS() throws Exception {
// Rename the link when the link's parent is not writable
fs.setPermission(linkParent, new FsPermission((short) 0555));
try {
user.doAs(new PrivilegedExceptionAction<Object>() {
@Override
public Object run() throws IOException {
FileSystem myfs = FileSystem.get(conf);
Path newlink = new Path(targetParent, "newlink");
myfs.rename(link, newlink);
return null;
}
});
fail("Renamed link even though link's parent is not writable!");
} catch (IOException e) {
GenericTestUtils.assertExceptionContains("Permission denied", e);
}
}
}
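
For reference, here is a minimal client-side sketch of the two rename paths that the paired FC/FS tests above exercise. As the comment before the FileSystem tests notes, the server-side implementations differ: FileSystem#rename(Path, Path) reaches the NameNode through ClientProtocol#rename, while FileContext#rename(Path, Path, Rename...) goes through ClientProtocol#rename2, which is why both FSNamesystem hunks above needed the corrected checks. The paths below are the ones the tests use, and the sketch assumes conf points at a running HDFS; it is not part of the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.Path;

public class RenamePathsSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration(); // assumes fs.defaultFS points at a live cluster
    Path link = new Path("/symtest1/link");
    Path newlink = new Path("/symtest1/newlink");

    // Older API: FileSystem#rename ends up at ClientProtocol#rename,
    // checked by the first rename hunk in the FSNamesystem diff above.
    FileSystem fs = FileSystem.get(conf);
    boolean renamed = fs.rename(link, newlink);
    System.out.println("FileSystem rename succeeded: " + renamed);

    // Newer API: FileContext#rename with Options.Rename ends up at
    // ClientProtocol#rename2, checked by the Options.Rename hunk above.
    FileContext fc = FileContext.getFileContext(conf);
    fc.rename(newlink, link, Rename.NONE);
  }
}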