HDFS-12357. Let NameNode to bypass external attribute provider for configured users. Contributed by Yongjun Zhang, Arun Suresh.
parent 5ff74167dd
commit d77ed238a9
@@ -641,6 +641,9 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final String DFS_DATANODE_MIN_SUPPORTED_NAMENODE_VERSION_KEY = "dfs.datanode.min.supported.namenode.version";
   public static final String DFS_DATANODE_MIN_SUPPORTED_NAMENODE_VERSION_DEFAULT = "2.1.0-beta";
   public static final String DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_KEY = "dfs.namenode.inode.attributes.provider.class";
+  public static final String DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_BYPASS_USERS_KEY = "dfs.namenode.inode.attributes.provider.bypass.users";
+  public static final String DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_BYPASS_USERS_DEFAULT = "";
+
   public static final String DFS_DATANODE_BP_READY_TIMEOUT_KEY = "dfs.datanode.bp-ready.timeout";
   public static final long DFS_DATANODE_BP_READY_TIMEOUT_DEFAULT = 20;
 
@@ -74,6 +74,7 @@
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumSet;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.SortedSet;
@@ -202,6 +203,10 @@ private static INodeDirectory createRoot(FSNamesystem namesystem) {
 
   private INodeAttributeProvider attributeProvider;
 
+  // A HashSet of principals of users for whom the external attribute provider
+  // will be bypassed
+  private HashSet<String> usersToBypassExtAttrProvider = null;
+
   public void setINodeAttributeProvider(INodeAttributeProvider provider) {
     attributeProvider = provider;
   }
@@ -357,6 +362,49 @@ public enum DirOp {
     this.quotaInitThreads = conf.getInt(
         DFSConfigKeys.DFS_NAMENODE_QUOTA_INIT_THREADS_KEY,
         DFSConfigKeys.DFS_NAMENODE_QUOTA_INIT_THREADS_DEFAULT);
+
+    initUsersToBypassExtProvider(conf);
   }
 
+  private void initUsersToBypassExtProvider(Configuration conf) {
+    String[] bypassUsers = conf.getTrimmedStrings(
+        DFSConfigKeys.DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_BYPASS_USERS_KEY,
+        DFSConfigKeys.DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_BYPASS_USERS_DEFAULT);
+    for(int i = 0; i < bypassUsers.length; i++) {
+      String tmp = bypassUsers[i].trim();
+      if (!tmp.isEmpty()) {
+        if (usersToBypassExtAttrProvider == null) {
+          usersToBypassExtAttrProvider = new HashSet<String>();
+        }
+        LOG.info("Add user " + tmp + " to the list that will bypass external"
+            + " attribute provider.");
+        usersToBypassExtAttrProvider.add(tmp);
+      }
+    }
+  }
+
+  /**
+   * Check if a given user is configured to bypass external attribute provider.
+   * @param user user principal
+   * @return true if the user is to bypass external attribute provider
+   */
+  private boolean isUserBypassingExtAttrProvider(final String user) {
+    return (usersToBypassExtAttrProvider != null) &&
+        usersToBypassExtAttrProvider.contains(user);
+  }
+
+  /**
+   * Return attributeProvider or null if ugi is to bypass attributeProvider.
+   * @param ugi
+   * @return configured attributeProvider or null
+   */
+  private INodeAttributeProvider getUserFilteredAttributeProvider(
+      UserGroupInformation ugi) {
+    if (attributeProvider == null ||
+        (ugi != null && isUserBypassingExtAttrProvider(ugi.getUserName()))) {
+      return null;
+    }
+    return attributeProvider;
+  }
+
   /**
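An aside, not part of the patch: a minimal, self-contained sketch of the list handling that initUsersToBypassExtProvider above relies on. Configuration.getTrimmedStrings splits on commas and trims whitespace, and the isEmpty() guard drops empty entries, so an untidy value such as " u2,, ,u3, " (the value used in the test change further down) ends up as exactly the set {u2, u3}. Class and variable names here are illustrative only.

import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.conf.Configuration;

public class BypassListParsingSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Same untidy value as the test's setUp(); the key name matches
    // DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_BYPASS_USERS_KEY.
    conf.set("dfs.namenode.inode.attributes.provider.bypass.users", " u2,, ,u3, ");

    Set<String> users = new HashSet<>();
    for (String u : conf.getTrimmedStrings(
        "dfs.namenode.inode.attributes.provider.bypass.users")) {
      if (!u.isEmpty()) {   // mirrors the isEmpty() check in the patch
        users.add(u);
      }
    }
    System.out.println(users); // [u2, u3] (order may vary)
  }
}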
@@ -1711,7 +1759,7 @@ FSPermissionChecker getPermissionChecker()
   FSPermissionChecker getPermissionChecker(String fsOwner, String superGroup,
       UserGroupInformation ugi) throws AccessControlException {
     return new FSPermissionChecker(
-        fsOwner, superGroup, ugi, attributeProvider);
+        fsOwner, superGroup, ugi, getUserFilteredAttributeProvider(ugi));
   }
 
   void checkOwner(FSPermissionChecker pc, INodesInPath iip)
@@ -1896,18 +1944,20 @@ void resetLastInodeIdWithoutChecking(long newValue) {
   }
 
   INodeAttributes getAttributes(INodesInPath iip)
-      throws FileNotFoundException {
+      throws IOException {
     INode node = FSDirectory.resolveLastINode(iip);
     int snapshot = iip.getPathSnapshotId();
     INodeAttributes nodeAttrs = node.getSnapshotINode(snapshot);
-    if (attributeProvider != null) {
+    UserGroupInformation ugi = NameNode.getRemoteUser();
+    INodeAttributeProvider ap = this.getUserFilteredAttributeProvider(ugi);
+    if (ap != null) {
       // permission checking sends the full components array including the
       // first empty component for the root. however file status
       // related calls are expected to strip out the root component according
       // to TestINodeAttributeProvider.
       byte[][] components = iip.getPathComponents();
       components = Arrays.copyOfRange(components, 1, components.length);
-      nodeAttrs = attributeProvider.getAttributes(components, nodeAttrs);
+      nodeAttrs = ap.getAttributes(components, nodeAttrs);
     }
     return nodeAttrs;
   }
 
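A toy illustration, not from the patch, of the root-component stripping described in the comment above: resolving "/tmp/foo" yields a leading empty component for the root, and the provider is handed the array with that first element removed.

import java.util.Arrays;

public class RootComponentStripSketch {
  public static void main(String[] args) {
    // "/tmp/foo" resolves to the components ["", "tmp", "foo"]; the external
    // attribute provider is expected to receive ["tmp", "foo"].
    byte[][] components = {"".getBytes(), "tmp".getBytes(), "foo".getBytes()};
    components = Arrays.copyOfRange(components, 1, components.length);
    for (byte[] c : components) {
      System.out.println(new String(c)); // prints "tmp" then "foo"
    }
  }
}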
@@ -4159,6 +4159,18 @@
   </description>
 </property>
 
+<property>
+  <name>dfs.namenode.inode.attributes.provider.bypass.users</name>
+  <value></value>
+  <description>
+    A list of user principals (in secure cluster) or user names (in insecure
+    cluster) for whom the external attribute provider will be bypassed for all
+    operations. This means file attributes stored in HDFS instead of the
+    external provider will be used for permission checking and be returned when
+    requested.
+  </description>
+</property>
+
 <property>
   <name>dfs.namenode.max-num-blocks-to-log</name>
   <value>1000</value>
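For context, a hedged sketch of how a deployment might enable the bypass for a couple of trusted users. The user names and class name are placeholders; in practice the value would normally be set in hdfs-site.xml rather than in code.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;

public class BypassUsersConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new HdfsConfiguration();
    // Hypothetical principals; for these users the NameNode uses the file
    // attributes stored in HDFS instead of asking the external provider.
    conf.set(
        DFSConfigKeys.DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_BYPASS_USERS_KEY,
        "hdfs,etladmin");
    System.out.println(conf.get(
        DFSConfigKeys.DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_BYPASS_USERS_KEY));
  }
}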
@@ -24,6 +24,7 @@
 import java.util.Set;
 
 import com.google.common.collect.ImmutableList;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -33,19 +34,25 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.namenode.INodeAttributeProvider.AccessControlEnforcer;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Lists;
 
 public class TestINodeAttributeProvider {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestINodeAttributeProvider.class);
 
   private MiniDFSCluster miniDFS;
   private static final Set<String> CALLED = new HashSet<String>();
+  private static final short HDFS_PERMISSION = 0777;
+  private static final short PROVIDER_PERMISSION = 0770;
 
   public static class MyAuthorizationProvider extends INodeAttributeProvider {
 
@@ -112,7 +119,8 @@ public short getFsPermissionShort() {
 
     @Override
     public long getPermissionLong() {
-      return (useDefault) ? inode.getPermissionLong() : 0770;
+      return (useDefault) ? inode.getPermissionLong() :
+          (long)PROVIDER_PERMISSION;
     }
 
     @Override
@@ -177,6 +185,9 @@ public void setUp() throws IOException {
     conf.set(DFSConfigKeys.DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_KEY,
         MyAuthorizationProvider.class.getName());
     conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
+    conf.set(
+        DFSConfigKeys.DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_BYPASS_USERS_KEY,
+        " u2,, ,u3, ");
     EditLogFileOutputStream.setShouldSkipFsyncForTesting(true);
     miniDFS = new MiniDFSCluster.Builder(conf).build();
   }
@@ -195,8 +206,11 @@ public void cleanUp() throws IOException {
   public void testDelegationToProvider() throws Exception {
     Assert.assertTrue(CALLED.contains("start"));
     FileSystem fs = FileSystem.get(miniDFS.getConfiguration(0));
-    fs.mkdirs(new Path("/tmp"));
-    fs.setPermission(new Path("/tmp"), new FsPermission((short) 0777));
+    final Path tmpPath = new Path("/tmp");
+    final Path fooPath = new Path("/tmp/foo");
+
+    fs.mkdirs(tmpPath);
+    fs.setPermission(tmpPath, new FsPermission(HDFS_PERMISSION));
     UserGroupInformation ugi = UserGroupInformation.createUserForTesting("u1",
         new String[]{"g1"});
     ugi.doAs(new PrivilegedExceptionAction<Void>() {
@@ -204,17 +218,19 @@ public void testDelegationToProvider() throws Exception {
       public Void run() throws Exception {
         FileSystem fs = FileSystem.get(miniDFS.getConfiguration(0));
         CALLED.clear();
-        fs.mkdirs(new Path("/tmp/foo"));
+        fs.mkdirs(fooPath);
         Assert.assertTrue(CALLED.contains("getAttributes"));
         Assert.assertTrue(CALLED.contains("checkPermission|null|null|null"));
         Assert.assertTrue(CALLED.contains("checkPermission|WRITE|null|null"));
+
         CALLED.clear();
-        fs.listStatus(new Path("/tmp/foo"));
+        fs.listStatus(fooPath);
         Assert.assertTrue(CALLED.contains("getAttributes"));
         Assert.assertTrue(
             CALLED.contains("checkPermission|null|null|READ_EXECUTE"));
+
         CALLED.clear();
-        fs.getAclStatus(new Path("/tmp/foo"));
+        fs.getAclStatus(fooPath);
         Assert.assertTrue(CALLED.contains("getAttributes"));
         Assert.assertTrue(CALLED.contains("checkPermission|null|null|null"));
         return null;
@@ -222,6 +238,81 @@ public Void run() throws Exception {
     });
   }
 
+  private class AssertHelper {
+    private boolean bypass = true;
+    AssertHelper(boolean bp) {
+      bypass = bp;
+    }
+    public void doAssert(boolean x) {
+      if (bypass) {
+        Assert.assertFalse(x);
+      } else {
+        Assert.assertTrue(x);
+      }
+    }
+  }
+
+  private void testBypassProviderHelper(final String[] users,
+      final short expectedPermission, final boolean bypass) throws Exception {
+    final AssertHelper asserter = new AssertHelper(bypass);
+
+    Assert.assertTrue(CALLED.contains("start"));
+
+    FileSystem fs = FileSystem.get(miniDFS.getConfiguration(0));
+    final Path userPath = new Path("/user");
+    final Path authz = new Path("/user/authz");
+    final Path authzChild = new Path("/user/authz/child2");
+
+    fs.mkdirs(userPath);
+    fs.setPermission(userPath, new FsPermission(HDFS_PERMISSION));
+    fs.mkdirs(authz);
+    fs.setPermission(authz, new FsPermission(HDFS_PERMISSION));
+    fs.mkdirs(authzChild);
+    fs.setPermission(authzChild, new FsPermission(HDFS_PERMISSION));
+    for(String user : users) {
+      UserGroupInformation ugiBypass =
+          UserGroupInformation.createUserForTesting(user,
+              new String[]{"g1"});
+      ugiBypass.doAs(new PrivilegedExceptionAction<Void>() {
+        @Override
+        public Void run() throws Exception {
+          FileSystem fs = FileSystem.get(miniDFS.getConfiguration(0));
+          Assert.assertEquals(expectedPermission,
+              fs.getFileStatus(authzChild).getPermission().toShort());
+          asserter.doAssert(CALLED.contains("getAttributes"));
+          asserter.doAssert(CALLED.contains("checkPermission|null|null|null"));
+
+          CALLED.clear();
+          Assert.assertEquals(expectedPermission,
+              fs.listStatus(userPath)[0].getPermission().toShort());
+          asserter.doAssert(CALLED.contains("getAttributes"));
+          asserter.doAssert(
+              CALLED.contains("checkPermission|null|null|READ_EXECUTE"));
+
+          CALLED.clear();
+          fs.getAclStatus(authzChild);
+          asserter.doAssert(CALLED.contains("getAttributes"));
+          asserter.doAssert(CALLED.contains("checkPermission|null|null|null"));
+          return null;
+        }
+      });
+    }
+  }
+
+  @Test
+  public void testAuthzDelegationToProvider() throws Exception {
+    LOG.info("Test not bypassing provider");
+    String[] users = {"u1"};
+    testBypassProviderHelper(users, PROVIDER_PERMISSION, false);
+  }
+
+  @Test
+  public void testAuthzBypassingProvider() throws Exception {
+    LOG.info("Test bypassing provider");
+    String[] users = {"u2", "u3"};
+    testBypassProviderHelper(users, HDFS_PERMISSION, true);
+  }
+
   @Test
   public void testCustomProvider() throws Exception {
     FileSystem fs = FileSystem.get(miniDFS.getConfiguration(0));