HADOOP-6951. Distinct minicluster services (e.g. NN and JT) overwrite each other's service policies. Contributed by Aaron T. Myers.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1002896 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Thomas White 2010-09-29 23:49:32 +00:00
parent 8db4b543b9
commit 642ed17a48
4 changed files with 34 additions and 7 deletions

View File

@ -250,6 +250,9 @@ Trunk (unreleased changes)
HADOOP-6940. RawLocalFileSystem's markSupported method misnamed markSupport.
(Tom White via eli).
HADOOP-6951. Distinct minicluster services (e.g. NN and JT) overwrite each
other's service policies. (Aaron T. Myers via tomwhite)
Release 0.21.0 - Unreleased
INCOMPATIBLE CHANGES

View File

@ -60,6 +60,7 @@
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.BytesWritable;
@ -78,6 +79,7 @@
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager; import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.SecretManager;
@ -182,6 +184,7 @@ public static String getRemoteAddress() {
private Configuration conf; private Configuration conf;
private SecretManager<TokenIdentifier> secretManager; private SecretManager<TokenIdentifier> secretManager;
private ServiceAuthorizationManager serviceAuthorizationManager = new ServiceAuthorizationManager();
private int maxQueueSize; private int maxQueueSize;
private final int maxRespSize; private final int maxRespSize;
@ -239,6 +242,22 @@ public RpcMetrics getRpcMetrics() {
return rpcMetrics; return rpcMetrics;
} }
/**
 * Reload the service-level authorization ACLs used by this RPC server.
 *
 * Delegates to this server's own {@link ServiceAuthorizationManager}
 * instance, so distinct servers in the same JVM (e.g. a minicluster NN
 * and JT) keep independent policies rather than clobbering a shared
 * static one.
 *
 * @param conf     configuration to read the policy file from
 * @param provider policy provider describing the protocols this server
 *                 exposes
 */
public void refreshServiceAcl(Configuration conf, PolicyProvider provider) {
  serviceAuthorizationManager.refresh(conf, provider);
}
/**
 * Exposes this server's per-instance authorization manager.
 *
 * Intended for test code that needs to inspect which protocols have
 * ACLs configured; production callers should use
 * {@code refreshServiceAcl} instead of mutating the manager directly.
 *
 * @return the {@link ServiceAuthorizationManager} owned by this server
 */
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
public ServiceAuthorizationManager getServiceAuthorizationManager() {
  return serviceAuthorizationManager;
}
/** A call queued for handling. */ /** A call queued for handling. */
private static class Call { private static class Call {
private int id; // the client's call id private int id; // the client's call id
@ -1652,7 +1671,7 @@ public void authorize(UserGroupInformation user,
throw new AuthorizationException("Unknown protocol: " + throw new AuthorizationException("Unknown protocol: " +
connection.getProtocol()); connection.getProtocol());
} }
ServiceAuthorizationManager.authorize(user, protocol, getConf(), hostname); serviceAuthorizationManager.authorize(user, protocol, getConf(), hostname);
} }
} }

View File

@ -20,6 +20,7 @@
import java.io.IOException; import java.io.IOException;
import java.util.IdentityHashMap; import java.util.IdentityHashMap;
import java.util.Map; import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
@ -43,7 +44,7 @@ public class ServiceAuthorizationManager {
private static final Log LOG = LogFactory private static final Log LOG = LogFactory
.getLog(ServiceAuthorizationManager.class); .getLog(ServiceAuthorizationManager.class);
private static Map<Class<?>, AccessControlList> protocolToAcl = private Map<Class<?>, AccessControlList> protocolToAcl =
new IdentityHashMap<Class<?>, AccessControlList>(); new IdentityHashMap<Class<?>, AccessControlList>();
/** /**
@ -73,7 +74,7 @@ public class ServiceAuthorizationManager {
* @param hostname fully qualified domain name of the client * @param hostname fully qualified domain name of the client
* @throws AuthorizationException on authorization failure * @throws AuthorizationException on authorization failure
*/ */
public static void authorize(UserGroupInformation user, public void authorize(UserGroupInformation user,
Class<?> protocol, Class<?> protocol,
Configuration conf, Configuration conf,
String hostname String hostname
@ -129,7 +130,7 @@ public static void authorize(UserGroupInformation user,
AUDITLOG.info(AUTHZ_SUCCESSFULL_FOR + user + " for protocol="+protocol); AUDITLOG.info(AUTHZ_SUCCESSFULL_FOR + user + " for protocol="+protocol);
} }
public static synchronized void refresh(Configuration conf, public synchronized void refresh(Configuration conf,
PolicyProvider provider) { PolicyProvider provider) {
// Get the system property 'hadoop.policy.file' // Get the system property 'hadoop.policy.file'
String policyFile = String policyFile =
@ -158,4 +159,9 @@ public static synchronized void refresh(Configuration conf,
// Flip to the newly parsed permissions // Flip to the newly parsed permissions
protocolToAcl = newAcls; protocolToAcl = newAcls;
} }
// Package-protected for use in tests.
// NOTE(review): returns a live key-set view of protocolToAcl, so it
// reflects subsequent refresh() calls — callers should not mutate it.
Set<Class<?>> getProtocolsWithAcls() {
  Map<Class<?>, AccessControlList> current = protocolToAcl;
  return current.keySet();
}
} }

View File

@ -41,7 +41,6 @@
import org.apache.hadoop.security.authorize.AuthorizationException; import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.PolicyProvider; import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.authorize.Service; import org.apache.hadoop.security.authorize.Service;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.AccessControlException;
import static org.mockito.Mockito.*; import static org.mockito.Mockito.*;
@ -364,11 +363,11 @@ public Service[] getServices() {
} }
private void doRPCs(Configuration conf, boolean expectFailure) throws Exception { private void doRPCs(Configuration conf, boolean expectFailure) throws Exception {
ServiceAuthorizationManager.refresh(conf, new TestPolicyProvider());
Server server = RPC.getServer(TestProtocol.class, Server server = RPC.getServer(TestProtocol.class,
new TestImpl(), ADDRESS, 0, 5, true, conf, null); new TestImpl(), ADDRESS, 0, 5, true, conf, null);
server.refreshServiceAcl(conf, new TestPolicyProvider());
TestProtocol proxy = null; TestProtocol proxy = null;
server.start(); server.start();