HADOOP-10172. Cache SASL server factories (daryn)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1552389 13f79535-47bb-0310-9956-ffa450edef68
commit 011a7b210d
parent 42e7446add
@@ -408,6 +408,8 @@ Release 2.4.0 - UNRELEASED
     HADOOP-10047. Add a direct-buffer based apis for compression. (Gopal V
     via acmurthy)
 
+    HADOOP-10172. Cache SASL server factories (daryn)
+
   BUG FIXES
 
     HADOOP-9964. Fix deadlocks in TestHttpServer by synchronize
@@ -25,6 +25,10 @@
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.security.Security;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -38,6 +42,7 @@
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
+import javax.security.sasl.SaslServerFactory;
 
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
@@ -63,6 +68,7 @@ public class SaslRpcServer {
   public static final String SASL_DEFAULT_REALM = "default";
   public static final Map<String, String> SASL_PROPS =
       new TreeMap<String, String>();
+  private static SaslServerFactory saslFactory;
 
   public static enum QualityOfProtection {
     AUTHENTICATION("auth"),
@@ -151,7 +157,7 @@ public SaslServer create(Connection connection,
         new PrivilegedExceptionAction<SaslServer>() {
           @Override
           public SaslServer run() throws SaslException {
-            return Sasl.createSaslServer(mechanism, protocol, serverId,
+            return saslFactory.createSaslServer(mechanism, protocol, serverId,
                 SaslRpcServer.SASL_PROPS, callback);
           }
         });
@@ -180,6 +186,7 @@ public static void init(Configuration conf) {
     SASL_PROPS.put(Sasl.QOP, saslQOP.getSaslQop());
     SASL_PROPS.put(Sasl.SERVER_AUTH, "true");
     Security.addProvider(new SaslPlainServer.SecurityProvider());
+    saslFactory = new FastSaslServerFactory(SASL_PROPS);
   }
 
   static String encodeIdentifier(byte[] identifier) {
@@ -363,4 +370,47 @@ public void handle(Callback[] callbacks) throws
       }
     }
   }
+
+  // Sasl.createSaslServer is 100-200X slower than caching the factories!
+  private static class FastSaslServerFactory implements SaslServerFactory {
+    private final Map<String,List<SaslServerFactory>> factoryCache =
+        new HashMap<String,List<SaslServerFactory>>();
+
+    FastSaslServerFactory(Map<String,?> props) {
+      final Enumeration<SaslServerFactory> factories =
+          Sasl.getSaslServerFactories();
+      while (factories.hasMoreElements()) {
+        SaslServerFactory factory = factories.nextElement();
+        for (String mech : factory.getMechanismNames(props)) {
+          if (!factoryCache.containsKey(mech)) {
+            factoryCache.put(mech, new ArrayList<SaslServerFactory>());
+          }
+          factoryCache.get(mech).add(factory);
+        }
+      }
+    }
+
+    @Override
+    public SaslServer createSaslServer(String mechanism, String protocol,
+        String serverName, Map<String,?> props, CallbackHandler cbh)
+        throws SaslException {
+      SaslServer saslServer = null;
+      List<SaslServerFactory> factories = factoryCache.get(mechanism);
+      if (factories != null) {
+        for (SaslServerFactory factory : factories) {
+          saslServer = factory.createSaslServer(
+              mechanism, protocol, serverName, props, cbh);
+          if (saslServer != null) {
+            break;
+          }
+        }
+      }
+      return saslServer;
+    }
+
+    @Override
+    public String[] getMechanismNames(Map<String, ?> props) {
+      return factoryCache.keySet().toArray(new String[0]);
+    }
+  }
 }
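For context on the "100-200X slower" comment above: Sasl.createSaslServer walks the installed security providers and instantiates matching factories on every call, whereas FastSaslServerFactory resolves the factories once up front. The following standalone sketch (not part of this commit) illustrates the difference with a rough timing loop; it assumes the JDK's built-in DIGEST-MD5 mechanism is available, and the class name, iteration count, and output format are illustrative only.

import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;

import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslServer;
import javax.security.sasl.SaslServerFactory;

// Illustrative sketch only; assumes the SunSASL provider offers DIGEST-MD5.
public class SaslFactoryCacheDemo {
  private static final String MECH = "DIGEST-MD5";
  private static final int ITERATIONS = 10000;

  public static void main(String[] args) throws Exception {
    Map<String, String> props = new HashMap<String, String>();
    // No-op handler: DIGEST-MD5 only consults callbacks while evaluating
    // responses, not while the server object is being constructed.
    CallbackHandler cbh = new CallbackHandler() {
      @Override
      public void handle(Callback[] callbacks) {
      }
    };

    // Slow path: every call re-scans the providers and loads factories.
    long t0 = System.nanoTime();
    for (int i = 0; i < ITERATIONS; i++) {
      SaslServer server =
          Sasl.createSaslServer(MECH, "demo", "localhost", props, cbh);
      server.dispose();
    }
    long slowMs = (System.nanoTime() - t0) / 1000000L;

    // Fast path: resolve a factory for the mechanism once, then reuse it
    // (FastSaslServerFactory does this for all mechanisms at init time).
    SaslServerFactory cached = null;
    Enumeration<SaslServerFactory> factories = Sasl.getSaslServerFactories();
    while (factories.hasMoreElements() && cached == null) {
      SaslServerFactory factory = factories.nextElement();
      for (String mech : factory.getMechanismNames(props)) {
        if (MECH.equals(mech)) {
          cached = factory;
          break;
        }
      }
    }
    long t1 = System.nanoTime();
    for (int i = 0; i < ITERATIONS; i++) {
      SaslServer server =
          cached.createSaslServer(MECH, "demo", "localhost", props, cbh);
      server.dispose();
    }
    long fastMs = (System.nanoTime() - t1) / 1000000L;

    System.out.println("Sasl.createSaslServer x" + ITERATIONS + ": " + slowMs + " ms");
    System.out.println("cached factory        x" + ITERATIONS + ": " + fastMs + " ms");
  }
}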
@@ -137,7 +137,9 @@ public void setup() {
     LOG.info("Testing QOP:"+expectedQop);
     LOG.info("---------------------------------");
     conf = new Configuration();
-    conf.set(HADOOP_SECURITY_AUTHENTICATION, KERBEROS.toString());
+    // the specific tests for kerberos will enable kerberos. forcing it
+    // for all tests will cause tests to fail if the user has a TGT
+    conf.set(HADOOP_SECURITY_AUTHENTICATION, SIMPLE.toString());
     conf.set("hadoop.rpc.protection", expectedQop.name().toLowerCase());
     UserGroupInformation.setConfiguration(conf);
     enableSecretManager = null;
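As the new comment in setup() notes, Kerberos-specific tests now enable Kerberos themselves instead of having it forced on every test. A minimal sketch of that opt-in is below; it mirrors the configuration calls removed from setup(), requires a Kerberos-capable environment (krb5 configuration), and the class and method names are hypothetical, not taken from the test code.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

// Hypothetical helper, not part of this commit.
public class KerberosOptInSketch {
  // A Kerberos-specific test flips the SIMPLE default (set in setup())
  // back to "kerberos" before exercising secure RPC.
  static Configuration newKerberosConf() {
    Configuration conf = new Configuration();
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);
    return conf;
  }
}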