diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index a070345a71..58d97cada7 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -219,6 +219,19 @@ public void init(FilterConfig filterConfig) throws ServletException {
       authHandlerClassName = authHandlerName;
     }
 
+    validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, "36000"))
+        * 1000; //10 hours
+    initializeSecretProvider(filterConfig);
+
+    initializeAuthHandler(authHandlerClassName, filterConfig);
+
+
+    cookieDomain = config.getProperty(COOKIE_DOMAIN, null);
+    cookiePath = config.getProperty(COOKIE_PATH, null);
+  }
+
+  protected void initializeAuthHandler(String authHandlerClassName, FilterConfig filterConfig)
+      throws ServletException {
     try {
       Class klass = Thread.currentThread().getContextClassLoader().loadClass(authHandlerClassName);
       authHandler = (AuthenticationHandler) klass.newInstance();
@@ -230,9 +243,10 @@ public void init(FilterConfig filterConfig) throws ServletException {
     } catch (IllegalAccessException ex) {
       throw new ServletException(ex);
     }
+  }
 
-    validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, "36000"))
-        * 1000; //10 hours
+  protected void initializeSecretProvider(FilterConfig filterConfig)
+      throws ServletException {
     secretProvider = (SignerSecretProvider) filterConfig.getServletContext().
getAttribute(SIGNER_SECRET_PROVIDER_ATTRIBUTE); if (secretProvider == null) { @@ -254,9 +268,6 @@ public void init(FilterConfig filterConfig) throws ServletException { customSecretProvider = true; } signer = new Signer(secretProvider); - - cookieDomain = config.getProperty(COOKIE_DOMAIN, null); - cookiePath = config.getProperty(COOKIE_PATH, null); } @SuppressWarnings("unchecked") diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java index 5d93fcfa1c..3b6b958ac1 100644 --- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java +++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java @@ -283,6 +283,8 @@ public byte[][] getAllSecrets() { filter = new AuthenticationFilter(); try { FilterConfig config = Mockito.mock(FilterConfig.class); + ServletContext sc = Mockito.mock(ServletContext.class); + Mockito.when(config.getServletContext()).thenReturn(sc); Mockito.when(config.getInitParameter(AuthenticationFilter.AUTH_TYPE)).thenReturn("kerberos"); Mockito.when(config.getInitParameterNames()).thenReturn( new Vector(Arrays.asList(AuthenticationFilter.AUTH_TYPE)).elements()); diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 8567e1e679..55ef9d3de1 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -409,6 +409,9 @@ Release 2.7.0 - UNRELEASED HADOOP-10714. AmazonS3Client.deleteObjects() need to be limited to 1000 entries per call. (Juan Yu via atm) + HADOOP-11272. Allow ZKSignerSecretProvider and + ZKDelegationTokenSecretManager to use the same curator client. 
+    (Arun Suresh via atm)
 
 Release 2.6.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java
index 82dd2da7e1..ebc45a5b43 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/ZKDelegationTokenSecretManager.java
@@ -136,7 +136,11 @@ public ZKDelegationTokenSecretManager(Configuration conf) {
         conf.getLong(DelegationTokenManager.REMOVAL_SCAN_INTERVAL,
             DelegationTokenManager.REMOVAL_SCAN_INTERVAL_DEFAULT) * 1000);
     if (CURATOR_TL.get() != null) {
-      zkClient = CURATOR_TL.get();
+      zkClient =
+          CURATOR_TL.get().usingNamespace(
+              conf.get(ZK_DTSM_ZNODE_WORKING_PATH,
+                  ZK_DTSM_ZNODE_WORKING_PATH_DEAFULT)
+                  + "/" + ZK_DTSM_NAMESPACE);
       isExternalClient = true;
     } else {
       String connString = conf.get(ZK_DTSM_ZK_CONNECTION_STRING);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
index aa9ec9948d..fbd1129a8c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticationFilter.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.security.token.delegation.web;
 
 import com.google.common.annotations.VisibleForTesting;
+
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -46,6 +47,7 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletRequestWrapper;
 import javax.servlet.http.HttpServletResponse;
+
 import java.io.IOException;
 import java.io.Writer;
 import java.nio.charset.Charset;
@@ -156,14 +158,7 @@ protected Configuration getProxyuserConfiguration(FilterConfig filterConfig)
 
   @Override
   public void init(FilterConfig filterConfig) throws ServletException {
-    // A single CuratorFramework should be used for a ZK cluster.
-    // If the ZKSignerSecretProvider has already created it, it has to
-    // be set here... to be used by the ZKDelegationTokenSecretManager
-    ZKDelegationTokenSecretManager.setCurator((CuratorFramework)
-        filterConfig.getServletContext().getAttribute(ZKSignerSecretProvider.
-            ZOOKEEPER_SIGNER_SECRET_PROVIDER_CURATOR_CLIENT_ATTRIBUTE));
     super.init(filterConfig);
-    ZKDelegationTokenSecretManager.setCurator(null);
     AuthenticationHandler handler = getAuthenticationHandler();
     AbstractDelegationTokenSecretManager dtSecretManager =
         (AbstractDelegationTokenSecretManager) filterConfig.getServletContext().
@@ -188,6 +183,19 @@ public void init(FilterConfig filterConfig) throws ServletException {
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf, PROXYUSER_PREFIX);
   }
 
+  @Override
+  protected void initializeAuthHandler(String authHandlerClassName,
+      FilterConfig filterConfig) throws ServletException {
+    // A single CuratorFramework should be used for a ZK cluster.
+    // If the ZKSignerSecretProvider has already created it, it has to
+    // be set here... to be used by the ZKDelegationTokenSecretManager
+    ZKDelegationTokenSecretManager.setCurator((CuratorFramework)
+        filterConfig.getServletContext().getAttribute(ZKSignerSecretProvider.
+            ZOOKEEPER_SIGNER_SECRET_PROVIDER_CURATOR_CLIENT_ATTRIBUTE));
+    super.initializeAuthHandler(authHandlerClassName, filterConfig);
+    ZKDelegationTokenSecretManager.setCurator(null);
+  }
+
   protected void setHandlerAuthMethod(SaslRpcServer.AuthMethod authMethod) {
     this.handlerAuthMethod = authMethod;
   }
diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
index 4628e3696a..9e761789c3 100644
--- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
+++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.crypto.key.kms.server;
 
+import org.apache.curator.test.TestingServer;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.kms.server.KeyAuthorizationKeyProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
@@ -1584,6 +1585,106 @@ public Void run() throws Exception {
     });
   }
 
+  @Test
+  public void testKMSWithZKSigner() throws Exception {
+    doKMSWithZK(false, true);
+  }
+
+  @Test
+  public void testKMSWithZKDTSM() throws Exception {
+    doKMSWithZK(true, false);
+  }
+
+  @Test
+  public void testKMSWithZKSignerAndDTSM() throws Exception {
+    doKMSWithZK(true, true);
+  }
+
+  public void doKMSWithZK(boolean zkDTSM, boolean zkSigner) throws Exception {
+    TestingServer zkServer = null;
+    try {
+      zkServer = new TestingServer();
+      zkServer.start();
+
+      Configuration conf = new Configuration();
+      conf.set("hadoop.security.authentication", "kerberos");
+      UserGroupInformation.setConfiguration(conf);
+      final File testDir = getTestDir();
+      conf = createBaseKMSConf(testDir);
+      conf.set("hadoop.kms.authentication.type", "kerberos");
+      conf.set("hadoop.kms.authentication.kerberos.keytab", keytab.getAbsolutePath());
+      conf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
+      conf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
+
+      if (zkSigner) {
+        conf.set("hadoop.kms.authentication.signer.secret.provider", "zookeeper");
+        conf.set("hadoop.kms.authentication.signer.secret.provider.zookeeper.path","/testKMSWithZKDTSM");
+        conf.set("hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string",zkServer.getConnectString());
+      }
+
+      if (zkDTSM) {
+        conf.set("hadoop.kms.authentication.zk-dt-secret-manager.enable", "true");
+      }
+      if (zkDTSM && !zkSigner) {
+        conf.set("hadoop.kms.authentication.zk-dt-secret-manager.zkConnectionString", zkServer.getConnectString());
+        conf.set("hadoop.kms.authentication.zk-dt-secret-manager.znodeWorkingPath", "testZKPath");
+        conf.set("hadoop.kms.authentication.zk-dt-secret-manager.zkAuthType", "none");
+      }
+
+      for (KMSACLs.Type type : KMSACLs.Type.values()) {
+        conf.set(type.getAclConfigKey(), type.toString());
+      }
+      conf.set(KMSACLs.Type.CREATE.getAclConfigKey(),
+          KMSACLs.Type.CREATE.toString() + ",SET_KEY_MATERIAL");
+
+      conf.set(KMSACLs.Type.ROLLOVER.getAclConfigKey(),
+          KMSACLs.Type.ROLLOVER.toString() + ",SET_KEY_MATERIAL");
+
+      conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k0.ALL", "*");
+      conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k1.ALL", "*");
+      conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k2.ALL", "*");
+      conf.set(KeyAuthorizationKeyProvider.KEY_ACL + "k3.ALL", "*");
+
+      writeConf(testDir, conf);
+
+      KMSCallable c =
+          new KMSCallable() {
+            @Override
+            public KeyProvider call() throws Exception {
+              final Configuration conf = new Configuration();
+              conf.setInt(KeyProvider.DEFAULT_BITLENGTH_NAME, 128);
+              final URI uri = createKMSUri(getKMSUrl());
+
+              final KeyProvider kp =
+                  doAs("SET_KEY_MATERIAL",
+                      new PrivilegedExceptionAction() {
+                        @Override
+                        public KeyProvider run() throws Exception {
+                          KMSClientProvider kp = new KMSClientProvider(uri, conf);
+                          kp.createKey("k1", new byte[16],
+                              new KeyProvider.Options(conf));
+                          kp.createKey("k2", new byte[16],
+                              new KeyProvider.Options(conf));
+                          kp.createKey("k3", new byte[16],
+                              new KeyProvider.Options(conf));
+                          return kp;
+                        }
+                      });
+              return kp;
+            }
+          };
+
+      runServer(null, null, testDir, c);
+    } finally {
+      if (zkServer != null) {
+        zkServer.stop();
+        zkServer.close();
+      }
+    }
+
+  }
+
+
   @Test
   public void testProxyUserKerb() throws Exception {
     doProxyUserTest(true);