HADOOP-11016. KMS should support signing cookies with zookeeper secret manager. (tucu)
Commit 0a495bef5c (parent e4ddb6da15)
pom.xml:
@@ -187,6 +187,11 @@
       <artifactId>metrics-core</artifactId>
       <scope>compile</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-test</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>
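The test-scoped curator-test dependency added above supplies the in-process ZooKeeper server used by the new TestKMSWithZK test further down. A minimal sketch of its use, mirroring the calls that appear in that test (the wrapper class name here is illustrative, not part of the commit):

import org.apache.curator.test.TestingServer;

public class EmbeddedZKSketch {
  public static void main(String[] args) throws Exception {
    // Embedded ZooKeeper on a free local port, provided by curator-test.
    TestingServer zkServer = new TestingServer();
    zkServer.start();
    // Connection string such as "127.0.0.1:<port>"; this is what the
    // 'zookeeper' signer secret provider is pointed at in the test.
    System.out.println(zkServer.getConnectString());
    zkServer.stop();
  }
}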
kms-site.xml:
@@ -68,4 +68,61 @@
     </description>
   </property>
 
+  <!-- Authentication cookie signature source -->
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider</name>
+    <value>random</value>
+    <description>
+      Indicates how the secret to sign the authentication cookies will be
+      stored. Options are 'random' (default), 'string' and 'zookeeper'.
+      If using a setup with multiple KMS instances, 'zookeeper' should be used.
+    </description>
+  </property>
+
+  <!-- Configuration for 'zookeeper' authentication cookie signature source -->
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.path</name>
+    <value>/hadoop-kms/hadoop-auth-signature-secret</value>
+    <description>
+      The Zookeeper ZNode path where the KMS instances will store and retrieve
+      the secret from.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string</name>
+    <value>#HOSTNAME#:#PORT#,...</value>
+    <description>
+      The Zookeeper connection string, a comma-separated list of
+      hostname:port pairs.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type</name>
+    <value>kerberos</value>
+    <description>
+      The Zookeeper authentication type, 'none' or 'sasl' (Kerberos).
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab</name>
+    <value>/etc/hadoop/conf/kms.keytab</value>
+    <description>
+      The absolute path for the Kerberos keytab with the credentials to
+      connect to Zookeeper.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal</name>
+    <value>kms/#HOSTNAME#</value>
+    <description>
+      The Kerberos service principal used to connect to Zookeeper.
+    </description>
+  </property>
+
 </configuration>
KMSAuthenticationFilter.java:
@@ -46,7 +46,8 @@
 @InterfaceAudience.Private
 public class KMSAuthenticationFilter
     extends DelegationTokenAuthenticationFilter {
-  private static final String CONF_PREFIX = KMSConfiguration.CONFIG_PREFIX +
+
+  public static final String CONFIG_PREFIX = KMSConfiguration.CONFIG_PREFIX +
       "authentication.";
 
   @Override
@@ -56,9 +57,9 @@ protected Properties getConfiguration(String configPrefix,
     Configuration conf = KMSWebApp.getConfiguration();
     for (Map.Entry<String, String> entry : conf) {
       String name = entry.getKey();
-      if (name.startsWith(CONF_PREFIX)) {
+      if (name.startsWith(CONFIG_PREFIX)) {
         String value = conf.get(name);
-        name = name.substring(CONF_PREFIX.length());
+        name = name.substring(CONFIG_PREFIX.length());
         props.setProperty(name, value);
       }
     }
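The prefix is renamed and made public so that code outside the filter, such as the new TestKMSWithZK test below, can compose the full hadoop.kms.authentication.* property names that getConfiguration() strips and forwards. A small sketch under that assumption (the wrapper class is illustrative only; the property name matches the kms-site.xml block above):

package org.apache.hadoop.crypto.key.kms.server;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;

public class SignerProviderKeySketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    // CONFIG_PREFIX is "hadoop.kms.authentication.", so this sets
    // "hadoop.kms.authentication.signer.secret.provider", the key shown
    // in the configuration hunk above.
    conf.set(KMSAuthenticationFilter.CONFIG_PREFIX +
        AuthenticationFilter.SIGNER_SECRET_PROVIDER, "zookeeper");
    System.out.println(
        conf.get("hadoop.kms.authentication.signer.secret.provider"));
  }
}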
index.apt.vm (KMS documentation):
@@ -452,12 +452,12 @@ $ keytool -genkey -alias tomcat -keyalg RSA
 
   * GENERATE_EEK - generateEncryptedKey, warmUpEncryptedKeys
 
-  * DECRYPT_EEK - decryptEncryptedKey;
+  * DECRYPT_EEK - decryptEncryptedKey
 
   * READ - getKeyVersion, getKeyVersions, getMetadata, getKeysMetadata,
-    getCurrentKey;
+    getCurrentKey
 
-  * ALL - all of the above;
+  * ALL - all of the above
 
   These can be defined in the KMS <<<etc/hadoop/kms-acls.xml>>> as follows
 
@@ -589,6 +589,89 @@ $ keytool -genkey -alias tomcat -keyalg RSA
 +---+
 
 
+** Using Multiple Instances of KMS Behind a Load-Balancer or VIP
+
+  KMS supports multiple KMS instances behind a load-balancer or VIP for
+  scalability and for HA purposes.
+
+  When using multiple KMS instances behind a load-balancer or VIP, requests from
+  the same user may be handled by different KMS instances.
+
+  KMS instances behind a load-balancer or VIP must be specially configured to
+  work properly as a single logical service.
+
+*** HTTP Kerberos Principals Configuration
+
+  TBD
+
+*** HTTP Authentication Signature
+
+  KMS uses Hadoop Authentication for HTTP authentication. Hadoop Authentication
+  issues a signed HTTP Cookie once the client has authenticated successfully.
+  This HTTP Cookie has an expiration time, after which it will trigger a new
+  authentication sequence. This is done to avoid triggering the authentication
+  on every HTTP request of a client.
+
+  A KMS instance must verify the HTTP Cookie signatures signed by other KMS
+  instances. To do this, all KMS instances must share the signing secret.
+
+  This secret sharing can be done using a Zookeeper service, which is configured
+  in KMS with the following properties in the <<<kms-site.xml>>>:
+
++---+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider</name>
+    <value>zookeeper</value>
+    <description>
+      Indicates how the secret to sign the authentication cookies will be
+      stored. Options are 'random' (default), 'string' and 'zookeeper'.
+      If using a setup with multiple KMS instances, 'zookeeper' should be used.
+    </description>
+  </property>
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.path</name>
+    <value>/hadoop-kms/hadoop-auth-signature-secret</value>
+    <description>
+      The Zookeeper ZNode path where the KMS instances will store and retrieve
+      the secret from.
+    </description>
+  </property>
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string</name>
+    <value>#HOSTNAME#:#PORT#,...</value>
+    <description>
+      The Zookeeper connection string, a comma-separated list of
+      hostname:port pairs.
+    </description>
+  </property>
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type</name>
+    <value>kerberos</value>
+    <description>
+      The Zookeeper authentication type, 'none' or 'sasl' (Kerberos).
+    </description>
+  </property>
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab</name>
+    <value>/etc/hadoop/conf/kms.keytab</value>
+    <description>
+      The absolute path for the Kerberos keytab with the credentials to
+      connect to Zookeeper.
+    </description>
+  </property>
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal</name>
+    <value>kms/#HOSTNAME#</value>
+    <description>
+      The Kerberos service principal used to connect to Zookeeper.
+    </description>
+  </property>
++---+
+
+*** Delegation Tokens
+
+  TBD
+
 ** KMS HTTP REST API
 
 *** Create a Key
TestKMS.java:
@@ -123,7 +123,8 @@ protected Configuration createBaseKMSConf(File keyStoreDir) throws Exception {
     return conf;
   }
 
-  protected void writeConf(File confDir, Configuration conf) throws Exception {
+  public static void writeConf(File confDir, Configuration conf)
+      throws Exception {
     Writer writer = new FileWriter(new File(confDir,
         KMSConfiguration.KMS_SITE_XML));
     conf.writeXml(writer);
@@ -139,7 +140,7 @@ protected void writeConf(File confDir, Configuration conf) throws Exception {
     writer.close();
   }
 
-  protected URI createKMSUri(URL kmsUrl) throws Exception {
+  public static URI createKMSUri(URL kmsUrl) throws Exception {
     String str = kmsUrl.toString();
     str = str.replaceFirst("://", "@");
     return new URI("kms://" + str);
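writeConf() and createKMSUri() become public static so other test classes can reuse them without subclassing TestKMS; the new TestKMSWithZK below calls writeConf() exactly this way. A minimal sketch of that reuse (the helper class, method, and variable names are illustrative, not part of the commit):

package org.apache.hadoop.crypto.key.kms.server;

import java.io.File;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;

public class KMSTestHelpersSketch {
  // Writes conf as kms-site.xml into confDir and derives the client-side
  // KMS URI (e.g. kms://http@localhost:<port>/kms) from a running MiniKMS.
  static URI writeConfAndGetUri(File confDir, Configuration conf, MiniKMS kms)
      throws Exception {
    TestKMS.writeConf(confDir, conf);
    return TestKMS.createKMSUri(kms.getKMSUrl());
  }
}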
TestKMSWithZK.java (new file):
@@ -0,0 +1,179 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.crypto.key.kms.server;

import org.apache.curator.test.TestingServer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion;
import org.apache.hadoop.crypto.key.KeyProvider.Options;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersion;
import org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension;
import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
import org.apache.hadoop.crypto.key.kms.KMSRESTConstants;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.util.ZKSignerSecretProvider;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.LoginContext;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.URL;
import java.security.Principal;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;

public class TestKMSWithZK {

  protected Configuration createBaseKMSConf(File keyStoreDir) throws Exception {
    Configuration conf = new Configuration(false);
    conf.set("hadoop.security.key.provider.path",
        "jceks://file@" + new Path(keyStoreDir.getAbsolutePath(),
            "kms.keystore").toUri());
    conf.set("hadoop.kms.authentication.type", "simple");
    conf.setBoolean(KMSConfiguration.KEY_AUTHORIZATION_ENABLE, false);

    conf.set(KMSACLs.Type.GET_KEYS.getAclConfigKey(), "foo");
    return conf;
  }

  @Test
  public void testMultipleKMSInstancesWithZKSigner() throws Exception {
    final File testDir = TestKMS.getTestDir();
    Configuration conf = createBaseKMSConf(testDir);

    TestingServer zkServer = new TestingServer();
    zkServer.start();

    MiniKMS kms1 = null;
    MiniKMS kms2 = null;

    conf.set(KMSAuthenticationFilter.CONFIG_PREFIX +
        AuthenticationFilter.SIGNER_SECRET_PROVIDER, "zookeeper");
    conf.set(KMSAuthenticationFilter.CONFIG_PREFIX +
        ZKSignerSecretProvider.ZOOKEEPER_CONNECTION_STRING,
        zkServer.getConnectString());
    conf.set(KMSAuthenticationFilter.CONFIG_PREFIX +
        ZKSignerSecretProvider.ZOOKEEPER_PATH, "/secret");
    TestKMS.writeConf(testDir, conf);

    try {
      kms1 = new MiniKMS.Builder()
          .setKmsConfDir(testDir).setLog4jConfFile("log4j.properties").build();
      kms1.start();

      kms2 = new MiniKMS.Builder()
          .setKmsConfDir(testDir).setLog4jConfFile("log4j.properties").build();
      kms2.start();

      final URL url1 = new URL(kms1.getKMSUrl().toExternalForm() +
          KMSRESTConstants.SERVICE_VERSION + "/" +
          KMSRESTConstants.KEYS_NAMES_RESOURCE);
      final URL url2 = new URL(kms2.getKMSUrl().toExternalForm() +
          KMSRESTConstants.SERVICE_VERSION + "/" +
          KMSRESTConstants.KEYS_NAMES_RESOURCE);

      final DelegationTokenAuthenticatedURL.Token token =
          new DelegationTokenAuthenticatedURL.Token();
      final DelegationTokenAuthenticatedURL aUrl =
          new DelegationTokenAuthenticatedURL();

      UserGroupInformation ugiFoo = UserGroupInformation.createUserForTesting(
          "foo", new String[]{"gfoo"});
      UserGroupInformation ugiBar = UserGroupInformation.createUserForTesting(
          "bar", new String[]{"gBar"});

      ugiFoo.doAs(new PrivilegedExceptionAction<Object>() {
        @Override
        public Object run() throws Exception {
          HttpURLConnection conn = aUrl.openConnection(url1, token);
          Assert.assertEquals(HttpURLConnection.HTTP_OK,
              conn.getResponseCode());
          return null;
        }
      });

      ugiBar.doAs(new PrivilegedExceptionAction<Object>() {
        @Override
        public Object run() throws Exception {
          HttpURLConnection conn = aUrl.openConnection(url2, token);
          Assert.assertEquals(HttpURLConnection.HTTP_OK,
              conn.getResponseCode());
          return null;
        }
      });

      ugiBar.doAs(new PrivilegedExceptionAction<Object>() {
        @Override
        public Object run() throws Exception {
          final DelegationTokenAuthenticatedURL.Token emptyToken =
              new DelegationTokenAuthenticatedURL.Token();
          HttpURLConnection conn = aUrl.openConnection(url2, emptyToken);
          Assert.assertEquals(HttpURLConnection.HTTP_FORBIDDEN,
              conn.getResponseCode());
          return null;
        }
      });

    } finally {
      if (kms2 != null) {
        kms2.stop();
      }
      if (kms1 != null) {
        kms1.stop();
      }
      zkServer.stop();
    }

  }

}