From 5d182949badb2eb80393de7ba3838102d006488b Mon Sep 17 00:00:00 2001 From: Xiao Chen Date: Thu, 5 Jan 2017 17:21:57 -0800 Subject: [PATCH] HADOOP-13597. Switch KMS from Tomcat to Jetty. Contributed by John Zhuge. --- .../resources/assemblies/hadoop-kms-dist.xml | 25 +- .../server/AuthenticationFilter.java | 6 +- .../src/main/bin/hadoop-functions.sh | 51 ++-- .../hadoop/conf/ConfigurationWithLogging.java | 113 ++++++++ .../org/apache/hadoop/http/HttpServer2.java | 163 +++++++++--- .../hadoop/security/ssl/SSLFactory.java | 68 +++-- .../src/site/markdown/CommandsManual.md | 8 +- .../src/test/scripts/hadoop_mkdir.bats | 42 +++ .../src/test/scripts/hadoop_using_envvar.bats | 33 +++ .../dev-support/findbugsExcludeFile.xml | 2 +- hadoop-common-project/hadoop-kms/pom.xml | 160 +---------- .../hadoop-kms/src/main/conf/kms-env.sh | 48 ++-- .../src/main/conf/kms-log4j.properties | 3 +- .../hadoop-kms/src/main/conf/kms-site.xml | 169 +----------- .../key/kms/server/KMSConfiguration.java | 16 ++ .../crypto/key/kms/server/KMSJMXServlet.java | 36 --- .../crypto/key/kms/server/KMSWebApp.java | 23 +- .../crypto/key/kms/server/KMSWebServer.java | 155 +++++++++++ .../hadoop-kms/src/main/libexec/kms-config.sh | 72 ----- .../main/libexec/shellprofile.d/hadoop-kms.sh | 57 ++++ .../src/main/resources/kms-default.xml | 248 ++++++++++++++++++ .../webapps/kms}/WEB-INF/web.xml | 12 +- .../webapps/static}/index.html | 12 +- .../hadoop-kms/src/main/sbin/kms.sh | 116 +++----- .../src/main/tomcat/ROOT/WEB-INF/web.xml | 16 -- .../src/main/tomcat/logging.properties | 67 ----- .../hadoop-kms/src/main/tomcat/server.xml | 155 ----------- .../src/main/tomcat/ssl-server.xml.conf | 136 ---------- .../hadoop-kms/src/site/configuration.xsl | 49 ++++ .../hadoop-kms/src/site/markdown/index.md.vm | 122 ++++++--- .../hadoop/crypto/key/kms/server/MiniKMS.java | 116 ++------ 31 files changed, 1145 insertions(+), 1154 deletions(-) create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigurationWithLogging.java create mode 100644 hadoop-common-project/hadoop-common/src/test/scripts/hadoop_mkdir.bats create mode 100644 hadoop-common-project/hadoop-common/src/test/scripts/hadoop_using_envvar.bats delete mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJMXServlet.java create mode 100644 hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java delete mode 100644 hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh create mode 100755 hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh create mode 100644 hadoop-common-project/hadoop-kms/src/main/resources/kms-default.xml rename hadoop-common-project/hadoop-kms/src/main/{webapp => resources/webapps/kms}/WEB-INF/web.xml (87%) rename hadoop-common-project/hadoop-kms/src/main/{tomcat/ROOT => resources/webapps/static}/index.html (62%) delete mode 100644 hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/WEB-INF/web.xml delete mode 100644 hadoop-common-project/hadoop-kms/src/main/tomcat/logging.properties delete mode 100644 hadoop-common-project/hadoop-kms/src/main/tomcat/server.xml delete mode 100644 hadoop-common-project/hadoop-kms/src/main/tomcat/ssl-server.xml.conf create mode 100644 hadoop-common-project/hadoop-kms/src/site/configuration.xsl diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml index 
5830bba301..ff6f99080c 100644 --- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml +++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml @@ -21,6 +21,14 @@ false + + + target + /share/hadoop/common + + ${project.artifactId}-${project.version}.jar + + ${basedir}/src/main/conf @@ -41,7 +49,7 @@ ${basedir}/src/main/libexec /libexec - * + **/* 0755 @@ -51,4 +59,19 @@ /share/doc/hadoop/kms + + + false + /share/hadoop/common/lib + + + org.apache.hadoop:hadoop-common + org.apache.hadoop:hadoop-hdfs + + org.slf4j:slf4j-api + org.slf4j:slf4j-log4j12 + org.hsqldb:hsqldb + + + diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java index 264d9916c0..b10fc844bb 100644 --- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java +++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java @@ -142,7 +142,7 @@ public class AuthenticationFilter implements Filter { private String cookieDomain; private String cookiePath; private boolean isCookiePersistent; - private boolean isInitializedByTomcat; + private boolean destroySecretProvider; /** *

Initializes the authentication filter and signer secret provider.
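The hunks before and after this javadoc rename isInitializedByTomcat to destroySecretProvider, which states the actual contract: the filter destroys the SignerSecretProvider only when it constructed the provider itself, not when the servlet container supplied one. A minimal sketch of that ownership rule, with illustrative stand-in names (AuthFilterSketch and SecretProvider are not classes from this patch):

    // Illustrative sketch only; SecretProvider stands in for SignerSecretProvider.
    class AuthFilterSketch {
      private SecretProvider secretProvider;
      private boolean destroySecretProvider;

      void init(SecretProvider fromContainer) {
        if (fromContainer != null) {
          secretProvider = fromContainer;        // injected: the container owns it
        } else {
          secretProvider = new SecretProvider(); // self-built: this filter owns it
          destroySecretProvider = true;
        }
      }

      void destroy() {
        if (secretProvider != null && destroySecretProvider) {
          secretProvider.destroy();              // tear down only what we created
          secretProvider = null;
        }
      }

      static class SecretProvider {
        void destroy() {
        }
      }
    }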

@@ -209,7 +209,7 @@ protected void initializeSecretProvider(FilterConfig filterConfig) secretProvider = constructSecretProvider( filterConfig.getServletContext(), config, false); - isInitializedByTomcat = true; + destroySecretProvider = true; } catch (Exception ex) { throw new ServletException(ex); } @@ -356,7 +356,7 @@ public void destroy() { authHandler.destroy(); authHandler = null; } - if (secretProvider != null && isInitializedByTomcat) { + if (secretProvider != null && destroySecretProvider) { secretProvider.destroy(); secretProvider = null; } diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh index b6e2b59133..3151023441 100755 --- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh +++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh @@ -262,6 +262,39 @@ function hadoop_deprecate_envvar fi } +## @description Declare `var` being used and print its value. +## @audience public +## @stability stable +## @replaceable yes +## @param var +function hadoop_using_envvar +{ + local var=$1 + local val=${!var} + + if [[ -n "${val}" ]]; then + hadoop_debug "${var} = ${val}" + fi +} + +## @description Create the directory 'dir'. +## @audience public +## @stability stable +## @replaceable yes +## @param dir +function hadoop_mkdir +{ + local dir=$1 + + if [[ ! -w "${dir}" ]] && [[ ! -d "${dir}" ]]; then + hadoop_error "WARNING: ${dir} does not exist. Creating." + if ! mkdir -p "${dir}"; then + hadoop_error "ERROR: Unable to create ${dir}. Aborting." + exit 1 + fi + fi +} + ## @description Bootstraps the Hadoop shell environment ## @audience private ## @stability evolving @@ -1396,14 +1429,7 @@ function hadoop_verify_piddir hadoop_error "No pid directory defined." exit 1 fi - if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then - hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating." - mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1 - if [[ $? -gt 0 ]]; then - hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting." - exit 1 - fi - fi + hadoop_mkdir "${HADOOP_PID_DIR}" touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1 if [[ $? -gt 0 ]]; then hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting." @@ -1421,14 +1447,7 @@ function hadoop_verify_logdir hadoop_error "No log directory defined." exit 1 fi - if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then - hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating." - mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1 - if [[ $? -gt 0 ]]; then - hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting." - exit 1 - fi - fi + hadoop_mkdir "${HADOOP_LOG_DIR}" touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1 if [[ $? -gt 0 ]]; then hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting." diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigurationWithLogging.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigurationWithLogging.java new file mode 100644 index 0000000000..8a5e05462f --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigurationWithLogging.java @@ -0,0 +1,113 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.conf; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Logs access to {@link Configuration}. + * Sensitive data will be redacted. + */ +@InterfaceAudience.Private +public class ConfigurationWithLogging extends Configuration { + private static final Logger LOG = + LoggerFactory.getLogger(ConfigurationWithLogging.class); + + private final Logger log; + private final ConfigRedactor redactor; + + public ConfigurationWithLogging(Configuration conf) { + super(conf); + log = LOG; + redactor = new ConfigRedactor(conf); + } + + /** + * @see Configuration#get(String). + */ + @Override + public String get(String name) { + String value = super.get(name); + log.info("Got {} = '{}'", name, redactor.redact(name, value)); + return value; + } + + /** + * @see Configuration#get(String, String). + */ + @Override + public String get(String name, String defaultValue) { + String value = super.get(name, defaultValue); + log.info("Got {} = '{}' (default '{}')", name, + redactor.redact(name, value), redactor.redact(name, defaultValue)); + return value; + } + + /** + * @see Configuration#getBoolean(String, boolean). + */ + @Override + public boolean getBoolean(String name, boolean defaultValue) { + boolean value = super.getBoolean(name, defaultValue); + log.info("Got {} = '{}' (default '{}')", name, value, defaultValue); + return value; + } + + /** + * @see Configuration#getFloat(String, float). + */ + @Override + public float getFloat(String name, float defaultValue) { + float value = super.getFloat(name, defaultValue); + log.info("Got {} = '{}' (default '{}')", name, value, defaultValue); + return value; + } + + /** + * @see Configuration#getInt(String, int). + */ + @Override + public int getInt(String name, int defaultValue) { + int value = super.getInt(name, defaultValue); + log.info("Got {} = '{}' (default '{}')", name, value, defaultValue); + return value; + } + + /** + * @see Configuration#getLong(String, long). + */ + @Override + public long getLong(String name, long defaultValue) { + long value = super.getLong(name, defaultValue); + log.info("Got {} = '{}' (default '{}')", name, value, defaultValue); + return value; + } + + /** + * @see Configuration#set(String, String, String). + */ + @Override + public void set(String name, String value, String source) { + log.info("Set {} to '{}'{}", name, redactor.redact(name, value), + source == null ? 
"" : " from " + source); + super.set(name, value, source); + } +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java index b930f754c5..6e21592331 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java @@ -17,6 +17,10 @@ */ package org.apache.hadoop.http; +import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER; +import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER; + +import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InterruptedIOException; @@ -45,7 +49,10 @@ import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.sun.jersey.spi.container.servlet.ServletContainer; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.HadoopIllegalArgumentException; @@ -54,14 +61,15 @@ import org.apache.hadoop.conf.ConfServlet; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.apache.hadoop.security.AuthenticationFilterInitializer; -import org.apache.hadoop.security.authentication.util.SignerSecretProvider; import org.apache.hadoop.jmx.JMXJsonServlet; import org.apache.hadoop.log.LogLevel; +import org.apache.hadoop.security.AuthenticationFilterInitializer; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authentication.server.AuthenticationFilter; +import org.apache.hadoop.security.authentication.util.SignerSecretProvider; import org.apache.hadoop.security.authorize.AccessControlList; +import org.apache.hadoop.security.ssl.SSLFactory; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.Shell; import org.eclipse.jetty.http.HttpVersion; @@ -90,16 +98,9 @@ import org.eclipse.jetty.servlet.ServletMapping; import org.eclipse.jetty.util.ArrayUtil; import org.eclipse.jetty.util.MultiException; -import org.eclipse.jetty.webapp.WebAppContext; -import org.eclipse.jetty.util.thread.QueuedThreadPool; - -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; -import com.sun.jersey.spi.container.servlet.ServletContainer; import org.eclipse.jetty.util.ssl.SslContextFactory; - -import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER; -import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER; +import org.eclipse.jetty.util.thread.QueuedThreadPool; +import org.eclipse.jetty.webapp.WebAppContext; /** * Create a Jetty embedded server to answer http requests. 
 The primary goal is
@@ -116,9 +117,22 @@ public final class HttpServer2 implements FilterContainer {
   public static final Log LOG = LogFactory.getLog(HttpServer2.class);
 
+  public static final String HTTP_SCHEME = "http";
+  public static final String HTTPS_SCHEME = "https";
+
+  public static final String HTTP_MAX_REQUEST_HEADER_SIZE_KEY =
+      "hadoop.http.max.request.header.size";
+  public static final int HTTP_MAX_REQUEST_HEADER_SIZE_DEFAULT = 65536;
+  public static final String HTTP_MAX_RESPONSE_HEADER_SIZE_KEY =
+      "hadoop.http.max.response.header.size";
+  public static final int HTTP_MAX_RESPONSE_HEADER_SIZE_DEFAULT = 65536;
+  public static final String HTTP_MAX_THREADS_KEY = "hadoop.http.max.threads";
+  public static final String HTTP_TEMP_DIR_KEY = "hadoop.http.temp.dir";
+
   static final String FILTER_INITIALIZER_PROPERTY
       = "hadoop.http.filter.initializers";
-  public static final String HTTP_MAX_THREADS = "hadoop.http.max.threads";
+  @Deprecated
+  public static final String HTTP_MAX_THREADS = HTTP_MAX_THREADS_KEY;
 
   // The ServletContext attribute where the daemon Configuration
   // gets stored.
@@ -158,6 +172,7 @@ public static class Builder {
     private ArrayList<URI> endpoints = Lists.newArrayList();
     private String name;
     private Configuration conf;
+    private Configuration sslConf;
     private String[] pathSpecs;
     private AccessControlList adminsAcl;
     private boolean securityEnabled = false;
@@ -253,6 +268,15 @@ public Builder setConf(Configuration conf) {
       return this;
     }
 
+    /**
+     * Specify the SSL configuration to load. This API provides an alternative
+     * to keyStore/keyPassword/trustStore.
+     */
+    public Builder setSSLConf(Configuration sslCnf) {
+      this.sslConf = sslCnf;
+      return this;
+    }
+
     public Builder setPathSpec(String[] pathSpec) {
       this.pathSpecs = pathSpec;
       return this;
     }
@@ -315,7 +339,45 @@ public Builder setXFrameOption(String option) {
       return this;
     }
 
+    /**
+     * A wrapper of {@link Configuration#getPassword(String)}. It returns
+     * String instead of char[] and throws
+     * {@link IOException} when the password is not found.
+     *
+     * @param conf the configuration
+     * @param name the property name
+     * @return the password string
+     */
+    private static String getPassword(Configuration conf, String name)
+        throws IOException {
+      char[] passchars = conf.getPassword(name);
+      if (passchars == null) {
+        throw new IOException("Password " + name + " not found");
+      }
+      return new String(passchars);
+    }
+
+    /**
+     * Load SSL properties from the SSL configuration.
+ */ + private void loadSSLConfiguration() throws IOException { + if (sslConf == null) { + return; + } + needsClientAuth(sslConf.getBoolean( + SSLFactory.SSL_SERVER_NEED_CLIENT_AUTH, + SSLFactory.SSL_SERVER_NEED_CLIENT_AUTH_DEFAULT)); + keyStore(sslConf.get(SSLFactory.SSL_SERVER_KEYSTORE_LOCATION), + getPassword(sslConf, SSLFactory.SSL_SERVER_KEYSTORE_PASSWORD), + sslConf.get(SSLFactory.SSL_SERVER_KEYSTORE_TYPE, + SSLFactory.SSL_SERVER_KEYSTORE_TYPE_DEFAULT)); + keyPassword(getPassword(sslConf, + SSLFactory.SSL_SERVER_KEYSTORE_KEYPASSWORD)); + trustStore(sslConf.get(SSLFactory.SSL_SERVER_TRUSTSTORE_LOCATION), + getPassword(sslConf, SSLFactory.SSL_SERVER_TRUSTSTORE_PASSWORD), + sslConf.get(SSLFactory.SSL_SERVER_TRUSTSTORE_TYPE, + SSLFactory.SSL_SERVER_TRUSTSTORE_TYPE_DEFAULT)); + } public HttpServer2 build() throws IOException { Preconditions.checkNotNull(name, "name is not set"); @@ -335,15 +397,33 @@ public HttpServer2 build() throws IOException { server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey); } + for (URI ep : endpoints) { + if (HTTPS_SCHEME.equals(ep.getScheme())) { + loadSSLConfiguration(); + break; + } + } + + int requestHeaderSize = conf.getInt( + HTTP_MAX_REQUEST_HEADER_SIZE_KEY, + HTTP_MAX_REQUEST_HEADER_SIZE_DEFAULT); + int responseHeaderSize = conf.getInt( + HTTP_MAX_RESPONSE_HEADER_SIZE_KEY, + HTTP_MAX_RESPONSE_HEADER_SIZE_DEFAULT); + + HttpConfiguration httpConfig = new HttpConfiguration(); + httpConfig.setRequestHeaderSize(requestHeaderSize); + httpConfig.setResponseHeaderSize(responseHeaderSize); + for (URI ep : endpoints) { final ServerConnector connector; String scheme = ep.getScheme(); - if ("http".equals(scheme)) { - connector = - HttpServer2.createDefaultChannelConnector(server.webServer); - } else if ("https".equals(scheme)) { - connector = createHttpsChannelConnector(server.webServer); - + if (HTTP_SCHEME.equals(scheme)) { + connector = createHttpChannelConnector(server.webServer, + httpConfig); + } else if (HTTPS_SCHEME.equals(scheme)) { + connector = createHttpsChannelConnector(server.webServer, + httpConfig); } else { throw new HadoopIllegalArgumentException( "unknown scheme for endpoint:" + ep); @@ -356,16 +436,20 @@ public HttpServer2 build() throws IOException { return server; } - private ServerConnector createHttpsChannelConnector(Server server) { + private ServerConnector createHttpChannelConnector( + Server server, HttpConfiguration httpConfig) { ServerConnector conn = new ServerConnector(server); - HttpConfiguration httpConfig = new HttpConfiguration(); - httpConfig.setRequestHeaderSize(JettyUtils.HEADER_SIZE); - httpConfig.setResponseHeaderSize(JettyUtils.HEADER_SIZE); - httpConfig.setSecureScheme("https"); - httpConfig.addCustomizer(new SecureRequestCustomizer()); ConnectionFactory connFactory = new HttpConnectionFactory(httpConfig); conn.addConnectionFactory(connFactory); configureChannelConnector(conn); + return conn; + } + + private ServerConnector createHttpsChannelConnector( + Server server, HttpConfiguration httpConfig) { + httpConfig.setSecureScheme(HTTPS_SCHEME); + httpConfig.addCustomizer(new SecureRequestCustomizer()); + ServerConnector conn = createHttpChannelConnector(server, httpConfig); SslContextFactory sslContextFactory = new SslContextFactory(); sslContextFactory.setNeedClientAuth(needsClientAuth); @@ -397,7 +481,7 @@ private HttpServer2(final Builder b) throws IOException { this.webServer = new Server(); this.adminsAcl = b.adminsAcl; this.handlers = new HandlerCollection(); - this.webAppContext = 
createWebAppContext(b.name, b.conf, adminsAcl, appDir); + this.webAppContext = createWebAppContext(b, adminsAcl, appDir); this.xFrameOptionIsEnabled = b.xFrameEnabled; this.xFrameOption = b.xFrameOption; @@ -482,8 +566,8 @@ private void addListener(ServerConnector connector) { listeners.add(connector); } - private static WebAppContext createWebAppContext(String name, - Configuration conf, AccessControlList adminsAcl, final String appDir) { + private static WebAppContext createWebAppContext(Builder b, + AccessControlList adminsAcl, final String appDir) { WebAppContext ctx = new WebAppContext(); ctx.setDefaultsDescriptor(null); ServletHolder holder = new ServletHolder(new DefaultServlet()); @@ -496,10 +580,15 @@ private static WebAppContext createWebAppContext(String name, holder.setInitParameters(params); ctx.setWelcomeFiles(new String[] {"index.html"}); ctx.addServlet(holder, "/"); - ctx.setDisplayName(name); + ctx.setDisplayName(b.name); ctx.setContextPath("/"); - ctx.setWar(appDir + "/" + name); - ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf); + ctx.setWar(appDir + "/" + b.name); + String tempDirectory = b.conf.get(HTTP_TEMP_DIR_KEY); + if (tempDirectory != null && !tempDirectory.isEmpty()) { + ctx.setTempDirectory(new File(tempDirectory)); + ctx.setAttribute("javax.servlet.context.tempdir", tempDirectory); + } + ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, b.conf); ctx.getServletContext().setAttribute(ADMINS_ACL, adminsAcl); addNoCacheFilter(ctx); return ctx; @@ -541,18 +630,6 @@ private static void configureChannelConnector(ServerConnector c) { } } - @InterfaceAudience.Private - public static ServerConnector createDefaultChannelConnector(Server server) { - ServerConnector conn = new ServerConnector(server); - HttpConfiguration httpConfig = new HttpConfiguration(); - httpConfig.setRequestHeaderSize(JettyUtils.HEADER_SIZE); - httpConfig.setResponseHeaderSize(JettyUtils.HEADER_SIZE); - ConnectionFactory connFactory = new HttpConnectionFactory(httpConfig); - conn.addConnectionFactory(connFactory); - configureChannelConnector(conn); - return conn; - } - /** Get an array of FilterConfiguration specified in the conf */ private static FilterInitializer[] getFilterInitializers(Configuration conf) { if (conf == null) { @@ -1056,7 +1133,7 @@ public void stop() throws Exception { } try { - // explicitly destroy the secrete provider + // explicitly destroy the secret provider secretProvider.destroy(); // clear & stop webAppContext attributes to avoid memory leaks. 
webAppContext.clearAttributes(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java index 95cba80989..cda26a56c5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java @@ -60,27 +60,61 @@ public class SSLFactory implements ConnectionConfigurator { @InterfaceAudience.Private public static enum Mode { CLIENT, SERVER } - public static final String SSL_REQUIRE_CLIENT_CERT_KEY = - "hadoop.ssl.require.client.cert"; - public static final String SSL_HOSTNAME_VERIFIER_KEY = - "hadoop.ssl.hostname.verifier"; - public static final String SSL_CLIENT_CONF_KEY = - "hadoop.ssl.client.conf"; - public static final String SSL_SERVER_CONF_KEY = - "hadoop.ssl.server.conf"; - public static final String SSLCERTIFICATE = IBM_JAVA?"ibmX509":"SunX509"; + public static final String SSL_CLIENT_CONF_KEY = "hadoop.ssl.client.conf"; + public static final String SSL_CLIENT_CONF_DEFAULT = "ssl-client.xml"; + public static final String SSL_SERVER_CONF_KEY = "hadoop.ssl.server.conf"; + public static final String SSL_SERVER_CONF_DEFAULT = "ssl-server.xml"; - public static final boolean DEFAULT_SSL_REQUIRE_CLIENT_CERT = false; + public static final String SSL_REQUIRE_CLIENT_CERT_KEY = + "hadoop.ssl.require.client.cert"; + public static final boolean SSL_REQUIRE_CLIENT_CERT_DEFAULT = false; + public static final String SSL_HOSTNAME_VERIFIER_KEY = + "hadoop.ssl.hostname.verifier"; + public static final String SSL_ENABLED_PROTOCOLS_KEY = + "hadoop.ssl.enabled.protocols"; + public static final String SSL_ENABLED_PROTOCOLS_DEFAULT = + "TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2"; + + public static final String SSL_SERVER_NEED_CLIENT_AUTH = + "ssl.server.need.client.auth"; + public static final boolean SSL_SERVER_NEED_CLIENT_AUTH_DEFAULT = false; + + public static final String SSL_SERVER_KEYSTORE_LOCATION = + "ssl.server.keystore.location"; + public static final String SSL_SERVER_KEYSTORE_PASSWORD = + "ssl.server.keystore.password"; + public static final String SSL_SERVER_KEYSTORE_TYPE = + "ssl.server.keystore.type"; + public static final String SSL_SERVER_KEYSTORE_TYPE_DEFAULT = "jks"; + public static final String SSL_SERVER_KEYSTORE_KEYPASSWORD = + "ssl.server.keystore.keypassword"; + + public static final String SSL_SERVER_TRUSTSTORE_LOCATION = + "ssl.server.truststore.location"; + public static final String SSL_SERVER_TRUSTSTORE_PASSWORD = + "ssl.server.truststore.password"; + public static final String SSL_SERVER_TRUSTSTORE_TYPE = + "ssl.server.truststore.type"; + public static final String SSL_SERVER_TRUSTSTORE_TYPE_DEFAULT = "jks"; + + public static final String SSL_SERVER_EXCLUDE_CIPHER_LIST = + "ssl.server.exclude.cipher.list"; + + @Deprecated + public static final boolean DEFAULT_SSL_REQUIRE_CLIENT_CERT = + SSL_REQUIRE_CLIENT_CERT_DEFAULT; + + public static final String SSLCERTIFICATE = IBM_JAVA?"ibmX509":"SunX509"; public static final String KEYSTORES_FACTORY_CLASS_KEY = "hadoop.ssl.keystores.factory.class"; + @Deprecated public static final String SSL_ENABLED_PROTOCOLS = - "hadoop.ssl.enabled.protocols"; + SSL_ENABLED_PROTOCOLS_KEY; + @Deprecated public static final String DEFAULT_SSL_ENABLED_PROTOCOLS = - "TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2"; - public static final String SSL_SERVER_EXCLUDE_CIPHER_LIST = - 
"ssl.server.exclude.cipher.list"; + SSL_ENABLED_PROTOCOLS_DEFAULT; private Configuration conf; private Mode mode; @@ -131,9 +165,11 @@ private Configuration readSSLConfiguration(Mode mode) { sslConf.setBoolean(SSL_REQUIRE_CLIENT_CERT_KEY, requireClientCert); String sslConfResource; if (mode == Mode.CLIENT) { - sslConfResource = conf.get(SSL_CLIENT_CONF_KEY, "ssl-client.xml"); + sslConfResource = conf.get(SSL_CLIENT_CONF_KEY, + SSL_CLIENT_CONF_DEFAULT); } else { - sslConfResource = conf.get(SSL_SERVER_CONF_KEY, "ssl-server.xml"); + sslConfResource = conf.get(SSL_SERVER_CONF_KEY, + SSL_SERVER_CONF_DEFAULT); } sslConf.addResource(sslConfResource); return sslConf; diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md b/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md index 27a858a663..8d98e912c2 100644 --- a/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md +++ b/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md @@ -207,6 +207,12 @@ NOTE: Some KeyProviders (e.g. org.apache.hadoop.crypto.key.JavaKeyStoreProvider) NOTE: Some KeyProviders do not directly execute a key deletion (e.g. performs a soft-delete instead, or delay the actual deletion, to prevent mistake). In these cases, one may encounter errors when creating/deleting a key with the same name after deleting it. Please check the underlying KeyProvider for details. +### `kms` + +Usage: `hadoop kms` + +Run KMS, the Key Management Server. + ### `trace` View and modify Hadoop tracing settings. See the [Tracing Guide](./Tracing.html). @@ -267,8 +273,6 @@ This command works by sending a HTTP/HTTPS request to the daemon's internal Jett * node manager * Timeline server -However, the command does not support KMS server, because its web interface is based on Tomcat, which does not support the servlet. - Files ----- diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_mkdir.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_mkdir.bats new file mode 100644 index 0000000000..90a4f1ab46 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_mkdir.bats @@ -0,0 +1,42 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load hadoop-functions_test_helper + +@test "hadoop_mkdir (create)" { + DIR=${BATS_TMPDIR}/nodir + rm -fr ${DIR} + run hadoop_mkdir ${DIR} + [ "${status}" = 0 ] + [ "${output}" = "WARNING: ${DIR} does not exist. Creating." 
] +} + + +@test "hadoop_mkdir (exists)" { + DIR=${BATS_TMPDIR}/exists + mkdir -p ${DIR} + run hadoop_mkdir ${DIR} + [ "${status}" = 0 ] + [ -z "${output}" ] +} + + +@test "hadoop_mkdir (failed)" { + DIR=${BATS_TMPDIR}/readonly_dir/dir + mkdir -p ${BATS_TMPDIR}/readonly_dir + chmod a-w ${BATS_TMPDIR}/readonly_dir + run hadoop_mkdir ${DIR} + [ "${status}" != 0 ] +} \ No newline at end of file diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_using_envvar.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_using_envvar.bats new file mode 100644 index 0000000000..8f8e93730a --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_using_envvar.bats @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load hadoop-functions_test_helper + +@test "hadoop_using_envvar (has value)" { + HADOOP_SHELL_SCRIPT_DEBUG=true + VAR=value + run hadoop_using_envvar VAR + [ "${status}" = 0 ] + [ "${output}" = "DEBUG: VAR = value" ] +} + + +@test "hadoop_using_envvar (no value)" { + HADOOP_SHELL_SCRIPT_DEBUG=true + VAR= + run hadoop_using_envvar VAR + [ "${status}" = 0 ] + [ -z "${output}" ] +} \ No newline at end of file diff --git a/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml index 929936dad3..f864c03145 100644 --- a/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml +++ b/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml @@ -39,7 +39,7 @@ diff --git a/hadoop-common-project/hadoop-kms/pom.xml b/hadoop-common-project/hadoop-kms/pom.xml index 73b8339052..41c36e8e00 100644 --- a/hadoop-common-project/hadoop-kms/pom.xml +++ b/hadoop-common-project/hadoop-kms/pom.xml @@ -27,20 +27,11 @@ hadoop-kms 3.0.0-alpha2-SNAPSHOT - war + jar Apache Hadoop KMS Apache Hadoop KMS - - - ${project.build.directory}/${project.artifactId}-${project.version}/share/hadoop/kms/tomcat - - - http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz - - - org.apache.hadoop @@ -80,12 +71,14 @@ javax.servlet javax.servlet-api - provided org.eclipse.jetty jetty-server - test + + + org.eclipse.jetty + jetty-webapp org.apache.hadoop @@ -100,14 +93,6 @@ commons-httpclient commons-httpclient - - tomcat - jasper-compiler - - - tomcat - jasper-runtime - javax.servlet javax.servlet-api @@ -229,66 +214,21 @@ maven-antrun-plugin - create-web-xmls - generate-test-resources + site + site run - - - - - + - - - org.apache.maven.plugins - maven-war-plugin - - - default-war - prepare-package - - war - - - true - kms - ${project.build.directory}/kms - - - - - - - org.apache.maven.plugins - maven-jar-plugin - - - prepare-jar - prepare-package - - jar - - - classes - - - - 
prepare-test-jar - prepare-package - - test-jar - - - - - + org.codehaus.mojo findbugs-maven-plugin @@ -360,84 +300,6 @@ - - - org.apache.maven.plugins - maven-antrun-plugin - - - dist - - run - - package - - - - - - - - - - cd "${project.build.directory}/tomcat.exp" - gzip -cd ../../downloads/apache-tomcat-${tomcat.version}.tar.gz | tar xf - - - - - - - - - - - - - - - - - - - - - - - - - - - tar - package - - run - - - - - - cd "${project.build.directory}" - tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz - - - - - - - - - diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh index e42904d185..0528932c27 100644 --- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh +++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh @@ -18,6 +18,14 @@ # hadoop-env.sh is read prior to this file. # +# KMS config directory +# +# export KMS_CONFIG=${HADOOP_CONF_DIR} + +# KMS log directory +# +# export KMS_LOG=${HADOOP_LOG_DIR} + # KMS temporary directory # # export KMS_TEMP=${HADOOP_HOME}/temp @@ -26,48 +34,22 @@ # # export KMS_HTTP_PORT=9600 -# The Admin port used by KMS -# -# export KMS_ADMIN_PORT=$((KMS_HTTP_PORT + 1)) - -# The maximum number of Tomcat handler threads +# The maximum number of HTTP handler threads # # export KMS_MAX_THREADS=1000 -# The maximum size of Tomcat HTTP header +# The maximum size of HTTP header # # export KMS_MAX_HTTP_HEADER_SIZE=65536 +# Whether SSL is enabled +# +# export KMS_SSL_ENABLED=false + # The location of the SSL keystore if using SSL # # export KMS_SSL_KEYSTORE_FILE=${HOME}/.keystore -# # The password of the SSL keystore if using SSL # -# export KMS_SSL_KEYSTORE_PASS=password - - -## -## Tomcat specific settings -## -# -# Location of tomcat -# -# export KMS_CATALINA_HOME=${HADOOP_HOME}/share/hadoop/kms/tomcat - -# Java System properties for KMS should be specified in this variable. -# The java.library.path and hadoop.home.dir properties are automatically -# configured. In order to supplement java.library.path, -# one should add to the JAVA_LIBRARY_PATH env var. 
-# -# export CATALINA_OPTS= - -# PID file -# -# export CATALINA_PID=${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-kms.pid - -# Output file -# -# export CATALINA_OUT=${KMS_LOG}/hadoop-${HADOOP_IDENT_STRING}-kms-${HOSTNAME}.out - +# export KMS_SSL_KEYSTORE_PASS=password \ No newline at end of file diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties b/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties index 8e6d909950..15ff436090 100644 --- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties +++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties @@ -32,7 +32,6 @@ log4j.appender.kms-audit.layout.ConversionPattern=%d{ISO8601} %m%n log4j.logger.kms-audit=INFO, kms-audit log4j.additivity.kms-audit=false -log4j.rootLogger=ALL, kms -log4j.logger.org.apache.hadoop.conf=ERROR +log4j.rootLogger=INFO, kms log4j.logger.org.apache.hadoop=INFO log4j.logger.com.sun.jersey.server.wadl.generators.WadlGeneratorJAXBGrammarGenerator=OFF \ No newline at end of file diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml b/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml index d188735227..85e71c331d 100644 --- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml +++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml @@ -12,172 +12,9 @@ See the License for the specific language governing permissions and limitations under the License. --> + + + - - - - hadoop.kms.key.provider.uri - jceks://file@/${user.home}/kms.keystore - - URI of the backing KeyProvider for the KMS. - - - - - hadoop.security.keystore.java-keystore-provider.password-file - kms.keystore.password - - If using the JavaKeyStoreProvider, the file name for the keystore password. - - - - - - - hadoop.kms.cache.enable - true - - Whether the KMS will act as a cache for the backing KeyProvider. - When the cache is enabled, operations like getKeyVersion, getMetadata, - and getCurrentKey will sometimes return cached data without consulting - the backing KeyProvider. Cached values are flushed when keys are deleted - or modified. - - - - - hadoop.kms.cache.timeout.ms - 600000 - - Expiry time for the KMS key version and key metadata cache, in - milliseconds. This affects getKeyVersion and getMetadata. - - - - - hadoop.kms.current.key.cache.timeout.ms - 30000 - - Expiry time for the KMS current key cache, in milliseconds. This - affects getCurrentKey operations. - - - - - - - hadoop.kms.audit.aggregation.window.ms - 10000 - - Duplicate audit log events within the aggregation window (specified in - ms) are quashed to reduce log traffic. A single message for aggregated - events is printed at the end of the window, along with a count of the - number of aggregated events. - - - - - - - hadoop.kms.authentication.type - simple - - Authentication type for the KMS. Can be either "simple" - or "kerberos". - - - - - hadoop.kms.authentication.kerberos.keytab - ${user.home}/kms.keytab - - Path to the keytab with credentials for the configured Kerberos principal. - - - - - hadoop.kms.authentication.kerberos.principal - HTTP/localhost - - The Kerberos principal to use for the HTTP endpoint. - The principal must start with 'HTTP/' as per the Kerberos HTTP SPNEGO specification. - - - - - hadoop.kms.authentication.kerberos.name.rules - DEFAULT - - Rules used to resolve Kerberos principal names. - - - - - - - hadoop.kms.authentication.signer.secret.provider - random - - Indicates how the secret to sign the authentication cookies will be - stored. 
Options are 'random' (default), 'string' and 'zookeeper'. - If using a setup with multiple KMS instances, 'zookeeper' should be used. - - - - - - - hadoop.kms.authentication.signer.secret.provider.zookeeper.path - /hadoop-kms/hadoop-auth-signature-secret - - The Zookeeper ZNode path where the KMS instances will store and retrieve - the secret from. - - - - - hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string - #HOSTNAME#:#PORT#,... - - The Zookeeper connection string, a list of hostnames and port comma - separated. - - - - - hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type - none - - The Zookeeper authentication type, 'none' (default) or 'sasl' (Kerberos). - - - - - hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab - /etc/hadoop/conf/kms.keytab - - The absolute path for the Kerberos keytab with the credentials to - connect to Zookeeper. - - - - - hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal - kms/#HOSTNAME# - - The Kerberos service principal used to connect to Zookeeper. - - - - - hadoop.kms.audit.logger - org.apache.hadoop.crypto.key.kms.server.SimpleKMSAuditLogger - - The audit logger for KMS. It is a comma-separated list of KMSAuditLogger - class names. Default is the text-format SimpleKMSAuditLogger only. - If this is not configured, default will be used. - - - diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java index 600f1e976b..1ef6c4e8f2 100644 --- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java @@ -32,6 +32,7 @@ public class KMSConfiguration { public static final String KMS_CONFIG_DIR = "kms.config.dir"; + public static final String KMS_DEFAULT_XML = "kms-default.xml"; public static final String KMS_SITE_XML = "kms-site.xml"; public static final String KMS_ACLS_XML = "kms-acls.xml"; @@ -42,6 +43,16 @@ public class KMSConfiguration { public static final String DEFAULT_KEY_ACL_PREFIX = "default.key.acl."; public static final String WHITELIST_KEY_ACL_PREFIX = "whitelist.key.acl."; + // HTTP properties + public static final String HTTP_PORT_KEY = "hadoop.kms.http.port"; + public static final int HTTP_PORT_DEFAULT = 9600; + public static final String HTTP_HOST_KEY = "hadoop.kms.http.host"; + public static final String HTTP_HOST_DEFAULT = "0.0.0.0"; + + // SSL properties + public static final String SSL_ENABLED_KEY = "hadoop.kms.ssl.enabled"; + public static final boolean SSL_ENABLED_DEFAULT = false; + // Property to set the backing KeyProvider public static final String KEY_PROVIDER_URI = CONFIG_PREFIX + "key.provider.uri"; @@ -77,6 +88,11 @@ public class KMSConfiguration { public static final boolean KEY_AUTHORIZATION_ENABLE_DEFAULT = true; + static { + Configuration.addDefaultResource(KMS_DEFAULT_XML); + Configuration.addDefaultResource(KMS_SITE_XML); + } + static Configuration getConfiguration(boolean loadHadoopDefaults, String ... 
resources) { Configuration conf = new Configuration(loadHadoopDefaults); diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJMXServlet.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJMXServlet.java deleted file mode 100644 index 6918015a90..0000000000 --- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJMXServlet.java +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.crypto.key.kms.server; - -import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.jmx.JMXJsonServlet; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import java.io.IOException; - -@InterfaceAudience.Private -public class KMSJMXServlet extends JMXJsonServlet { - - @Override - protected boolean isInstrumentationAccessAllowed(HttpServletRequest request, - HttpServletResponse response) throws IOException { - return true; - } -} diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java index 40ae19f4c0..857139fd59 100644 --- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java @@ -17,10 +17,17 @@ */ package org.apache.hadoop.crypto.key.kms.server; +import java.io.File; +import java.io.IOException; +import java.net.URI; +import java.net.URL; + +import javax.servlet.ServletContextEvent; +import javax.servlet.ServletContextListener; + import com.codahale.metrics.JmxReporter; import com.codahale.metrics.Meter; import com.codahale.metrics.MetricRegistry; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.crypto.key.CachingKeyProvider; @@ -34,15 +41,6 @@ import org.apache.log4j.PropertyConfigurator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.slf4j.bridge.SLF4JBridgeHandler; - -import javax.servlet.ServletContextEvent; -import javax.servlet.ServletContextListener; - -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.net.URL; @InterfaceAudience.Private public class KMSWebApp implements ServletContextListener { @@ -81,11 +79,6 @@ public class KMSWebApp implements ServletContextListener { private static KMSAudit kmsAudit; private static KeyProviderCryptoExtension keyProviderCryptoExtension; - static { - SLF4JBridgeHandler.removeHandlersForRootLogger(); - 
SLF4JBridgeHandler.install(); - } - private void initLogging(String confDir) { if (System.getProperty("log4j.configuration") == null) { System.setProperty("log4j.defaultInitOverride", "true"); diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java new file mode 100644 index 0000000000..70945cb2c1 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java @@ -0,0 +1,155 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.crypto.key.kms.server; + +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URL; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.ConfigurationWithLogging; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.http.HttpServer2; +import org.apache.hadoop.security.ssl.SSLFactory; +import org.apache.hadoop.util.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The KMS web server. + */ +@InterfaceAudience.Private +public class KMSWebServer { + private static final Logger LOG = + LoggerFactory.getLogger(KMSWebServer.class); + + private static final String NAME = "kms"; + private static final String SERVLET_PATH = "/kms"; + + private final HttpServer2 httpServer; + private final String scheme; + + KMSWebServer(Configuration cnf) throws Exception { + ConfigurationWithLogging conf = new ConfigurationWithLogging(cnf); + + // Add SSL configuration file + conf.addResource(conf.get(SSLFactory.SSL_SERVER_CONF_KEY, + SSLFactory.SSL_SERVER_CONF_DEFAULT)); + + // Override configuration with deprecated environment variables. 
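+    // (Each deprecated KMS_* environment variable below, when set, is copied
+    // onto its Jetty-era configuration property by deprecateEnv(), which also
+    // logs a warning naming the file that should be edited instead.)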
+ deprecateEnv("KMS_TEMP", conf, HttpServer2.HTTP_TEMP_DIR_KEY, + KMSConfiguration.KMS_SITE_XML); + deprecateEnv("KMS_HTTP_PORT", conf, + KMSConfiguration.HTTP_PORT_KEY, KMSConfiguration.KMS_SITE_XML); + deprecateEnv("KMS_MAX_THREADS", conf, + HttpServer2.HTTP_MAX_THREADS_KEY, KMSConfiguration.KMS_SITE_XML); + deprecateEnv("KMS_MAX_HTTP_HEADER_SIZE", conf, + HttpServer2.HTTP_MAX_REQUEST_HEADER_SIZE_KEY, + KMSConfiguration.KMS_SITE_XML); + deprecateEnv("KMS_MAX_HTTP_HEADER_SIZE", conf, + HttpServer2.HTTP_MAX_RESPONSE_HEADER_SIZE_KEY, + KMSConfiguration.KMS_SITE_XML); + deprecateEnv("KMS_SSL_ENABLED", conf, + KMSConfiguration.SSL_ENABLED_KEY, KMSConfiguration.KMS_SITE_XML); + deprecateEnv("KMS_SSL_KEYSTORE_FILE", conf, + SSLFactory.SSL_SERVER_KEYSTORE_LOCATION, + SSLFactory.SSL_SERVER_CONF_DEFAULT); + deprecateEnv("KMS_SSL_KEYSTORE_PASS", conf, + SSLFactory.SSL_SERVER_KEYSTORE_PASSWORD, + SSLFactory.SSL_SERVER_CONF_DEFAULT); + + boolean sslEnabled = conf.getBoolean(KMSConfiguration.SSL_ENABLED_KEY, + KMSConfiguration.SSL_ENABLED_DEFAULT); + scheme = sslEnabled ? HttpServer2.HTTPS_SCHEME : HttpServer2.HTTP_SCHEME; + + String host = conf.get(KMSConfiguration.HTTP_HOST_KEY, + KMSConfiguration.HTTP_HOST_DEFAULT); + int port = conf.getInt(KMSConfiguration.HTTP_PORT_KEY, + KMSConfiguration.HTTP_PORT_DEFAULT); + URI endpoint = new URI(scheme, null, host, port, null, null, null); + + httpServer = new HttpServer2.Builder() + .setName(NAME) + .setConf(conf) + .setSSLConf(conf) + .authFilterConfigurationPrefix(KMSAuthenticationFilter.CONFIG_PREFIX) + .addEndpoint(endpoint) + .build(); + } + + /** + * Load the deprecated environment variable into the configuration. + * + * @param varName the environment variable name + * @param conf the configuration + * @param propName the configuration property name + * @param confFile the configuration file name + */ + private static void deprecateEnv(String varName, Configuration conf, + String propName, String confFile) { + String value = System.getenv(varName); + if (value == null) { + return; + } + String propValue = conf.get(propName); + LOG.warn("Environment variable {} = '{}' is deprecated and overriding" + + " property {} = '{}', please set the property in {} instead.", + varName, value, propName, propValue, confFile); + conf.set(propName, value, "environment variable " + varName); + } + + public void start() throws IOException { + httpServer.start(); + } + + public boolean isRunning() { + return httpServer.isAlive(); + } + + public void join() throws InterruptedException { + httpServer.join(); + } + + public void stop() throws Exception { + httpServer.stop(); + } + + public URL getKMSUrl() { + InetSocketAddress addr = httpServer.getConnectorAddress(0); + if (null == addr) { + return null; + } + try { + return new URL(scheme, addr.getHostName(), addr.getPort(), + SERVLET_PATH); + } catch (MalformedURLException ex) { + throw new RuntimeException("It should never happen: " + ex.getMessage(), + ex); + } + } + + public static void main(String[] args) throws Exception { + StringUtils.startupShutdownMessage(KMSWebServer.class, args, LOG); + Configuration conf = KMSConfiguration.getKMSConf(); + KMSWebServer kmsWebServer = new KMSWebServer(conf); + kmsWebServer.start(); + kmsWebServer.join(); + } +} diff --git a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh b/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh deleted file mode 100644 index 52dba3882e..0000000000 --- a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh +++ 
/dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env bash -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -function hadoop_subproject_init -{ - local this - local binparent - local varlist - - if [[ -z "${HADOOP_KMS_ENV_PROCESSED}" ]]; then - if [[ -e "${HADOOP_CONF_DIR}/kms-env.sh" ]]; then - . "${HADOOP_CONF_DIR}/kms-env.sh" - export HADOOP_KMS_ENV_PROCESSED=true - fi - fi - - export HADOOP_CATALINA_PREFIX=kms - - export HADOOP_CATALINA_TEMP="${KMS_TEMP:-${HADOOP_HOME}/temp}" - - hadoop_deprecate_envvar KMS_CONFIG HADOOP_CONF_DIR - - hadoop_deprecate_envvar KMS_LOG HADOOP_LOG_DIR - - export HADOOP_CATALINA_CONFIG="${HADOOP_CONF_DIR}" - export HADOOP_CATALINA_LOG="${HADOOP_LOG_DIR}" - - export HADOOP_CATALINA_HTTP_PORT="${KMS_HTTP_PORT:-9600}" - export HADOOP_CATALINA_ADMIN_PORT="${KMS_ADMIN_PORT:-$((HADOOP_CATALINA_HTTP_PORT+1))}" - export HADOOP_CATALINA_MAX_THREADS="${KMS_MAX_THREADS:-1000}" - export HADOOP_CATALINA_MAX_HTTP_HEADER_SIZE="${KMS_MAX_HTTP_HEADER_SIZE:-65536}" - - export HADOOP_CATALINA_SSL_KEYSTORE_FILE="${KMS_SSL_KEYSTORE_FILE:-${HOME}/.keystore}" - - export CATALINA_BASE="${CATALINA_BASE:-${HADOOP_HOME}/share/hadoop/kms/tomcat}" - export HADOOP_CATALINA_HOME="${KMS_CATALINA_HOME:-${CATALINA_BASE}}" - - export CATALINA_OUT="${CATALINA_OUT:-${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-kms-${HOSTNAME}.out}" - - export CATALINA_PID="${CATALINA_PID:-${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-kms.pid}" - - if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then - varlist=$(env | egrep '(^KMS|^CATALINA)' | cut -f1 -d= | grep -v _PASS) - for i in ${varlist}; do - hadoop_debug "Setting ${i} to ${!i}" - done - fi -} - -if [[ -n "${HADOOP_COMMON_HOME}" ]] && - [[ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" ]]; then - . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" -elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then - . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" -elif [[ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]]; then - . "${HADOOP_HOME}/libexec/hadoop-config.sh" -else - echo "ERROR: Hadoop common not found." 2>&1 - exit 1 -fi diff --git a/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh b/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh new file mode 100755 index 0000000000..c530716346 --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then + hadoop_add_subcommand "kms" "run KMS, the Key Management Server" +fi + +## @description Command handler for kms subcommand +## @audience private +## @stability stable +## @replaceable no +function hadoop_subcommand_kms +{ + if [[ -f "${HADOOP_CONF_DIR}/kms-env.sh" ]]; then + # shellcheck disable=SC1090 + . "${HADOOP_CONF_DIR}/kms-env.sh" + fi + + hadoop_deprecate_envvar KMS_CONFIG HADOOP_CONF_DIR + hadoop_deprecate_envvar KMS_LOG HADOOP_LOG_DIR + + hadoop_using_envvar KMS_HTTP_PORT + hadoop_using_envvar KMS_MAX_HTTP_HEADER_SIZE + hadoop_using_envvar KMS_MAX_THREADS + hadoop_using_envvar KMS_SSL_ENABLED + hadoop_using_envvar KMS_SSL_KEYSTORE_FILE + hadoop_using_envvar KMS_TEMP + + # shellcheck disable=SC2034 + HADOOP_SUBCMD_SUPPORTDAEMONIZATION=true + # shellcheck disable=SC2034 + HADOOP_CLASSNAME=org.apache.hadoop.crypto.key.kms.server.KMSWebServer + + hadoop_add_param HADOOP_OPTS "-Dkms.config.dir=" \ + "-Dkms.config.dir=${HADOOP_CONF_DIR}" + hadoop_add_param HADOOP_OPTS "-Dkms.log.dir=" \ + "-Dkms.log.dir=${HADOOP_LOG_DIR}" + + if [[ "${HADOOP_DAEMON_MODE}" == "default" ]] || + [[ "${HADOOP_DAEMON_MODE}" == "start" ]]; then + hadoop_mkdir "${KMS_TEMP:-${HADOOP_HOME}/temp}" + fi +} \ No newline at end of file diff --git a/hadoop-common-project/hadoop-kms/src/main/resources/kms-default.xml b/hadoop-common-project/hadoop-kms/src/main/resources/kms-default.xml new file mode 100644 index 0000000000..2b178b85ac --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/main/resources/kms-default.xml @@ -0,0 +1,248 @@ + + + + + + + + + hadoop.kms.http.port + 9600 + + The HTTP port for KMS REST API. + + + + + hadoop.kms.http.host + 0.0.0.0 + + The bind host for KMS REST API. + + + + + hadoop.kms.ssl.enabled + false + + Whether SSL is enabled. Default is false, i.e. disabled. + + + + + + + hadoop.http.max.threads + 1000 + + The maxmimum number of threads. + + + + + hadoop.http.max.request.header.size + 65536 + + The maxmimum HTTP request header size. + + + + + hadoop.http.max.response.header.size + 65536 + + The maxmimum HTTP response header size. + + + + + hadoop.http.temp.dir + ${hadoop.tmp.dir}/kms + + KMS temp directory. + + + + + + + hadoop.kms.key.provider.uri + jceks://file@/${user.home}/kms.keystore + + URI of the backing KeyProvider for the KMS. + + + + + hadoop.security.keystore.java-keystore-provider.password-file + + + If using the JavaKeyStoreProvider, the file name for the keystore password. + + + + + + + hadoop.kms.cache.enable + true + + Whether the KMS will act as a cache for the backing KeyProvider. + When the cache is enabled, operations like getKeyVersion, getMetadata, + and getCurrentKey will sometimes return cached data without consulting + the backing KeyProvider. Cached values are flushed when keys are deleted + or modified. + + + + + hadoop.kms.cache.timeout.ms + 600000 + + Expiry time for the KMS key version and key metadata cache, in + milliseconds. This affects getKeyVersion and getMetadata. + + + + + hadoop.kms.current.key.cache.timeout.ms + 30000 + + Expiry time for the KMS current key cache, in milliseconds. 
This + affects getCurrentKey operations. + + + + + + + hadoop.kms.audit.aggregation.window.ms + 10000 + + Duplicate audit log events within the aggregation window (specified in + ms) are quashed to reduce log traffic. A single message for aggregated + events is printed at the end of the window, along with a count of the + number of aggregated events. + + + + + + + hadoop.kms.authentication.type + simple + + Authentication type for the KMS. Can be either 'simple' (default) or + 'kerberos'. + + + + + hadoop.kms.authentication.kerberos.keytab + ${user.home}/kms.keytab + + Path to the keytab with credentials for the configured Kerberos principal. + + + + + hadoop.kms.authentication.kerberos.principal + HTTP/localhost + + The Kerberos principal to use for the HTTP endpoint. + The principal must start with 'HTTP/' as per the Kerberos HTTP SPNEGO specification. + + + + + hadoop.kms.authentication.kerberos.name.rules + DEFAULT + + Rules used to resolve Kerberos principal names. + + + + + + + hadoop.kms.authentication.signer.secret.provider + random + + Indicates how the secret to sign the authentication cookies will be + stored. Options are 'random' (default), 'string' and 'zookeeper'. + If using a setup with multiple KMS instances, 'zookeeper' should be used. + + + + + + + hadoop.kms.authentication.signer.secret.provider.zookeeper.path + /hadoop-kms/hadoop-auth-signature-secret + + The Zookeeper ZNode path where the KMS instances store and retrieve + the secret. + + + + + hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string + #HOSTNAME#:#PORT#,... + + The Zookeeper connection string: a comma-separated list of + host:port pairs. + + + + + hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type + none + + The Zookeeper authentication type, 'none' (default) or 'sasl' (Kerberos). + + + + + hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab + /etc/hadoop/conf/kms.keytab + + The absolute path for the Kerberos keytab with the credentials to + connect to Zookeeper. + + + + + hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal + kms/#HOSTNAME# + + The Kerberos service principal used to connect to Zookeeper. + + + + + hadoop.kms.audit.logger + org.apache.hadoop.crypto.key.kms.server.SimpleKMSAuditLogger + + The audit logger for KMS. It is a comma-separated list of KMSAuditLogger + class names. Default is the text-format SimpleKMSAuditLogger only. + If this is not configured, the default will be used.
+ + + + diff --git a/hadoop-common-project/hadoop-kms/src/main/webapp/WEB-INF/web.xml b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/kms/WEB-INF/web.xml similarity index 87% rename from hadoop-common-project/hadoop-kms/src/main/webapp/WEB-INF/web.xml rename to hadoop-common-project/hadoop-kms/src/main/resources/webapps/kms/WEB-INF/web.xml index d081764217..1c14d28518 100644 --- a/hadoop-common-project/hadoop-kms/src/main/webapp/WEB-INF/web.xml +++ b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/kms/WEB-INF/web.xml @@ -40,19 +40,9 @@ 1 - - jmx-servlet - org.apache.hadoop.crypto.key.kms.server.KMSJMXServlet - - webservices-driver - /* - - - - jmx-servlet - /jmx + /kms/* diff --git a/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/index.html b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/static/index.html similarity index 62% rename from hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/index.html rename to hadoop-common-project/hadoop-kms/src/main/resources/webapps/static/index.html index e9e45121b1..9925ad9304 100644 --- a/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/index.html +++ b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/static/index.html @@ -20,8 +20,16 @@

Hadoop KMS

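The relocation of `web.xml` and `index.html` above moves the KMS web application onto the classpath, where an HttpServer2-style server is expected to resolve it by name. A minimal sketch of that lookup, assuming the `webapps/<name>` classpath convention; the class below is a hypothetical illustration, not code from this patch:

```java
import java.net.URL;

// Hypothetical illustration: once the descriptor lives under
// src/main/resources/webapps/kms/WEB-INF/web.xml, it is packaged into the
// jar and becomes resolvable through a plain classpath lookup.
public class KmsWebappLookup {
  public static void main(String[] args) {
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    URL webXml = cl.getResource("webapps/kms/WEB-INF/web.xml");
    System.out.println(webXml == null
        ? "webapps/kms not found on the classpath"
        : "KMS webapp descriptor: " + webXml);
  }
}
```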
diff --git a/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh b/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh index 7611f2ac75..b4955d5aca 100755 --- a/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh +++ b/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh @@ -13,92 +13,52 @@ # limitations under the License. # -MYNAME="${BASH_SOURCE-$0}" +MYNAME="${0##*/}" -function hadoop_usage +## @description Print usage +## @audience private +## @stability stable +## @replaceable no +function print_usage { - hadoop_add_subcommand "run" "Start kms in the current window" - hadoop_add_subcommand "run -security" "Start in the current window with security manager" - hadoop_add_subcommand "start" "Start kms in a separate window" - hadoop_add_subcommand "start -security" "Start in a separate window with security manager" - hadoop_add_subcommand "status" "Return the LSB compliant status" - hadoop_add_subcommand "stop" "Stop kms, waiting up to 5 seconds for the process to end" - hadoop_add_subcommand "top n" "Stop kms, waiting up to n seconds for the process to end" - hadoop_add_subcommand "stop -force" "Stop kms, wait up to 5 seconds and then use kill -KILL if still running" - hadoop_add_subcommand "stop n -force" "Stop kms, wait up to n seconds and then use kill -KILL if still running" - hadoop_generate_usage "${MYNAME}" false + cat </dev/null && pwd -P) - HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec" -fi - -HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}" -# shellcheck disable=SC2034 -HADOOP_NEW_CONFIG=true -if [[ -f "${HADOOP_LIBEXEC_DIR}/kms-config.sh" ]]; then - . "${HADOOP_LIBEXEC_DIR}/kms-config.sh" -else - echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/kms-config.sh." 2>&1 - exit 1 -fi - -# The Java System property 'kms.http.port' it is not used by Kms, -# it is used in Tomcat's server.xml configuration file -# - -hadoop_debug "Using CATALINA_OPTS: ${CATALINA_OPTS}" - -# We're using hadoop-common, so set up some stuff it might need: -hadoop_finalize - -hadoop_verify_logdir +echo "WARNING: ${MYNAME} is deprecated," \ + "please use 'hadoop [--daemon start|status|stop] kms'." >&2 if [[ $# = 0 ]]; then - case "${HADOOP_DAEMON_MODE}" in - status) - hadoop_status_daemon "${CATALINA_PID}" - exit - ;; - start) - set -- "start" - ;; - stop) - set -- "stop" - ;; - esac + print_usage + exit fi -hadoop_finalize_catalina_opts -export CATALINA_OPTS +case $1 in + run) + args=("kms") + ;; + start|stop|status) + args=("--daemon" "$1" "kms") + ;; + *) + echo "Unknown sub-command \"$1\"." + print_usage + exit 1 + ;; +esac -# A bug in catalina.sh script does not use CATALINA_OPTS for stopping the server -# -if [[ "${1}" = "stop" ]]; then - export JAVA_OPTS=${CATALINA_OPTS} +# Locate bin +if [[ -n "${HADOOP_HOME}" ]]; then + bin="${HADOOP_HOME}/bin" +else + sbin=$(cd -P -- "$(dirname -- "$0")" >/dev/null && pwd -P) + bin=$(cd -P -- "${sbin}/../bin" >/dev/null && pwd -P) fi -# If ssl, the populate the passwords into ssl-server.xml before starting tomcat -# -# KMS_SSL_KEYSTORE_PASS is a bit odd. -# if undefined, then the if test will not enable ssl on its own -# if "", set it to "password". 
-# if custom, use provided password -# -if [[ -f "${HADOOP_CATALINA_HOME}/conf/ssl-server.xml.conf" ]]; then - if [[ -n "${KMS_SSL_KEYSTORE_PASS+x}" ]]; then - export KMS_SSL_KEYSTORE_PASS=${KMS_SSL_KEYSTORE_PASS:-password} - KMS_SSL_KEYSTORE_PASS_ESCAPED=$(hadoop_xml_escape \ - "$(hadoop_sed_escape "$KMS_SSL_KEYSTORE_PASS")") - sed -e 's/"_kms_ssl_keystore_pass_"/'"\"${KMS_SSL_KEYSTORE_PASS_ESCAPED}\""'/g' \ - "${HADOOP_CATALINA_HOME}/conf/ssl-server.xml.conf" \ - > "${HADOOP_CATALINA_HOME}/conf/ssl-server.xml" - chmod 700 "${HADOOP_CATALINA_HOME}/conf/ssl-server.xml" >/dev/null 2>&1 - fi -fi - -exec "${HADOOP_CATALINA_HOME}/bin/catalina.sh" "$@" +exec "${bin}/hadoop" "${args[@]}" \ No newline at end of file diff --git a/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/WEB-INF/web.xml b/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/WEB-INF/web.xml deleted file mode 100644 index 9d0ae0db4c..0000000000 --- a/hadoop-common-project/hadoop-kms/src/main/tomcat/ROOT/WEB-INF/web.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - - diff --git a/hadoop-common-project/hadoop-kms/src/main/tomcat/logging.properties b/hadoop-common-project/hadoop-kms/src/main/tomcat/logging.properties deleted file mode 100644 index 7562719237..0000000000 --- a/hadoop-common-project/hadoop-kms/src/main/tomcat/logging.properties +++ /dev/null @@ -1,67 +0,0 @@ -# -# All Rights Reserved. -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -handlers = 1catalina.org.apache.juli.FileHandler, 2localhost.org.apache.juli.FileHandler, 3manager.org.apache.juli.FileHandler, 4host-manager.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler - -.handlers = 1catalina.org.apache.juli.FileHandler, java.util.logging.ConsoleHandler - -############################################################ -# Handler specific properties. -# Describes specific configuration info for Handlers. -############################################################ - -1catalina.org.apache.juli.FileHandler.level = FINE -1catalina.org.apache.juli.FileHandler.directory = ${kms.log.dir} -1catalina.org.apache.juli.FileHandler.prefix = kms-catalina. - -2localhost.org.apache.juli.FileHandler.level = FINE -2localhost.org.apache.juli.FileHandler.directory = ${kms.log.dir} -2localhost.org.apache.juli.FileHandler.prefix = kms-localhost. - -3manager.org.apache.juli.FileHandler.level = FINE -3manager.org.apache.juli.FileHandler.directory = ${kms.log.dir} -3manager.org.apache.juli.FileHandler.prefix = kms-manager. - -4host-manager.org.apache.juli.FileHandler.level = FINE -4host-manager.org.apache.juli.FileHandler.directory = ${kms.log.dir} -4host-manager.org.apache.juli.FileHandler.prefix = kms-host-manager. 
- -java.util.logging.ConsoleHandler.level = FINE -java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter - - -############################################################ -# Facility specific properties. -# Provides extra control for each logger. -############################################################ - -org.apache.catalina.core.ContainerBase.[Catalina].[localhost].level = INFO -org.apache.catalina.core.ContainerBase.[Catalina].[localhost].handlers = 2localhost.org.apache.juli.FileHandler - -org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/manager].level = INFO -org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/manager].handlers = 3manager.org.apache.juli.FileHandler - -org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/host-manager].level = INFO -org.apache.catalina.core.ContainerBase.[Catalina].[localhost].[/host-manager].handlers = 4host-manager.org.apache.juli.FileHandler - -# For example, set the com.xyz.foo logger to only log SEVERE -# messages: -#org.apache.catalina.startup.ContextConfig.level = FINE -#org.apache.catalina.startup.HostConfig.level = FINE -#org.apache.catalina.session.ManagerBase.level = FINE -#org.apache.catalina.core.AprLifecycleListener.level=FINE diff --git a/hadoop-common-project/hadoop-kms/src/main/tomcat/server.xml b/hadoop-common-project/hadoop-kms/src/main/tomcat/server.xml deleted file mode 100644 index d8fd161205..0000000000 --- a/hadoop-common-project/hadoop-kms/src/main/tomcat/server.xml +++ /dev/null @@ -1,155 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/hadoop-common-project/hadoop-kms/src/main/tomcat/ssl-server.xml.conf b/hadoop-common-project/hadoop-kms/src/main/tomcat/ssl-server.xml.conf deleted file mode 100644 index 272542a6b8..0000000000 --- a/hadoop-common-project/hadoop-kms/src/main/tomcat/ssl-server.xml.conf +++ /dev/null @@ -1,136 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/hadoop-common-project/hadoop-kms/src/site/configuration.xsl b/hadoop-common-project/hadoop-kms/src/site/configuration.xsl new file mode 100644 index 0000000000..8f2ae9bcbb --- /dev/null +++ b/hadoop-common-project/hadoop-kms/src/site/configuration.xsl @@ -0,0 +1,49 @@ + + + + + + + + +

Configuration Properties

+ + + + + + + + + + + + + +
name value description
+ + + + + + + +
+ + +
+
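The `kms-default.xml` added earlier and the `configuration.xsl` above follow Hadoop's usual default/site resource pattern: shipped defaults are registered first, and `kms-site.xml` entries override them. A minimal sketch of that layering with the standard `Configuration` API; the resource names come from this patch, but the demo class is an assumption, not the patch's `KMSConfiguration`:

```java
import org.apache.hadoop.conf.Configuration;

// Sketch of default/site layering: addDefaultResource() registers classpath
// resources in order, so later resources (kms-site.xml) override earlier
// ones (kms-default.xml).
public class KmsConfLayering {
  static {
    Configuration.addDefaultResource("kms-default.xml");
    Configuration.addDefaultResource("kms-site.xml");
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Falls back to the shipped default (9600) if kms-site.xml is silent.
    int port = conf.getInt("hadoop.kms.http.port", 9600);
    boolean ssl = conf.getBoolean("hadoop.kms.ssl.enabled", false);
    System.out.println("KMS port=" + port + ", ssl=" + ssl);
  }
}
```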
diff --git a/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm b/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm index 58c1fbdb96..810d5689a1 100644 --- a/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm +++ b/hadoop-common-project/hadoop-kms/src/site/markdown/index.md.vm @@ -29,7 +29,7 @@ The client is a KeyProvider implementation interacts with the KMS using the KMS KMS and its client have built-in security and they support HTTP SPNEGO Kerberos authentication and HTTPS secure transport. -KMS is a Java web-application and it runs using a pre-configured Tomcat bundled with the Hadoop distribution. +KMS is a Java web application that runs in an embedded Jetty server. KMS Client Configuration ------------------------ @@ -51,6 +51,15 @@ The following is an example to configure HDFS NameNode as a KMS client in KMS --- +$H3 Start/Stop the KMS + +To start/stop KMS, use `hadoop --daemon start|stop kms`. For example: + + hadoop-${project.version} $ hadoop --daemon start kms + +NOTE: The script `kms.sh` is deprecated. It is now just a wrapper around +`hadoop kms`. + $H3 KMS Configuration Configure the KMS backing KeyProvider properties in the `etc/hadoop/kms-site.xml` configuration file: @@ -71,6 +80,15 @@ The password file is looked up in the Hadoop's configuration directory via the c NOTE: You need to restart the KMS for the configuration changes to take effect. +$H3 KMS HTTP Configuration + +KMS pre-configures the HTTP port to 9600. + +KMS supports the following HTTP [configuration properties](./kms-default.html) +in `etc/hadoop/kms-site.xml`. + +NOTE: You need to restart the KMS for the configuration changes to take effect. + $H3 KMS Cache KMS has two kinds of caching: a CachingKeyProvider for caching the encryption keys, and a KeyProvider for caching the EEKs. @@ -180,36 +198,6 @@ The Aggregation interval is configured via the property : 10000 -$H3 Start/Stop the KMS - -To start/stop KMS use KMS's sbin/kms.sh script. For example: - - hadoop-${project.version} $ sbin/kms.sh start - -NOTE: Invoking the script without any parameters list all possible parameters (start, stop, run, etc.). The `kms.sh` script is a wrapper for Tomcat's `catalina.sh` script that sets the environment variables and Java System properties required to run KMS. - -$H3 Embedded Tomcat Configuration - -To configure the embedded Tomcat go to the `share/hadoop/kms/tomcat/conf`. - -KMS pre-configures the HTTP and Admin ports in Tomcat's `server.xml` to 9600 and 9601. - -Tomcat logs are also preconfigured to go to Hadoop's `logs/` directory. - -The following environment variables (which can be set in KMS's `etc/hadoop/kms-env.sh` script) can be used to alter those values: - -* KMS_HTTP_PORT -* KMS_ADMIN_PORT -* KMS_MAX_THREADS -* KMS_MAX_HTTP_HEADER_SIZE -* KMS_LOG - -NOTE: You need to restart the KMS for the configuration changes to take effect. - -$H3 Loading native libraries - -The following environment variable (which can be set in KMS's `etc/hadoop/kms-env.sh` script) can be used to specify the location of any required native libraries. For eg.
Tomact native Apache Portable Runtime (APR) libraries: - -* JAVA_LIBRARY_PATH - $H3 KMS Security Configuration $H4 Enabling Kerberos HTTP SPNEGO Authentication @@ -279,20 +267,52 @@ If `users`, `groups` or `hosts` has a `*`, it means there are no restrictions fo $H4 KMS over HTTPS (SSL) -To configure KMS to work over HTTPS the following 2 properties must be set in the `etc/hadoop/kms_env.sh` script (shown with default values): +Enable SSL in `etc/hadoop/kms-site.xml`: -* KMS_SSL_KEYSTORE_FILE=$HOME/.keystore -* KMS_SSL_KEYSTORE_PASS=password +```xml + + hadoop.kms.ssl.enabled + true + + Whether SSL is enabled. Default is false, i.e. disabled. + + -In the KMS `tomcat/conf` directory, replace the `server.xml` file with the provided `ssl-server.xml` file. +``` + +Configure `etc/hadoop/ssl-server.xml` with proper values, for example: + +```xml + + ssl.server.keystore.location + ${user.home}/.keystore + Keystore to be used. Must be specified. + + + + ssl.server.keystore.password + + Must be specified. + + + + ssl.server.keystore.keypassword + + Must be specified. + +``` You need to create an SSL certificate for the KMS. As the `kms` Unix user, using the Java `keytool` command to create the SSL certificate: - $ keytool -genkey -alias tomcat -keyalg RSA + $ keytool -genkey -alias jetty -keyalg RSA -You will be asked a series of questions in an interactive prompt. It will create the keystore file, which will be named **.keystore** and located in the `kms` user home directory. +You will be asked a series of questions in an interactive prompt. It will +create the keystore file, which will be named **.keystore** and located in the +user's home directory. -The password you enter for "keystore password" must match the value of the `KMS_SSL_KEYSTORE_PASS` environment variable set in the `kms-env.sh` script in the configuration directory. +The password you enter for "keystore password" must match the value of the +property `ssl.server.keystore.password` set in the `ssl-server.xml` in the +configuration directory. The answer to "What is your first and last name?" (i.e. "CN") must be the hostname of the machine where the KMS will be running. @@ -1032,3 +1052,29 @@ $H4 Get Keys Metadata }, ... ] + +$H3 Deprecated Environment Variables + +The following environment variables are deprecated. Set the corresponding +configuration properties instead. 
+ +Environment Variable | Configuration Property | Configuration File +-------------------------|------------------------------|-------------------- +KMS_HTTP_PORT | hadoop.kms.http.port | kms-site.xml +KMS_MAX_HTTP_HEADER_SIZE | hadoop.http.max.request.header.size and hadoop.http.max.response.header.size | kms-site.xml +KMS_MAX_THREADS | hadoop.http.max.threads | kms-site.xml +KMS_SSL_ENABLED | hadoop.kms.ssl.enabled | kms-site.xml +KMS_SSL_KEYSTORE_FILE | ssl.server.keystore.location | ssl-server.xml +KMS_SSL_KEYSTORE_PASS | ssl.server.keystore.password | ssl-server.xml +KMS_TEMP | hadoop.http.temp.dir | kms-site.xml + +$H3 Default HTTP Services + +Name | Description +-------------------|------------------------------------ +/conf | Display configuration properties +/jmx | Java JMX management interface +/logLevel | Get or set log level per class +/logs | Display log files +/stacks | Display JVM stacks +/static/index.html | The static home page \ No newline at end of file diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java index 8b181ad642..f911c5b750 100644 --- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java +++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java @@ -17,84 +17,24 @@ */ package org.apache.hadoop.crypto.key.kms.server; -import com.google.common.base.Preconditions; - -import org.apache.commons.io.IOUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.http.JettyUtils; -import org.apache.hadoop.util.ThreadUtil; -import org.eclipse.jetty.http.HttpVersion; -import org.eclipse.jetty.server.ConnectionFactory; -import org.eclipse.jetty.server.HttpConfiguration; -import org.eclipse.jetty.server.HttpConnectionFactory; -import org.eclipse.jetty.server.SecureRequestCustomizer; -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.server.ServerConnector; -import org.eclipse.jetty.server.SslConnectionFactory; -import org.eclipse.jetty.util.ssl.SslContextFactory; -import org.eclipse.jetty.webapp.WebAppContext; - import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; +import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.Writer; -import java.io.IOException; -import java.net.MalformedURLException; import java.net.URL; -import java.util.UUID; + +import com.google.common.base.Preconditions; +import org.apache.commons.io.IOUtils; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.security.ssl.SSLFactory; +import org.apache.hadoop.util.ThreadUtil; public class MiniKMS { - private static Server createJettyServer(String keyStore, String password, int inPort) { - try { - boolean ssl = keyStore != null; - String host = "localhost"; - Server server = new Server(); - ServerConnector conn = new ServerConnector(server); - HttpConfiguration httpConfig = new HttpConfiguration(); - httpConfig.setRequestHeaderSize(JettyUtils.HEADER_SIZE); - httpConfig.setResponseHeaderSize(JettyUtils.HEADER_SIZE); - httpConfig.setSecureScheme("https"); - httpConfig.addCustomizer(new SecureRequestCustomizer()); - ConnectionFactory connFactory = new HttpConnectionFactory(httpConfig); - conn.addConnectionFactory(connFactory); - conn.setHost(host); - 
conn.setPort(inPort); - if (ssl) { - SslContextFactory sslContextFactory = new SslContextFactory(); - sslContextFactory.setNeedClientAuth(false); - sslContextFactory.setKeyStorePath(keyStore); - sslContextFactory.setKeyStoreType("jks"); - sslContextFactory.setKeyStorePassword(password); - conn.addFirstConnectionFactory( - new SslConnectionFactory(sslContextFactory, - HttpVersion.HTTP_1_1.asString())); - } - server.addConnector(conn); - return server; - } catch (Exception ex) { - throw new RuntimeException("Could not start embedded servlet container, " - + ex.getMessage(), ex); - } - } - - private static URL getJettyURL(Server server) { - boolean ssl = server.getConnectors()[0] - .getConnectionFactory(SslConnectionFactory.class) != null; - try { - String scheme = (ssl) ? "https" : "http"; - return new URL(scheme + "://" + - ((ServerConnector)server.getConnectors()[0]).getHost() + ":" - + ((ServerConnector)server.getConnectors()[0]).getLocalPort()); - } catch (MalformedURLException ex) { - throw new RuntimeException("It should never happen, " + ex.getMessage(), - ex); - } - } - public static class Builder { private File kmsConfDir; private String log4jConfFile; @@ -150,7 +90,7 @@ public MiniKMS build() { private String log4jConfFile; private String keyStore; private String keyStorePassword; - private Server jetty; + private KMSWebServer jetty; private int inPort; private URL kmsURL; @@ -178,7 +118,6 @@ private void copyResource(String inputResourceName, File outputFile) throws } public void start() throws Exception { - ClassLoader cl = Thread.currentThread().getContextClassLoader(); System.setProperty(KMSConfiguration.KMS_CONFIG_DIR, kmsConfDir); File aclsFile = new File(kmsConfDir, "kms-acls.xml"); if (!aclsFile.exists()) { @@ -202,35 +141,20 @@ public void start() throws Exception { writer.close(); } System.setProperty("log4j.configuration", log4jConfFile); - jetty = createJettyServer(keyStore, keyStorePassword, inPort); - // we need to do a special handling for MiniKMS to work when in a dir and - // when in a JAR in the classpath thanks to Jetty way of handling of webapps - // when they are in the a DIR, WAR or JAR. - URL webXmlUrl = cl.getResource("kms-webapp/WEB-INF/web.xml"); - if (webXmlUrl == null) { - throw new RuntimeException( - "Could not find kms-webapp/ dir in test classpath"); + final Configuration conf = KMSConfiguration.getKMSConf(); + conf.set(KMSConfiguration.HTTP_HOST_KEY, "localhost"); + conf.setInt(KMSConfiguration.HTTP_PORT_KEY, inPort); + if (keyStore != null) { + conf.setBoolean(KMSConfiguration.SSL_ENABLED_KEY, true); + conf.set(SSLFactory.SSL_SERVER_KEYSTORE_LOCATION, keyStore); + conf.set(SSLFactory.SSL_SERVER_KEYSTORE_PASSWORD, keyStorePassword); + conf.set(SSLFactory.SSL_SERVER_KEYSTORE_TYPE, "jks"); } - boolean webXmlInJar = webXmlUrl.getPath().contains(".jar!/"); - String webappPath; - if (webXmlInJar) { - File webInf = new File("target/" + UUID.randomUUID().toString() + - "/kms-webapp/WEB-INF"); - webInf.mkdirs(); - new File(webInf, "web.xml").delete(); - copyResource("kms-webapp/WEB-INF/web.xml", new File(webInf, "web.xml")); - webappPath = webInf.getParentFile().getAbsolutePath(); - } else { - webappPath = cl.getResource("kms-webapp").getPath(); - } - WebAppContext context = new WebAppContext(webappPath, "/kms"); - if (webXmlInJar) { - context.setClassLoader(cl); - } - jetty.setHandler(context); + + jetty = new KMSWebServer(conf); jetty.start(); - kmsURL = new URL(getJettyURL(jetty), "kms"); + kmsURL = jetty.getKMSUrl(); } public URL getKMSUrl() {