HDFS-2617. Replaced Kerberized SSL for image transfer and fsck with SPNEGO-based solution. Contributed by Jakob Homan, Alejandro Abdelnur, and Aaron T. Myers
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1334216 13f79535-47bb-0310-9956-ffa450edef68
parent 0bf8f11860
commit 5dbbe0e0a5
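On the client side, code that previously pulled a Kerberos service ticket and opened a Kerberized-SSL (https) connection now goes through the SPNEGO helper this patch adds to SecurityUtil. A minimal, illustrative sketch of a caller (the NameNode host, port and servlet path are placeholders, not part of the patch):

    import java.io.InputStream;
    import java.net.URL;
    import java.net.URLConnection;
    import org.apache.hadoop.security.SecurityUtil;

    // Placeholder address; real callers build the URL from the configured NameNode http address.
    URL imageUrl = new URL("http://nn.example.com:50070/getimage");
    // Authenticates via SPNEGO when security is enabled; falls back to a plain connection otherwise.
    URLConnection conn = SecurityUtil.openSecureHttpConnection(imageUrl);
    InputStream in = conn.getInputStream();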
@@ -52,8 +52,6 @@
 import org.apache.hadoop.jmx.JMXJsonServlet;
 import org.apache.hadoop.log.LogLevel;
 import org.apache.hadoop.metrics.MetricsServlet;
-import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector;
-import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector.MODE;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -99,6 +97,7 @@ public class HttpServer implements FilterContainer {
   // gets stored.
   public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
   static final String ADMINS_ACL = "admins.acl";
+  public static final String SPNEGO_FILTER = "SpnegoFilter";
 
   public static final String BIND_ADDRESS = "bind.address";
 
@@ -237,11 +236,7 @@ public HttpServer(String name, String bindAddress, int port,
     webServer.addHandler(webAppContext);
 
     addDefaultApps(contexts, appDir, conf);
 
-    defineFilter(webAppContext, "krb5Filter",
-        Krb5AndCertsSslSocketConnector.Krb5SslFilter.class.getName(),
-        null, null);
-
     addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
     final FilterInitializer[] initializers = getFilterInitializers(conf);
     if (initializers != null) {
@@ -424,12 +419,13 @@ public void addInternalServlet(String name, String pathSpec,
    * protect with Kerberos authentication.
    * Note: This method is to be used for adding servlets that facilitate
    * internal communication and not for user facing functionality. For
-   * servlets added using this method, filters (except internal Kerberized
+   * servlets added using this method, filters (except internal Kerberos
    * filters) are not enabled.
    *
    * @param name The name of the servlet (can be passed as null)
   * @param pathSpec The path spec for the servlet
   * @param clazz The servlet class
+   * @param requireAuth Require Kerberos authenticate to access servlet
   */
  public void addInternalServlet(String name, String pathSpec,
      Class<? extends HttpServlet> clazz, boolean requireAuth) {
@@ -440,11 +436,11 @@ public void addInternalServlet(String name, String pathSpec,
     webAppContext.addServlet(holder, pathSpec);
 
     if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
-      LOG.info("Adding Kerberos filter to " + name);
+      LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
       ServletHandler handler = webAppContext.getServletHandler();
       FilterMapping fmap = new FilterMapping();
       fmap.setPathSpec(pathSpec);
-      fmap.setFilterName("krb5Filter");
+      fmap.setFilterName(SPNEGO_FILTER);
       fmap.setDispatches(Handler.ALL);
       handler.addFilterMapping(fmap);
     }
@@ -580,26 +576,14 @@ public void addSslListener(InetSocketAddress addr, String keystore,
     webServer.addConnector(sslListener);
   }
 
-  /**
-   * Configure an ssl listener on the server.
-   * @param addr address to listen on
-   * @param sslConf conf to retrieve ssl options
-   * @param needClientAuth whether client authentication is required
-   */
-  public void addSslListener(InetSocketAddress addr, Configuration sslConf,
-      boolean needClientAuth) throws IOException {
-    addSslListener(addr, sslConf, needClientAuth, false);
-  }
-
   /**
    * Configure an ssl listener on the server.
    * @param addr address to listen on
    * @param sslConf conf to retrieve ssl options
    * @param needCertsAuth whether x509 certificate authentication is required
-   * @param needKrbAuth whether to allow kerberos auth
    */
   public void addSslListener(InetSocketAddress addr, Configuration sslConf,
-      boolean needCertsAuth, boolean needKrbAuth) throws IOException {
+      boolean needCertsAuth) throws IOException {
     if (webServer.isStarted()) {
       throw new IOException("Failed to add ssl listener");
     }
@@ -612,15 +596,7 @@ public void addSslListener(InetSocketAddress addr, Configuration sslConf,
       System.setProperty("javax.net.ssl.trustStoreType", sslConf.get(
           "ssl.server.truststore.type", "jks"));
     }
-    Krb5AndCertsSslSocketConnector.MODE mode;
-    if(needCertsAuth && needKrbAuth)
-      mode = MODE.BOTH;
-    else if (!needCertsAuth && needKrbAuth)
-      mode = MODE.KRB;
-    else // Default to certificates
-      mode = MODE.CERTS;
-
-    SslSocketConnector sslListener = new Krb5AndCertsSslSocketConnector(mode);
+    SslSocketConnector sslListener = new SslSocketConnector();
     sslListener.setHost(addr.getHostName());
     sslListener.setPort(addr.getPort());
     sslListener.setKeystore(sslConf.get("ssl.server.keystore.location"));
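Servlets registered with requireAuth=true are now mapped to the SPNEGO_FILTER the embedding server defines, instead of the old krb5Filter. For example, the image servlet is registered this way elsewhere in this patch (sketch; infoServer is the HttpServer instance being built):

    // The final 'true' asks HttpServer to protect the servlet with the Kerberos/SPNEGO filter.
    infoServer.addInternalServlet("getimage", "/getimage", GetImageServlet.class, true);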
@@ -1,232 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.security;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.ServerSocket;
-import java.security.Principal;
-import java.util.Collections;
-import java.util.List;
-import java.util.Random;
-
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.SSLServerSocket;
-import javax.net.ssl.SSLServerSocketFactory;
-import javax.net.ssl.SSLSocket;
-import javax.security.auth.kerberos.KerberosPrincipal;
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletRequestWrapper;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.mortbay.io.EndPoint;
-import org.mortbay.jetty.HttpSchemes;
-import org.mortbay.jetty.Request;
-import org.mortbay.jetty.security.ServletSSL;
-import org.mortbay.jetty.security.SslSocketConnector;
-
-/**
- * Extend Jetty's {@link SslSocketConnector} to optionally also provide
- * Kerberos5ized SSL sockets. The only change in behavior from superclass
- * is that we no longer honor requests to turn off NeedAuthentication when
- * running with Kerberos support.
- */
-public class Krb5AndCertsSslSocketConnector extends SslSocketConnector {
-  public static final List<String> KRB5_CIPHER_SUITES =
-      Collections.unmodifiableList(Collections.singletonList(
-          "TLS_KRB5_WITH_3DES_EDE_CBC_SHA"));
-  static {
-    SecurityUtil.initKrb5CipherSuites();
-  }
-
-  private static final Log LOG = LogFactory
-      .getLog(Krb5AndCertsSslSocketConnector.class);
-
-  private static final String REMOTE_PRINCIPAL = "remote_principal";
-
-  public enum MODE {KRB, CERTS, BOTH} // Support Kerberos, certificates or both?
-
-  private final boolean useKrb;
-  private final boolean useCerts;
-
-  public Krb5AndCertsSslSocketConnector() {
-    super();
-    useKrb = true;
-    useCerts = false;
-
-    setPasswords();
-  }
-
-  public Krb5AndCertsSslSocketConnector(MODE mode) {
-    super();
-    useKrb = mode == MODE.KRB || mode == MODE.BOTH;
-    useCerts = mode == MODE.CERTS || mode == MODE.BOTH;
-    setPasswords();
-    logIfDebug("useKerb = " + useKrb + ", useCerts = " + useCerts);
-  }
-
-  // If not using Certs, set passwords to random gibberish or else
-  // Jetty will actually prompt the user for some.
-  private void setPasswords() {
-    if(!useCerts) {
-      Random r = new Random();
-      System.setProperty("jetty.ssl.password", String.valueOf(r.nextLong()));
-      System.setProperty("jetty.ssl.keypassword", String.valueOf(r.nextLong()));
-    }
-  }
-
-  @Override
-  protected SSLServerSocketFactory createFactory() throws Exception {
-    if(useCerts)
-      return super.createFactory();
-
-    SSLContext context = super.getProvider()==null
-       ? SSLContext.getInstance(super.getProtocol())
-        :SSLContext.getInstance(super.getProtocol(), super.getProvider());
-    context.init(null, null, null);
-
-    return context.getServerSocketFactory();
-  }
-
-  /* (non-Javadoc)
-   * @see org.mortbay.jetty.security.SslSocketConnector#newServerSocket(java.lang.String, int, int)
-   */
-  @Override
-  protected ServerSocket newServerSocket(String host, int port, int backlog)
-      throws IOException {
-    logIfDebug("Creating new KrbServerSocket for: " + host);
-    SSLServerSocket ss = null;
-
-    if(useCerts) // Get the server socket from the SSL super impl
-      ss = (SSLServerSocket)super.newServerSocket(host, port, backlog);
-    else { // Create a default server socket
-      try {
-        ss = (SSLServerSocket)(host == null
-          ? createFactory().createServerSocket(port, backlog) :
-          createFactory().createServerSocket(port, backlog, InetAddress.getByName(host)));
-      } catch (Exception e)
-      {
-        LOG.warn("Could not create KRB5 Listener", e);
-        throw new IOException("Could not create KRB5 Listener: " + e.toString());
-      }
-    }
-
-    // Add Kerberos ciphers to this socket server if needed.
-    if(useKrb) {
-      ss.setNeedClientAuth(true);
-      String [] combined;
-      if(useCerts) { // combine the cipher suites
-        String[] certs = ss.getEnabledCipherSuites();
-        combined = new String[certs.length + KRB5_CIPHER_SUITES.size()];
-        System.arraycopy(certs, 0, combined, 0, certs.length);
-        System.arraycopy(KRB5_CIPHER_SUITES.toArray(new String[0]), 0, combined,
-            certs.length, KRB5_CIPHER_SUITES.size());
-      } else { // Just enable Kerberos auth
-        combined = KRB5_CIPHER_SUITES.toArray(new String[0]);
-      }
-
-      ss.setEnabledCipherSuites(combined);
-    }
-
-    return ss;
-  };
-
-  @Override
-  public void customize(EndPoint endpoint, Request request) throws IOException {
-    if(useKrb) { // Add Kerberos-specific info
-      SSLSocket sslSocket = (SSLSocket)endpoint.getTransport();
-      Principal remotePrincipal = sslSocket.getSession().getPeerPrincipal();
-      logIfDebug("Remote principal = " + remotePrincipal);
-      request.setScheme(HttpSchemes.HTTPS);
-      request.setAttribute(REMOTE_PRINCIPAL, remotePrincipal);
-
-      if(!useCerts) { // Add extra info that would have been added by super
-        String cipherSuite = sslSocket.getSession().getCipherSuite();
-        Integer keySize = Integer.valueOf(ServletSSL.deduceKeyLength(cipherSuite));;
-
-        request.setAttribute("javax.servlet.request.cipher_suite", cipherSuite);
-        request.setAttribute("javax.servlet.request.key_size", keySize);
-      }
-    }
-
-    if(useCerts) super.customize(endpoint, request);
-  }
-
-  private void logIfDebug(String s) {
-    if(LOG.isDebugEnabled())
-      LOG.debug(s);
-  }
-
-  /**
-   * Filter that takes the Kerberos principal identified in the
-   * {@link Krb5AndCertsSslSocketConnector} and provides it the to the servlet
-   * at runtime, setting the principal and short name.
-   */
-  public static class Krb5SslFilter implements Filter {
-    @Override
-    public void doFilter(ServletRequest req, ServletResponse resp,
-        FilterChain chain) throws IOException, ServletException {
-      final Principal princ =
-        (Principal)req.getAttribute(Krb5AndCertsSslSocketConnector.REMOTE_PRINCIPAL);
-
-      if(princ == null || !(princ instanceof KerberosPrincipal)) {
-        // Should never actually get here, since should be rejected at socket
-        // level.
-        LOG.warn("User not authenticated via kerberos from " + req.getRemoteAddr());
-        ((HttpServletResponse)resp).sendError(HttpServletResponse.SC_FORBIDDEN,
-            "User not authenticated via Kerberos");
-        return;
-      }
-
-      // Provide principal information for servlet at runtime
-      ServletRequest wrapper =
-          new HttpServletRequestWrapper((HttpServletRequest) req) {
-        @Override
-        public Principal getUserPrincipal() {
-          return princ;
-        }
-
-        /*
-         * Return the full name of this remote user.
-         * @see javax.servlet.http.HttpServletRequestWrapper#getRemoteUser()
-         */
-        @Override
-        public String getRemoteUser() {
-          return princ.getName();
-        }
-      };
-
-      chain.doFilter(wrapper, resp);
-    }
-
-    @Override
-    public void init(FilterConfig arg0) throws ServletException {
-      /* Nothing to do here */
-    }
-
-    @Override
-    public void destroy() { /* Nothing to do here */ }
-  }
-}
@@ -17,14 +17,11 @@
 package org.apache.hadoop.security;
 
 import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Field;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URL;
+import java.net.URLConnection;
 import java.net.UnknownHostException;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
@@ -45,6 +42,8 @@
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
 
@@ -134,79 +133,6 @@ protected static boolean isOriginalTGT(KerberosTicket ticket) {
     return isTGSPrincipal(ticket.getServer());
   }
 
-  /**
-   * Explicitly pull the service ticket for the specified host. This solves a
-   * problem with Java's Kerberos SSL problem where the client cannot
-   * authenticate against a cross-realm service. It is necessary for clients
-   * making kerberized https requests to call this method on the target URL
-   * to ensure that in a cross-realm environment the remote host will be
-   * successfully authenticated.
-   *
-   * This method is internal to Hadoop and should not be used by other
-   * applications. This method should not be considered stable or open:
-   * it will be removed when the Java behavior is changed.
-   *
-   * @param remoteHost Target URL the krb-https client will access
-   * @throws IOException if the service ticket cannot be retrieved
-   */
-  public static void fetchServiceTicket(URL remoteHost) throws IOException {
-    if(!UserGroupInformation.isSecurityEnabled())
-      return;
-
-    String serviceName = "host/" + remoteHost.getHost();
-    if (LOG.isDebugEnabled())
-      LOG.debug("Fetching service ticket for host at: " + serviceName);
-    Object serviceCred = null;
-    Method credsToTicketMeth;
-    Class<?> krb5utilClass;
-    try {
-      Class<?> principalClass;
-      Class<?> credentialsClass;
-
-      if (System.getProperty("java.vendor").contains("IBM")) {
-        principalClass = Class.forName("com.ibm.security.krb5.PrincipalName");
-
-        credentialsClass = Class.forName("com.ibm.security.krb5.Credentials");
-        krb5utilClass = Class.forName("com.ibm.security.jgss.mech.krb5");
-      } else {
-        principalClass = Class.forName("sun.security.krb5.PrincipalName");
-        credentialsClass = Class.forName("sun.security.krb5.Credentials");
-        krb5utilClass = Class.forName("sun.security.jgss.krb5.Krb5Util");
-      }
-      @SuppressWarnings("rawtypes")
-      Constructor principalConstructor = principalClass.getConstructor(String.class,
-          int.class);
-      Field KRB_NT_SRV_HST = principalClass.getDeclaredField("KRB_NT_SRV_HST");
-      Method acquireServiceCredsMeth =
-          credentialsClass.getDeclaredMethod("acquireServiceCreds",
-              String.class, credentialsClass);
-      Method ticketToCredsMeth = krb5utilClass.getDeclaredMethod("ticketToCreds",
-          KerberosTicket.class);
-      credsToTicketMeth = krb5utilClass.getDeclaredMethod("credsToTicket",
-          credentialsClass);
-
-      Object principal = principalConstructor.newInstance(serviceName,
-          KRB_NT_SRV_HST.get(principalClass));
-
-      serviceCred = acquireServiceCredsMeth.invoke(credentialsClass,
-          principal.toString(),
-          ticketToCredsMeth.invoke(krb5utilClass, getTgtFromSubject()));
-    } catch (Exception e) {
-      throw new IOException("Can't get service ticket for: "
-          + serviceName, e);
-    }
-    if (serviceCred == null) {
-      throw new IOException("Can't get service ticket for " + serviceName);
-    }
-    try {
-      Subject.getSubject(AccessController.getContext()).getPrivateCredentials()
-          .add(credsToTicketMeth.invoke(krb5utilClass, serviceCred));
-    } catch (Exception e) {
-      throw new IOException("Can't get service ticket for: "
-          + serviceName, e);
-    }
-  }
-
   /**
    * Convert Kerberos principal name pattern to valid Kerberos principal
    * names. It replaces hostname pattern with hostname, which should be
@@ -513,6 +439,30 @@ public static <T> T doAsLoginUserOrFatal(PrivilegedAction<T> action) {
     }
   }
 
+  /**
+   * Open a (if need be) secure connection to a URL in a secure environment
+   * that is using SPNEGO to authenticate its URLs. All Namenode and Secondary
+   * Namenode URLs that are protected via SPNEGO should be accessed via this
+   * method.
+   *
+   * @param url to authenticate via SPNEGO.
+   * @return A connection that has been authenticated via SPNEGO
+   * @throws IOException If unable to authenticate via SPNEGO
+   */
+  public static URLConnection openSecureHttpConnection(URL url) throws IOException {
+    if(!UserGroupInformation.isSecurityEnabled()) {
+      return url.openConnection();
+    }
+
+    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
+    try {
+      return new AuthenticatedURL().openConnection(url, token);
+    } catch (AuthenticationException e) {
+      throw new IOException("Exception trying to open authenticated connection to "
+          + url, e);
+    }
+  }
+
   /**
    * Resolves a host subject to the security requirements determined by
    * hadoop.security.token.service.use_ip.
@@ -664,10 +614,4 @@ void setSearchDomains(String ... domains) {
     }
   }
 
-  public static void initKrb5CipherSuites() {
-    if (UserGroupInformation.isSecurityEnabled()) {
-      System.setProperty("https.cipherSuites",
-          Krb5AndCertsSslSocketConnector.KRB5_CIPHER_SUITES.get(0));
-    }
-  }
 }
@@ -128,13 +128,6 @@
   </description>
 </property>
 
-<property>
-  <name>dfs.secondary.https.port</name>
-  <value>50490</value>
-  <description>The https port where secondary-namenode binds</description>
-
-</property>
-
 <property>
   <name>dfs.datanode.kerberos.principal</name>
   <value>dn/_HOST@${local.realm}</value>
@@ -435,6 +435,9 @@ Release 2.0.0 - UNRELEASED
     HDFS-2476. More CPU efficient data structure for under-replicated,
     over-replicated, and invalidated blocks. (Tomasz Nykiel via todd)
 
+    HDFS-2617. Replaced Kerberized SSL for image transfer and fsck
+    with SPNEGO-based solution. (jghoman, tucu, and atm via eli)
+
   BUG FIXES
 
     HDFS-2481. Unknown protocol: org.apache.hadoop.hdfs.protocol.ClientProtocol.
@@ -319,10 +319,10 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final String DFS_DATANODE_USER_NAME_KEY = "dfs.datanode.kerberos.principal";
   public static final String DFS_NAMENODE_KEYTAB_FILE_KEY = "dfs.namenode.keytab.file";
   public static final String DFS_NAMENODE_USER_NAME_KEY = "dfs.namenode.kerberos.principal";
-  public static final String DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY = "dfs.namenode.kerberos.https.principal";
+  public static final String DFS_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY = "dfs.namenode.kerberos.internal.spnego.principal";
   public static final String DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY = "dfs.secondary.namenode.keytab.file";
   public static final String DFS_SECONDARY_NAMENODE_USER_NAME_KEY = "dfs.secondary.namenode.kerberos.principal";
-  public static final String DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY = "dfs.secondary.namenode.kerberos.https.principal";
+  public static final String DFS_SECONDARY_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY = "dfs.secondary.namenode.kerberos.internal.spnego.principal";
   public static final String DFS_NAMENODE_NAME_CACHE_THRESHOLD_KEY = "dfs.namenode.name.cache.threshold";
   public static final int DFS_NAMENODE_NAME_CACHE_THRESHOLD_DEFAULT = 10;
 
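With the renamed keys, a secure cluster configures internal SPNEGO principals for the NameNode and Secondary NameNode rather than the removed kerberos.https principals. A hedged configuration sketch (the principal values are placeholders; only the key names come from the patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.HdfsConfiguration;

    Configuration conf = new HdfsConfiguration();
    // HTTP/_HOST is the conventional SPNEGO service principal; substitute the real realm.
    conf.set("dfs.namenode.kerberos.internal.spnego.principal", "HTTP/_HOST@EXAMPLE.COM");
    conf.set("dfs.secondary.namenode.kerberos.internal.spnego.principal", "HTTP/_HOST@EXAMPLE.COM");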
@@ -144,7 +144,7 @@ protected URI getNamenodeUri(URI uri) {
   }
 
   protected URI getNamenodeSecureUri(URI uri) {
-    return DFSUtil.createUri("https", getNamenodeSecureAddr(uri));
+    return DFSUtil.createUri("http", getNamenodeSecureAddr(uri));
   }
 
   @Override
@@ -247,7 +247,7 @@ public Token<?> run() throws IOException {
           c = DelegationTokenFetcher.getDTfromRemote(nnHttpUrl, renewer);
         } catch (Exception e) {
           LOG.info("Couldn't get a delegation token from " + nnHttpUrl +
-              " using https.");
+              " using http.");
           if(LOG.isDebugEnabled()) {
             LOG.debug("error was ", e);
           }
@@ -686,11 +686,11 @@ public long renew(Token<?> token,
                     Configuration conf) throws IOException {
     // update the kerberos credentials, if they are coming from a keytab
     UserGroupInformation.getLoginUser().reloginFromKeytab();
-    // use https to renew the token
+    // use http to renew the token
     InetSocketAddress serviceAddr = SecurityUtil.getTokenServiceAddr(token);
     return
       DelegationTokenFetcher.renewDelegationToken
-        (DFSUtil.createUri("https", serviceAddr).toString(),
+        (DFSUtil.createUri("http", serviceAddr).toString(),
         (Token<DelegationTokenIdentifier>) token);
   }
 
@@ -700,10 +700,10 @@ public void cancel(Token<?> token,
                      Configuration conf) throws IOException {
     // update the kerberos credentials, if they are coming from a keytab
     UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
-    // use https to cancel the token
+    // use http to cancel the token
     InetSocketAddress serviceAddr = SecurityUtil.getTokenServiceAddr(token);
     DelegationTokenFetcher.cancelDelegationToken
-      (DFSUtil.createUri("https", serviceAddr).toString(),
+      (DFSUtil.createUri("http", serviceAddr).toString(),
       (Token<DelegationTokenIdentifier>) token);
   }
 }
@@ -27,6 +27,8 @@
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.security.SecurityUtil;
+
 import org.apache.commons.logging.Log;
@@ -34,7 +36,6 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
@@ -83,11 +84,11 @@ public void doGet(final HttpServletRequest request,
       (Configuration)getServletContext().getAttribute(JspHelper.CURRENT_CONF);
 
     if(UserGroupInformation.isSecurityEnabled() &&
-        !isValidRequestor(request.getRemoteUser(), conf)) {
+        !isValidRequestor(request.getUserPrincipal().getName(), conf)) {
       response.sendError(HttpServletResponse.SC_FORBIDDEN,
           "Only Namenode and Secondary Namenode may access this servlet");
       LOG.warn("Received non-NN/SNN request for image or edits from "
-          + request.getRemoteHost());
+          + request.getUserPrincipal().getName() + " at " + request.getRemoteHost());
       return;
     }
 
@@ -156,15 +157,10 @@ public Void run() throws Exception {
           }
 
           // issue a HTTP get request to download the new fsimage
-          MD5Hash downloadImageDigest = reloginIfNecessary().doAs(
-              new PrivilegedExceptionAction<MD5Hash>() {
-                @Override
-                public MD5Hash run() throws Exception {
-                  return TransferFsImage.downloadImageToStorage(
+          MD5Hash downloadImageDigest =
+              TransferFsImage.downloadImageToStorage(
                       parsedParams.getInfoServer(), txid,
                       nnImage.getStorage(), true);
-                }
-              });
           nnImage.saveDigestAndRenameCheckpointImage(txid, downloadImageDigest);
 
           // Now that we have a new checkpoint, we might be able to
@@ -176,18 +172,6 @@ public MD5Hash run() throws Exception {
           }
           return null;
         }
-
-        // We may have lost our ticket since the last time we tried to open
-        // an http connection, so log in just in case.
-        private UserGroupInformation reloginIfNecessary() throws IOException {
-          // This method is only called on the NN, therefore it is safe to
-          // use these key values.
-          return UserGroupInformation.loginUserFromKeytabAndReturnUGI(
-              SecurityUtil.getServerPrincipal(conf
-                  .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
-                  NameNode.getAddress(conf).getHostName()),
-              conf.get(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY));
-        }
       });
 
     } catch (Throwable t) {
@@ -232,18 +216,10 @@ static boolean isValidRequestor(String remoteUser, Configuration conf)
 
     Set<String> validRequestors = new HashSet<String>();
 
-    validRequestors.add(
-        SecurityUtil.getServerPrincipal(conf
-            .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), NameNode
-            .getAddress(conf).getHostName()));
     validRequestors.add(
         SecurityUtil.getServerPrincipal(conf
             .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), NameNode
            .getAddress(conf).getHostName()));
-    validRequestors.add(
-        SecurityUtil.getServerPrincipal(conf
-            .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
-            SecondaryNameNode.getHttpAddress(conf).getHostName()));
     validRequestors.add(
         SecurityUtil.getServerPrincipal(conf
             .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY),
@@ -251,10 +227,6 @@ static boolean isValidRequestor(String remoteUser, Configuration conf)
 
     if (HAUtil.isHAEnabled(conf, DFSUtil.getNamenodeNameServiceId(conf))) {
       Configuration otherNnConf = HAUtil.getConfForOtherNode(conf);
-      validRequestors.add(
-          SecurityUtil.getServerPrincipal(otherNnConf
-              .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
-              NameNode.getAddress(otherNnConf).getHostName()));
       validRequestors.add(
           SecurityUtil.getServerPrincipal(otherNnConf
               .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY),
@@ -263,11 +235,11 @@ static boolean isValidRequestor(String remoteUser, Configuration conf)
 
     for(String v : validRequestors) {
       if(v != null && v.equals(remoteUser)) {
-        if(LOG.isDebugEnabled()) LOG.debug("isValidRequestor is allowing: " + remoteUser);
+        if(LOG.isInfoEnabled()) LOG.info("GetImageServlet allowing: " + remoteUser);
         return true;
       }
     }
-    if(LOG.isDebugEnabled()) LOG.debug("isValidRequestor is rejecting: " + remoteUser);
+    if(LOG.isInfoEnabled()) LOG.info("GetImageServlet rejecting: " + remoteUser);
     return false;
   }
 
@@ -164,10 +164,8 @@ public static enum OperationCategory {
     DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY,
     DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
     DFS_NAMENODE_HTTP_ADDRESS_KEY,
-    DFS_NAMENODE_HTTPS_ADDRESS_KEY,
     DFS_NAMENODE_KEYTAB_FILE_KEY,
     DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
-    DFS_NAMENODE_SECONDARY_HTTPS_PORT_KEY,
     DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
     DFS_NAMENODE_BACKUP_ADDRESS_KEY,
     DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY,
@@ -361,8 +359,9 @@ public static InetSocketAddress getHttpAddress(Configuration conf) {
   }
 
   protected void setHttpServerAddress(Configuration conf) {
-    conf.set(DFS_NAMENODE_HTTP_ADDRESS_KEY,
-        NetUtils.getHostPortString(getHttpAddress()));
+    String hostPort = NetUtils.getHostPortString(getHttpAddress());
+    conf.set(DFS_NAMENODE_HTTP_ADDRESS_KEY, hostPort);
+    LOG.info("Web-server up at: " + hostPort);
   }
 
   protected void loadNamesystem(Configuration conf) throws IOException {
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ADMIN;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT;
@@ -43,6 +44,7 @@
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authorize.AccessControlList;
 
 /**
@@ -78,127 +80,101 @@ private String getDefaultServerPrincipal() throws IOException {
         conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY),
         nn.getNameNodeAddress().getHostName());
   }
 
   public void start() throws IOException {
     final String infoHost = bindAddress.getHostName();
-    if(UserGroupInformation.isSecurityEnabled()) {
-      String httpsUser = SecurityUtil.getServerPrincipal(conf
-          .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), infoHost);
-      if (httpsUser == null) {
-        LOG.warn(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY
-            + " not defined in config. Starting http server as "
-            + getDefaultServerPrincipal()
-            + ": Kerberized SSL may be not function correctly.");
-      } else {
-        // Kerberized SSL servers must be run from the host principal...
-        LOG.info("Logging in as " + httpsUser + " to start http server.");
-        SecurityUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
-            DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY, infoHost);
-      }
-    }
-
-    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
-    try {
-      this.httpServer = ugi.doAs(new PrivilegedExceptionAction<HttpServer>() {
-        @Override
-        public HttpServer run() throws IOException, InterruptedException {
-          int infoPort = bindAddress.getPort();
-          httpServer = new HttpServer("hdfs", infoHost, infoPort,
-              infoPort == 0, conf,
-              new AccessControlList(conf.get(DFSConfigKeys.DFS_ADMIN, " "))) {
-            {
-              if (WebHdfsFileSystem.isEnabled(conf, LOG)) {
-                //add SPNEGO authentication filter for webhdfs
-                final String name = "SPNEGO";
-                final String classname = AuthFilter.class.getName();
-                final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
-                Map<String, String> params = getAuthFilterParams(conf);
-                defineFilter(webAppContext, name, classname, params,
-                    new String[]{pathSpec});
-                LOG.info("Added filter '" + name + "' (class=" + classname + ")");
-
-                // add webhdfs packages
-                addJerseyResourcePackage(
-                    NamenodeWebHdfsMethods.class.getPackage().getName()
-                        + ";" + Param.class.getPackage().getName(), pathSpec);
-              }
-            }
-
-            private Map<String, String> getAuthFilterParams(Configuration conf)
-                throws IOException {
-              Map<String, String> params = new HashMap<String, String>();
-              String principalInConf = conf
-                  .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
-              if (principalInConf != null && !principalInConf.isEmpty()) {
-                params
-                    .put(
-                        DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
-                        SecurityUtil.getServerPrincipal(principalInConf,
-                            infoHost));
-              }
-              String httpKeytab = conf
-                  .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
-              if (httpKeytab != null && !httpKeytab.isEmpty()) {
-                params.put(
-                    DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
-                    httpKeytab);
-              }
-              return params;
-            }
-          };
-
-          boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false);
-          boolean useKrb = UserGroupInformation.isSecurityEnabled();
-          if (certSSL || useKrb) {
-            boolean needClientAuth = conf.getBoolean(
-                DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
-                DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
-            InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf
-                .get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
-                    DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT));
-            Configuration sslConf = new HdfsConfiguration(false);
-            if (certSSL) {
-              sslConf.addResource(conf.get(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-                  DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
-            }
-            httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth,
-                useKrb);
-            // assume same ssl port for all datanodes
-            InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(
-                conf.get(DFS_DATANODE_HTTPS_ADDRESS_KEY,
-                    infoHost + ":" + DFSConfigKeys.DFS_DATANODE_HTTPS_DEFAULT_PORT));
-            httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY,
-                datanodeSslPort.getPort());
-          }
-          httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
-          httpServer.setAttribute(NAMENODE_ADDRESS_ATTRIBUTE_KEY,
-              nn.getNameNodeAddress());
-          httpServer.setAttribute(FSIMAGE_ATTRIBUTE_KEY, nn.getFSImage());
-          httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
-          setupServlets(httpServer, conf);
-          httpServer.start();
-
-          // The web-server port can be ephemeral... ensure we have the correct
-          // info
-          infoPort = httpServer.getPort();
-          httpAddress = new InetSocketAddress(infoHost, infoPort);
-          LOG.info(nn.getRole() + " Web-server up at: " + httpAddress);
-          return httpServer;
-        }
-      });
-    } catch (InterruptedException e) {
-      throw new IOException(e);
-    } finally {
-      if(UserGroupInformation.isSecurityEnabled() &&
-          conf.get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY) != null) {
-        // Go back to being the correct Namenode principal
-        LOG.info("Logging back in as NameNode user following http server start");
-        nn.loginAsNameNodeUser(conf);
-      }
-    }
+    int infoPort = bindAddress.getPort();
+    httpServer = new HttpServer("hdfs", infoHost, infoPort,
+        infoPort == 0, conf,
+        new AccessControlList(conf.get(DFS_ADMIN, " "))) {
+      {
+        // Add SPNEGO support to NameNode
+        if (UserGroupInformation.isSecurityEnabled()) {
+          Map<String, String> params = new HashMap<String, String>();
+          String principalInConf = conf.get(
+              DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY);
+          if (principalInConf != null && !principalInConf.isEmpty()) {
+            params.put("kerberos.principal",
+                SecurityUtil.getServerPrincipal(principalInConf, infoHost));
+            String httpKeytab = conf.get(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);
+            if (httpKeytab != null && !httpKeytab.isEmpty()) {
+              params.put("kerberos.keytab", httpKeytab);
+            }
+
+            params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
+
+            defineFilter(webAppContext, SPNEGO_FILTER,
+                AuthenticationFilter.class.getName(), params, null);
+          }
+        }
+        if (WebHdfsFileSystem.isEnabled(conf, LOG)) {
+          //add SPNEGO authentication filter for webhdfs
+          final String name = "SPNEGO";
+          final String classname = AuthFilter.class.getName();
+          final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
+          Map<String, String> params = getAuthFilterParams(conf);
+          defineFilter(webAppContext, name, classname, params,
+              new String[]{pathSpec});
+          LOG.info("Added filter '" + name + "' (class=" + classname + ")");
+
+          // add webhdfs packages
+          addJerseyResourcePackage(
+              NamenodeWebHdfsMethods.class.getPackage().getName()
+                  + ";" + Param.class.getPackage().getName(), pathSpec);
+        }
+      }
+
+      private Map<String, String> getAuthFilterParams(Configuration conf)
+          throws IOException {
+        Map<String, String> params = new HashMap<String, String>();
+        String principalInConf = conf
+            .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
+        if (principalInConf != null && !principalInConf.isEmpty()) {
+          params
+              .put(
+                  DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+                  SecurityUtil.getServerPrincipal(principalInConf,
+                      bindAddress.getHostName()));
+        }
+        String httpKeytab = conf
+            .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
+        if (httpKeytab != null && !httpKeytab.isEmpty()) {
+          params.put(
+              DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
+              httpKeytab);
+        }
+        return params;
+      }
+    };
+
+    boolean certSSL = conf.getBoolean("dfs.https.enable", false);
+    if (certSSL) {
+      boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
+      InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(infoHost + ":" + conf.get(
+          "dfs.https.port", infoHost + ":" + 0));
+      Configuration sslConf = new Configuration(false);
+      if (certSSL) {
+        sslConf.addResource(conf.get("dfs.https.server.keystore.resource",
+            "ssl-server.xml"));
+      }
+      httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth);
+      // assume same ssl port for all datanodes
+      InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
+          "dfs.datanode.https.address", infoHost + ":" + 50475));
+      httpServer.setAttribute("datanode.https.port", datanodeSslPort
+          .getPort());
+    }
+    httpServer.setAttribute("name.node", nn);
+    httpServer.setAttribute("name.node.address", bindAddress);
+    httpServer.setAttribute("name.system.image", nn.getFSImage());
+    httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
+    setupServlets(httpServer, conf);
+    httpServer.start();
+    httpAddress = new InetSocketAddress(bindAddress.getAddress(), httpServer.getPort());
   }
 
 
   public void stop() throws Exception {
     if (httpServer != null) {
       httpServer.stop();
@ -25,8 +25,10 @@
|
|||||||
import java.security.PrivilegedExceptionAction;
|
import java.security.PrivilegedExceptionAction;
|
||||||
import java.util.Collection;
|
import java.util.Collection;
|
||||||
import java.util.Date;
|
import java.util.Date;
|
||||||
|
import java.util.HashMap;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
import org.apache.commons.cli.CommandLine;
|
import org.apache.commons.cli.CommandLine;
|
||||||
import org.apache.commons.cli.CommandLineParser;
|
import org.apache.commons.cli.CommandLineParser;
|
||||||
@ -44,6 +46,7 @@
|
|||||||
import org.apache.hadoop.fs.FileSystem;
|
import org.apache.hadoop.fs.FileSystem;
|
||||||
import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
|
import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hdfs.DFSConfigKeys;
|
||||||
import org.apache.hadoop.hdfs.DFSUtil;
|
import org.apache.hadoop.hdfs.DFSUtil;
|
||||||
import org.apache.hadoop.hdfs.HAUtil;
|
import org.apache.hadoop.hdfs.HAUtil;
|
||||||
import org.apache.hadoop.hdfs.NameNodeProxies;
|
import org.apache.hadoop.hdfs.NameNodeProxies;
|
||||||
@ -63,9 +66,9 @@
|
|||||||
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
|
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
|
||||||
import org.apache.hadoop.metrics2.source.JvmMetrics;
|
import org.apache.hadoop.metrics2.source.JvmMetrics;
|
||||||
import org.apache.hadoop.net.NetUtils;
|
import org.apache.hadoop.net.NetUtils;
|
||||||
import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector;
|
|
||||||
import org.apache.hadoop.security.SecurityUtil;
|
import org.apache.hadoop.security.SecurityUtil;
|
||||||
import org.apache.hadoop.security.UserGroupInformation;
|
import org.apache.hadoop.security.UserGroupInformation;
|
||||||
|
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
|
||||||
import org.apache.hadoop.security.authorize.AccessControlList;
|
import org.apache.hadoop.security.authorize.AccessControlList;
|
||||||
|
|
||||||
import org.apache.hadoop.util.Daemon;
|
import org.apache.hadoop.util.Daemon;
|
||||||
@ -108,7 +111,6 @@ public class SecondaryNameNode implements Runnable {
|
|||||||
private volatile boolean shouldRun;
|
private volatile boolean shouldRun;
|
||||||
private HttpServer infoServer;
|
private HttpServer infoServer;
|
||||||
private int infoPort;
|
private int infoPort;
|
||||||
private int imagePort;
|
|
||||||
private String infoBindAddress;
|
private String infoBindAddress;
|
||||||
|
|
||||||
private Collection<URI> checkpointDirs;
|
private Collection<URI> checkpointDirs;
|
||||||
@@ -229,63 +231,47 @@ private void initialize(final Configuration conf,
 
     // Initialize other scheduling parameters from the configuration
     checkpointConf = new CheckpointConf(conf);
 
-    // initialize the webserver for uploading files.
-    // Kerberized SSL servers must be run from the host principal...
-    UserGroupInformation httpUGI =
-        UserGroupInformation.loginUserFromKeytabAndReturnUGI(
-            SecurityUtil.getServerPrincipal(conf
-                .get(DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
-                infoBindAddress),
-            conf.get(DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY));
-    try {
-      infoServer = httpUGI.doAs(new PrivilegedExceptionAction<HttpServer>() {
-        @Override
-        public HttpServer run() throws IOException, InterruptedException {
-          LOG.info("Starting web server as: " +
-              UserGroupInformation.getCurrentUser().getUserName());
-
-          int tmpInfoPort = infoSocAddr.getPort();
-          infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort,
-              tmpInfoPort == 0, conf,
-              new AccessControlList(conf.get(DFS_ADMIN, " ")));
-
-          if(UserGroupInformation.isSecurityEnabled()) {
-            SecurityUtil.initKrb5CipherSuites();
-            InetSocketAddress secInfoSocAddr =
-                NetUtils.createSocketAddr(infoBindAddress + ":"+ conf.getInt(
-                    DFS_NAMENODE_SECONDARY_HTTPS_PORT_KEY,
-                    DFS_NAMENODE_SECONDARY_HTTPS_PORT_DEFAULT));
-            imagePort = secInfoSocAddr.getPort();
-            infoServer.addSslListener(secInfoSocAddr, conf, false, true);
-          }
-
-          infoServer.setAttribute("secondary.name.node", SecondaryNameNode.this);
-          infoServer.setAttribute("name.system.image", checkpointImage);
-          infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
-          infoServer.addInternalServlet("getimage", "/getimage",
-              GetImageServlet.class, true);
-          infoServer.start();
-          return infoServer;
-        }
-      });
-    } catch (InterruptedException e) {
-      throw new RuntimeException(e);
-    }
-
+    // initialize the webserver for uploading files.
+    int tmpInfoPort = infoSocAddr.getPort();
+    infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort,
+                                tmpInfoPort == 0, conf,
+                                new AccessControlList(conf.get(DFS_ADMIN, " "))) {
+      {
+        if (UserGroupInformation.isSecurityEnabled()) {
+          Map<String, String> params = new HashMap<String, String>();
+          String principalInConf = conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY);
+          if (principalInConf != null && !principalInConf.isEmpty()) {
+            params.put("kerberos.principal",
+                SecurityUtil.getServerPrincipal(principalInConf, infoSocAddr.getHostName()));
+          }
+          String httpKeytab = conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY);
+          if (httpKeytab != null && !httpKeytab.isEmpty()) {
+            params.put("kerberos.keytab", httpKeytab);
+          }
+          params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
+
+          defineFilter(webAppContext, SPNEGO_FILTER, AuthenticationFilter.class.getName(),
+              params, null);
+        }
+      }
+    };
+    infoServer.setAttribute("secondary.name.node", this);
+    infoServer.setAttribute("name.system.image", checkpointImage);
+    infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
+    infoServer.addInternalServlet("getimage", "/getimage",
+        GetImageServlet.class, true);
+    infoServer.start();
 
     LOG.info("Web server init done");
 
     // The web-server port can be ephemeral... ensure we have the correct info
     infoPort = infoServer.getPort();
-    if (!UserGroupInformation.isSecurityEnabled()) {
-      imagePort = infoPort;
-    }
-
-    conf.set(DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, infoBindAddress + ":" +infoPort);
-    LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" +infoPort);
-    LOG.info("Secondary image servlet up at: " + infoBindAddress + ":" + imagePort);
+
+    conf.set(DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, infoBindAddress + ":" + infoPort);
+    LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" + infoPort);
     LOG.info("Checkpoint Period :" + checkpointConf.getPeriod() + " secs " +
-             "(" + checkpointConf.getPeriod()/60 + " min)");
+             "(" + checkpointConf.getPeriod() / 60 + " min)");
    LOG.info("Log Size Trigger :" + checkpointConf.getTxnCount() + " txns");
  }
 
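For readers unfamiliar with the pattern used in the hunk above: the secondary NameNode no longer logs in as a host principal and attaches a Kerberized SSL listener; instead it subclasses HttpServer and, from an instance initializer, registers Hadoop's AuthenticationFilter so that /getimage is protected by SPNEGO. The sketch below is a condensed, hypothetical illustration of that wiring against this branch's HttpServer API (constructor, protected webAppContext field, defineFilter, and SPNEGO_FILTER). The config key names EXAMPLE_PRINCIPAL_KEY and EXAMPLE_KEYTAB_KEY are placeholders for illustration only; the real code reads the DFSConfigKeys constants shown in the diff.

import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;

public class SpnegoHttpServerSketch {
  // Placeholder keys, not the real DFSConfigKeys names.
  static final String EXAMPLE_PRINCIPAL_KEY = "example.internal.spnego.principal";
  static final String EXAMPLE_KEYTAB_KEY = "example.keytab.file";

  static HttpServer build(final Configuration conf, final InetSocketAddress addr)
      throws IOException {
    // The instance initializer of the anonymous subclass runs during
    // construction, after the web app context exists, so the SPNEGO filter
    // can be defined before any servlet is served.
    return new HttpServer("example", addr.getHostName(), addr.getPort(),
        addr.getPort() == 0, conf) {
      {
        if (UserGroupInformation.isSecurityEnabled()) {
          Map<String, String> params = new HashMap<String, String>();
          String principal = conf.get(EXAMPLE_PRINCIPAL_KEY);
          if (principal != null && !principal.isEmpty()) {
            // Expand _HOST in the configured principal to the local hostname.
            params.put("kerberos.principal",
                SecurityUtil.getServerPrincipal(principal, addr.getHostName()));
          }
          String keytab = conf.get(EXAMPLE_KEYTAB_KEY);
          if (keytab != null && !keytab.isEmpty()) {
            params.put("kerberos.keytab", keytab);
          }
          params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
          defineFilter(webAppContext, SPNEGO_FILTER,
              AuthenticationFilter.class.getName(), params, null);
        }
      }
    };
  }
}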
@@ -434,7 +420,7 @@ private String getInfoServer() throws IOException {
       throw new IOException("This is not a DFS");
     }
 
-    String configuredAddress = DFSUtil.getInfoServer(null, conf, true);
+    String configuredAddress = DFSUtil.getInfoServer(null, conf, false);
     String address = DFSUtil.substituteForWildcardAddress(configuredAddress,
         fsName.getHost());
     LOG.debug("Will connect to NameNode at HTTP address: " + address);
@@ -446,7 +432,7 @@ private String getInfoServer() throws IOException {
    * for image transfers
    */
   private InetSocketAddress getImageListenAddress() {
-    return new InetSocketAddress(infoBindAddress, imagePort);
+    return new InetSocketAddress(infoBindAddress, infoPort);
   }
 
   /**
@@ -507,7 +493,7 @@ boolean doCheckpoint() throws IOException {
 
 
   /**
-   * @param argv The parameters passed to this program.
+   * @param opts The parameters passed to this program.
    * @exception Exception if the filesystem does not exist.
    * @return 0 on success, non zero on error.
    */
@@ -709,7 +695,7 @@ static class CheckpointStorage extends FSImage {
    * Construct a checkpoint image.
    * @param conf Node configuration.
    * @param imageDirs URIs of storage for image.
-   * @param editDirs URIs of storage for edit logs.
+   * @param editsDirs URIs of storage for edit logs.
    * @throws IOException If storage cannot be access.
    */
   CheckpointStorage(Configuration conf,
@@ -201,19 +201,17 @@ static MD5Hash getFileClient(String nnHostPort,
       String queryString, List<File> localPaths,
       NNStorage dstStorage, boolean getChecksum) throws IOException {
     byte[] buf = new byte[HdfsConstants.IO_FILE_BUFFER_SIZE];
-    String proto = UserGroupInformation.isSecurityEnabled() ? "https://" : "http://";
-    StringBuilder str = new StringBuilder(proto+nnHostPort+"/getimage?");
-    str.append(queryString);
 
+    String str = "http://" + nnHostPort + "/getimage?" + queryString;
+    LOG.info("Opening connection to " + str);
     //
     // open connection to remote server
     //
-    URL url = new URL(str.toString());
+    URL url = new URL(str);
 
-    // Avoid Krb bug with cross-realm hosts
-    SecurityUtil.fetchServiceTicket(url);
-    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+    HttpURLConnection connection = (HttpURLConnection)
+      SecurityUtil.openSecureHttpConnection(url);
 
     if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
       throw new HttpGetFailedException(
         "Image transfer servlet at " + url +
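The hunk above captures the client side of the switch: image transfer now always uses a plain http:// URL, and SecurityUtil.openSecureHttpConnection performs SPNEGO negotiation when security is on. The following is a minimal, hypothetical sketch of that flow; the class and method names here are illustrative, and it throws plain IOException rather than the HttpGetFailedException used in TransferFsImage.

import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.security.SecurityUtil;

public class GetImageClientSketch {
  /** Open the /getimage servlet over HTTP, authenticating via SPNEGO if enabled. */
  static InputStream openImageStream(String nnHostPort, String queryString)
      throws IOException {
    String str = "http://" + nnHostPort + "/getimage?" + queryString;
    URL url = new URL(str);
    // With security enabled this negotiates SPNEGO; otherwise it is a plain GET.
    HttpURLConnection connection =
        (HttpURLConnection) SecurityUtil.openSecureHttpConnection(url);
    if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
      throw new IOException("Image transfer servlet at " + url +
          " failed with status code " + connection.getResponseCode());
    }
    return connection.getInputStream();
  }
}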
@@ -95,7 +95,6 @@ public class BootstrapStandby implements Tool, Configurable {
   static final int ERR_CODE_LOGS_UNAVAILABLE = 6;
 
   public int run(String[] args) throws Exception {
-    SecurityUtil.initKrb5CipherSuites();
     parseArgs(args);
     parseConfAndFindOtherNN();
     NameNode.checkAllowFormat(conf);
@@ -322,7 +321,7 @@ private void parseConfAndFindOtherNN() throws IOException {
         "Could not determine valid IPC address for other NameNode (%s)" +
         ", got: %s", otherNNId, otherIpcAddr);
 
-    otherHttpAddr = DFSUtil.getInfoServer(null, otherNode, true);
+    otherHttpAddr = DFSUtil.getInfoServer(null, otherNode, false);
     otherHttpAddr = DFSUtil.substituteForWildcardAddress(otherHttpAddr,
         otherIpcAddr.getHostName());
 
@@ -92,7 +92,7 @@ private void setNameNodeAddresses(Configuration conf) {
   }
 
   private String getHttpAddress(Configuration conf) {
-    String configuredAddr = DFSUtil.getInfoServer(null, conf, true);
+    String configuredAddr = DFSUtil.getInfoServer(null, conf, false);
 
     // Use the hostname from the RPC address as a default, in case
     // the HTTP address is configured to 0.0.0.0.
@@ -504,7 +504,7 @@ public int setBalancerBandwidth(String[] argv, int idx) throws IOException {
    */
   public int fetchImage(String[] argv, int idx) throws IOException {
     String infoServer = DFSUtil.getInfoServer(
-        HAUtil.getAddressOfActive(getDFS()), getConf(), true);
+        HAUtil.getAddressOfActive(getDFS()), getConf(), false);
     TransferFsImage.downloadMostRecentImageToDirectory(infoServer,
         new File(argv[idx]));
     return 0;
@@ -153,8 +153,7 @@ private Integer listCorruptFileBlocks(String dir, String baseUrl)
       url.append("&startblockafter=").append(String.valueOf(cookie));
     }
     URL path = new URL(url.toString());
-    SecurityUtil.fetchServiceTicket(path);
-    URLConnection connection = path.openConnection();
+    URLConnection connection = SecurityUtil.openSecureHttpConnection(path);
     InputStream stream = connection.getInputStream();
     BufferedReader input = new BufferedReader(new InputStreamReader(
         stream, "UTF-8"));
@ -222,16 +221,11 @@ private String getCurrentNamenodeAddress() throws IOException {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
return DFSUtil.getInfoServer(HAUtil.getAddressOfActive(fs), conf, true);
|
return DFSUtil.getInfoServer(HAUtil.getAddressOfActive(fs), conf, false);
|
||||||
}
|
}
|
||||||
|
|
||||||
private int doWork(final String[] args) throws IOException {
|
private int doWork(final String[] args) throws IOException {
|
||||||
String proto = "http://";
|
final StringBuilder url = new StringBuilder("http://");
|
||||||
if (UserGroupInformation.isSecurityEnabled()) {
|
|
||||||
SecurityUtil.initKrb5CipherSuites();
|
|
||||||
proto = "https://";
|
|
||||||
}
|
|
||||||
final StringBuilder url = new StringBuilder(proto);
|
|
||||||
|
|
||||||
String namenodeAddress = getCurrentNamenodeAddress();
|
String namenodeAddress = getCurrentNamenodeAddress();
|
||||||
if (namenodeAddress == null) {
|
if (namenodeAddress == null) {
|
||||||
@ -279,8 +273,7 @@ else if (args[idx].equals("-list-corruptfileblocks")) {
|
|||||||
return listCorruptFileBlocks(dir, url.toString());
|
return listCorruptFileBlocks(dir, url.toString());
|
||||||
}
|
}
|
||||||
URL path = new URL(url.toString());
|
URL path = new URL(url.toString());
|
||||||
SecurityUtil.fetchServiceTicket(path);
|
URLConnection connection = SecurityUtil.openSecureHttpConnection(path);
|
||||||
URLConnection connection = path.openConnection();
|
|
||||||
InputStream stream = connection.getInputStream();
|
InputStream stream = connection.getInputStream();
|
||||||
BufferedReader input = new BufferedReader(new InputStreamReader(
|
BufferedReader input = new BufferedReader(new InputStreamReader(
|
||||||
stream, "UTF-8"));
|
stream, "UTF-8"));
|
||||||
|
@ -72,11 +72,6 @@ public class DelegationTokenFetcher {
|
|||||||
private static final String RENEW = "renew";
|
private static final String RENEW = "renew";
|
||||||
private static final String PRINT = "print";
|
private static final String PRINT = "print";
|
||||||
|
|
||||||
static {
|
|
||||||
// Enable Kerberos sockets
|
|
||||||
System.setProperty("https.cipherSuites", "TLS_KRB5_WITH_3DES_EDE_CBC_SHA");
|
|
||||||
}
|
|
||||||
|
|
||||||
private static void printUsage(PrintStream err) throws IOException {
|
private static void printUsage(PrintStream err) throws IOException {
|
||||||
err.println("fetchdt retrieves delegation tokens from the NameNode");
|
err.println("fetchdt retrieves delegation tokens from the NameNode");
|
||||||
err.println();
|
err.println();
|
||||||
@ -106,7 +101,7 @@ public static void main(final String[] args) throws Exception {
|
|||||||
final Configuration conf = new HdfsConfiguration();
|
final Configuration conf = new HdfsConfiguration();
|
||||||
Options fetcherOptions = new Options();
|
Options fetcherOptions = new Options();
|
||||||
fetcherOptions.addOption(WEBSERVICE, true,
|
fetcherOptions.addOption(WEBSERVICE, true,
|
||||||
"HTTPS url to reach the NameNode at");
|
"HTTP url to reach the NameNode at");
|
||||||
fetcherOptions.addOption(RENEWER, true,
|
fetcherOptions.addOption(RENEWER, true,
|
||||||
"Name of the delegation token renewer");
|
"Name of the delegation token renewer");
|
||||||
fetcherOptions.addOption(CANCEL, false, "cancel the token");
|
fetcherOptions.addOption(CANCEL, false, "cancel the token");
|
||||||
@ -224,8 +219,7 @@ static public Credentials getDTfromRemote(String nnAddr,
|
|||||||
}
|
}
|
||||||
|
|
||||||
URL remoteURL = new URL(url.toString());
|
URL remoteURL = new URL(url.toString());
|
||||||
SecurityUtil.fetchServiceTicket(remoteURL);
|
URLConnection connection = SecurityUtil.openSecureHttpConnection(remoteURL);
|
||||||
URLConnection connection = URLUtils.openConnection(remoteURL);
|
|
||||||
InputStream in = connection.getInputStream();
|
InputStream in = connection.getInputStream();
|
||||||
Credentials ts = new Credentials();
|
Credentials ts = new Credentials();
|
||||||
dis = new DataInputStream(in);
|
dis = new DataInputStream(in);
|
||||||
@ -264,7 +258,7 @@ static public long renewDelegationToken(String nnAddr,
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
URL url = new URL(buf.toString());
|
URL url = new URL(buf.toString());
|
||||||
SecurityUtil.fetchServiceTicket(url);
|
connection = (HttpURLConnection) SecurityUtil.openSecureHttpConnection(url);
|
||||||
connection = (HttpURLConnection)URLUtils.openConnection(url);
|
connection = (HttpURLConnection)URLUtils.openConnection(url);
|
||||||
if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
|
if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
|
||||||
throw new IOException("Error renewing token: " +
|
throw new IOException("Error renewing token: " +
|
||||||
@ -358,8 +352,7 @@ static public void cancelDelegationToken(String nnAddr,
|
|||||||
HttpURLConnection connection=null;
|
HttpURLConnection connection=null;
|
||||||
try {
|
try {
|
||||||
URL url = new URL(buf.toString());
|
URL url = new URL(buf.toString());
|
||||||
SecurityUtil.fetchServiceTicket(url);
|
connection = (HttpURLConnection) SecurityUtil.openSecureHttpConnection(url);
|
||||||
connection = (HttpURLConnection)URLUtils.openConnection(url);
|
|
||||||
if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
|
if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
|
||||||
throw new IOException("Error cancelling token: " +
|
throw new IOException("Error cancelling token: " +
|
||||||
connection.getResponseMessage());
|
connection.getResponseMessage());
|
||||||
|
@@ -858,4 +858,15 @@
 </description>
 </property>
 
+<property>
+  <name>dfs.namenode.kerberos.internal.spnego.principal</name>
+  <value>${dfs.web.authentication.kerberos.principal}</value>
+</property>
+
+<property>
+  <name>dfs.secondary.namenode.kerberos.internal.spnego.principal</name>
+  <value>${dfs.web.authentication.kerberos.principal}</value>
+</property>
+
 
 </configuration>
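The default values above rely on Hadoop Configuration's ${...} variable expansion: unless an operator overrides them, both internal SPNEGO principals resolve to whatever dfs.web.authentication.kerberos.principal is set to. A small sketch of that expansion behavior, assuming only standard Configuration APIs; the principal and host names are placeholders, and in a real deployment these defaults come from hdfs-default.xml rather than being set programmatically.

import org.apache.hadoop.conf.Configuration;

public class SpnegoPrincipalDefaultsSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Operator sets only the web authentication principal.
    conf.set("dfs.web.authentication.kerberos.principal",
        "HTTP/myhost.example.com@EXAMPLE.COM");
    // The internal SPNEGO principal defaults to a ${...} reference, which
    // Configuration.get() expands against other configured properties.
    conf.set("dfs.namenode.kerberos.internal.spnego.principal",
        "${dfs.web.authentication.kerberos.principal}");
    // Prints: HTTP/myhost.example.com@EXAMPLE.COM
    System.out.println(
        conf.get("dfs.namenode.kerberos.internal.spnego.principal"));
  }
}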