HADOOP-16314. Make sure all web end points are covered by the same authentication filter.

Contributed by Prabhu Joseph
Eric Yang 2019-06-05 18:52:39 -04:00
parent 3b1c2577d7
commit 294695dd57
18 changed files with 412 additions and 54 deletions
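The change replaces per-servlet SPNEGO filter wiring with a single authentication filter mapped at the web root, so every endpoint (/jmx, /conf, /stacks, /logs, and static content) passes through the same filter. A minimal sketch of enabling it, using only property and builder names that appear in the hunks below (the keytab path and principal are illustrative):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.security.AuthenticationFilterInitializer;

public class SingleAuthFilterExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // One filter initializer now covers every web endpoint.
    conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
        AuthenticationFilterInitializer.class.getName());
    conf.set("hadoop.http.authentication.type", "kerberos");
    // Illustrative values; point these at a real keytab and principal.
    conf.set("hadoop.http.authentication.kerberos.keytab",
        "/etc/security/http.keytab");
    conf.set("hadoop.http.authentication.kerberos.principal",
        "HTTP/host.example.com@EXAMPLE.COM");
    HttpServer2 server = new HttpServer2.Builder()
        .setName("example")
        .addEndpoint(new URI("http://localhost:0"))
        .setConf(conf)
        .setFindPort(true)
        .build();
    server.start();
  }
}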

View File

@@ -27,6 +27,7 @@
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
@@ -66,6 +67,8 @@
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilterInitializer;
import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.ssl.SSLFactory;
@@ -90,7 +93,6 @@
import org.eclipse.jetty.server.handler.RequestLogHandler;
import org.eclipse.jetty.server.session.AbstractSessionManager;
import org.eclipse.jetty.server.session.SessionHandler;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.FilterMapping;
import org.eclipse.jetty.servlet.ServletContextHandler;
@@ -155,7 +157,7 @@ public final class HttpServer2 implements FilterContainer {
// gets stored.
public static final String CONF_CONTEXT_ATTRIBUTE = "hadoop.conf";
public static final String ADMINS_ACL = "admins.acl";
public static final String SPNEGO_FILTER = "SpnegoFilter";
public static final String SPNEGO_FILTER = "authentication";
public static final String NO_CACHE_FILTER = "NoCacheFilter";
public static final String BIND_ADDRESS = "bind.address";
@@ -433,7 +435,9 @@ public HttpServer2 build() throws IOException {
HttpServer2 server = new HttpServer2(this);
if (this.securityEnabled) {
if (this.securityEnabled &&
!this.conf.get(authFilterConfigurationPrefix + "type").
equals(PseudoAuthenticationHandler.TYPE)) {
server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
}
@@ -608,13 +612,6 @@ private void initializeWebServer(String name, String hostName,
}
addDefaultServlets();
if (pathSpecs != null) {
for (String path : pathSpecs) {
LOG.info("adding path spec: " + path);
addFilterPathMapping(path, webAppContext);
}
}
}
private void addListener(ServerConnector connector) {
@@ -625,7 +622,7 @@ private static WebAppContext createWebAppContext(Builder b,
AccessControlList adminsAcl, final String appDir) {
WebAppContext ctx = new WebAppContext();
ctx.setDefaultsDescriptor(null);
ServletHolder holder = new ServletHolder(new DefaultServlet());
ServletHolder holder = new ServletHolder(new WebServlet());
Map<String, String> params = ImmutableMap.<String, String>builder()
.put("acceptRanges", "true")
.put("dirAllowed", "false")
@@ -684,10 +681,16 @@ private static FilterInitializer[] getFilterInitializers(Configuration conf) {
return null;
}
FilterInitializer[] initializers = new FilterInitializer[classes.length];
for(int i = 0; i < classes.length; i++) {
List<Class<?>> classList = new ArrayList<>(Arrays.asList(classes));
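// If both the plain and the proxy-user auth initializers are configured,
// keep only the proxy-user one; ProxyUserAuthenticationFilter extends
// AuthenticationFilter and adds doAs handling on top.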
if (classList.contains(AuthenticationFilterInitializer.class) &&
classList.contains(ProxyUserAuthenticationFilterInitializer.class)) {
classList.remove(AuthenticationFilterInitializer.class);
}
FilterInitializer[] initializers = new FilterInitializer[classList.size()];
for(int i = 0; i < classList.size(); i++) {
initializers[i] = (FilterInitializer)ReflectionUtils.newInstance(
classes[i], conf);
classList.get(i), conf);
}
return initializers;
}
@@ -735,7 +738,7 @@ protected void addDefaultApps(ContextHandlerCollection parent,
ServletContextHandler staticContext =
new ServletContextHandler(parent, "/static");
staticContext.setResourceBase(appDir + "/static");
staticContext.addServlet(DefaultServlet.class, "/*");
staticContext.addServlet(WebServlet.class, "/*");
staticContext.setDisplayName("static");
@SuppressWarnings("unchecked")
Map<String, String> params = staticContext.getInitParams();
@@ -812,7 +815,6 @@ public void addJerseyResourcePackage(final String packageName,
public void addServlet(String name, String pathSpec,
Class<? extends HttpServlet> clazz) {
addInternalServlet(name, pathSpec, clazz, false);
addFilterPathMapping(pathSpec, webAppContext);
}
/**
@@ -869,16 +871,6 @@ public void addInternalServlet(String name, String pathSpec,
}
}
webAppContext.addServlet(holder, pathSpec);
if(requireAuth && UserGroupInformation.isSecurityEnabled()) {
LOG.info("Adding Kerberos (SPNEGO) filter to " + name);
ServletHandler handler = webAppContext.getServletHandler();
FilterMapping fmap = new FilterMapping();
fmap.setPathSpec(pathSpec);
fmap.setFilterName(SPNEGO_FILTER);
fmap.setDispatches(FilterMapping.ALL);
handler.addFilterMapping(fmap);
}
}
/**
@@ -945,8 +937,8 @@ public void addFilter(String name, String classname,
Map<String, String> parameters) {
FilterHolder filterHolder = getFilterHolder(name, classname, parameters);
final String[] USER_FACING_URLS = { "*.html", "*.jsp" };
FilterMapping fmap = getFilterMapping(name, USER_FACING_URLS);
final String[] userFacingUrls = {"/", "/*" };
FilterMapping fmap = getFilterMapping(name, userFacingUrls);
defineFilter(webAppContext, filterHolder, fmap);
LOG.info(
"Added filter " + name + " (class=" + classname + ") to context "

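Because addFilter() now maps user-facing filters to "/" and "/*" rather than only *.html and *.jsp, anything registered through hadoop.http.filter.initializers fronts every URL. A sketch of such an initializer, modeled on the CustomizedFilter.Initializer added to TestWebHdfsWithAuthenticationFilter further down (AuditFilter is a hypothetical name):

import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.FilterContainer;
import org.apache.hadoop.http.FilterInitializer;

public class AuditFilter implements Filter {
  @Override public void init(FilterConfig filterConfig) {}
  @Override public void destroy() {}

  @Override
  public void doFilter(ServletRequest req, ServletResponse res,
      FilterChain chain) throws IOException, ServletException {
    // With the "/", "/*" mapping this sees /jmx, /conf, /stacks and
    // static content alike.
    chain.doFilter(req, res);
  }

  /** Hooked in via hadoop.http.filter.initializers. */
  public static class Initializer extends FilterInitializer {
    @Override
    public void initFilter(FilterContainer container, Configuration conf) {
      container.addFilter("audit", AuditFilter.class.getName(), null);
    }
  }
}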
View File

@@ -0,0 +1,59 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.http;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Hadoop DefaultServlet for serving static web content.
*/
public class WebServlet extends DefaultServlet {
private static final long serialVersionUID = 3910031415927L;
public static final Logger LOG = LoggerFactory.getLogger(WebServlet.class);
/**
* Get method is modified to support impersonation and Kerberos
* SPNEGO token by forcing client side redirect when accessing
* "/" (root) of the web application context.
*/
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
if (request.getRequestURI().equals("/")) {
StringBuilder location = new StringBuilder();
location.append("index.html");
if (request.getQueryString() != null) {
// echo query string but prevent HTTP response splitting
location.append("?");
location.append(request.getQueryString()
.replaceAll("\n", "").replaceAll("\r", ""));
}
response.sendRedirect(location.toString());
} else {
super.doGet(request, response);
}
}
}
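A quick sketch of the redirect behavior this servlet introduces (assumes an HttpServer2 instance is already listening; the port and query string are illustrative):

import java.net.HttpURLConnection;
import java.net.URL;

public class RootRedirectDemo {
  public static void main(String[] args) throws Exception {
    URL root = new URL("http://localhost:54321/?user.name=alice");
    HttpURLConnection conn = (HttpURLConnection) root.openConnection();
    conn.setInstanceFollowRedirects(false);
    // WebServlet answers "/" with a client-side redirect (302)...
    System.out.println(conn.getResponseCode());
    // ...whose Location ends with "index.html?user.name=alice";
    // any CR/LF in the query string would have been stripped.
    System.out.println(conn.getHeaderField("Location"));
  }
}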

View File

@@ -71,4 +71,6 @@ Trusted Proxy
Trusted Proxy adds support for performing operations as the end user instead of the proxy user. It fetches the end user from
the doAs query parameter. To enable Trusted Proxy, set the following configuration parameter:
Add org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilterInitializer to hadoop.http.filter.initializers at the end in core-site.xml.
Add org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilterInitializer to hadoop.http.filter.initializers in core-site.xml
instead of org.apache.hadoop.security.AuthenticationFilterInitializer.
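For illustration, a Trusted Proxy call from Java, mirroring TestHttpServerWithSpnego below: the SPNEGO-authenticated proxy user performs the request as userB through the doAs query parameter (host, port, and user names are illustrative):

import java.net.HttpURLConnection;
import java.net.URL;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;

public class TrustedProxyDemo {
  public static void main(String[] args) throws Exception {
    // Authenticates the current (proxy) user via SPNEGO, then acts as userB.
    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
    HttpURLConnection conn = new AuthenticatedURL().openConnection(
        new URL("http://nn.example.com:9870/jmx?doAs=userB"), token);
    // 200 if the proxy user may impersonate userB, 403 otherwise.
    System.out.println(conn.getResponseCode());
  }
}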

View File

@@ -142,6 +142,8 @@ public void testServletFilter() throws Exception {
for(int i = 0; i < urls.length; i++) {
assertTrue(RECORDS.remove(urls[i]));
}
assertTrue(RECORDS.isEmpty());
assertTrue(RECORDS.size() == 1);
// Accessing "/" will redirect to /index.html
assertTrue(RECORDS.contains("/index.html"));
}
}

View File

@@ -0,0 +1,238 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.http;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.KerberosTestUtils;
import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.AuthenticationToken;
import org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilterInitializer;
import org.apache.hadoop.security.authentication.util.Signer;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import org.apache.hadoop.security.authentication.util.StringSignerSecretProviderCreator;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.Assert;
import java.io.File;
import java.io.FileWriter;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.util.Properties;
import static org.junit.Assert.assertTrue;
/**
* Tests the HTTP server with SPNEGO authentication.
*/
public class TestHttpServerWithSpnego {
static final Log LOG = LogFactory.getLog(TestHttpServerWithSpnego.class);
private static final String SECRET_STR = "secret";
private static final String HTTP_USER = "HTTP";
private static final String PREFIX = "hadoop.http.authentication.";
private static final long TIMEOUT = 20000;
private static File httpSpnegoKeytabFile = new File(
KerberosTestUtils.getKeytabFile());
private static String httpSpnegoPrincipal =
KerberosTestUtils.getServerPrincipal();
private static String realm = KerberosTestUtils.getRealm();
private static File testRootDir = new File("target",
TestHttpServerWithSpnego.class.getName() + "-root");
private static MiniKdc testMiniKDC;
private static File secretFile = new File(testRootDir, SECRET_STR);
@BeforeClass
public static void setUp() throws Exception {
try {
testMiniKDC = new MiniKdc(MiniKdc.createConf(), testRootDir);
testMiniKDC.start();
testMiniKDC.createPrincipal(
httpSpnegoKeytabFile, HTTP_USER + "/localhost");
} catch (Exception e) {
Assert.fail("Couldn't setup MiniKDC");
}
Writer w = new FileWriter(secretFile);
w.write("secret");
w.close();
}
@AfterClass
public static void tearDown() {
if (testMiniKDC != null) {
testMiniKDC.stop();
}
}
/**
* groupA
* - userA
* groupB
* - userA, userB
* groupC
* - userC
* SPNEGO filter has been enabled.
* userA has the privilege to impersonate users in groupB.
* userA has admin access to all default servlets, but userB
* and userC do not. So "/logs" can only be accessed by userA.
* @throws Exception
*/
@Test
public void testAuthenticationWithProxyUser() throws Exception {
Configuration spnegoConf = getSpnegoConf(new Configuration());
//setup logs dir
System.setProperty("hadoop.log.dir", testRootDir.getAbsolutePath());
// Setup user group
UserGroupInformation.createUserForTesting("userA",
new String[]{"groupA", "groupB"});
UserGroupInformation.createUserForTesting("userB",
new String[]{"groupB"});
UserGroupInformation.createUserForTesting("userC",
new String[]{"groupC"});
// Make userA impersonate users in groupB
spnegoConf.set("hadoop.proxyuser.userA.hosts", "*");
spnegoConf.set("hadoop.proxyuser.userA.groups", "groupB");
ProxyUsers.refreshSuperUserGroupsConfiguration(spnegoConf);
HttpServer2 httpServer = null;
try {
// Create http server to test.
httpServer = getCommonBuilder()
.setConf(spnegoConf)
.setACL(new AccessControlList("userA groupA"))
.build();
httpServer.start();
// Get signer to encrypt token
Signer signer = getSignerToEncrypt();
// setup auth token for userA
AuthenticatedURL.Token token = getEncryptedAuthToken(signer, "userA");
String serverURL = "http://" +
NetUtils.getHostPortString(httpServer.getConnectorAddress(0)) + "/";
// The default authenticator is kerberos.
AuthenticatedURL authUrl = new AuthenticatedURL();
// userA impersonates userB; this is allowed.
for (String servlet :
new String[]{"stacks", "jmx", "conf"}) {
HttpURLConnection conn = authUrl
.openConnection(new URL(serverURL + servlet + "?doAs=userB"),
token);
Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
}
// userA cannot impersonate userC; this fails.
for (String servlet :
new String[]{"stacks", "jmx", "conf"}){
HttpURLConnection conn = authUrl
.openConnection(new URL(serverURL + servlet + "?doAs=userC"),
token);
Assert.assertEquals(HttpURLConnection.HTTP_FORBIDDEN,
conn.getResponseCode());
}
// "/logs" and "/logLevel" require admin authorization,
// only userA has access.
for (String servlet :
new String[]{"logLevel", "logs"}) {
HttpURLConnection conn = authUrl
.openConnection(new URL(serverURL + servlet), token);
Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
}
// Setup token for userB
token = getEncryptedAuthToken(signer, "userB");
// userB cannot access these servlets.
for (String servlet :
new String[]{"logLevel", "logs"}) {
HttpURLConnection conn = authUrl
.openConnection(new URL(serverURL + servlet), token);
Assert.assertEquals(HttpURLConnection.HTTP_FORBIDDEN,
conn.getResponseCode());
}
} finally {
if (httpServer != null) {
httpServer.stop();
}
}
}
private AuthenticatedURL.Token getEncryptedAuthToken(Signer signer,
String user) throws Exception {
AuthenticationToken token =
new AuthenticationToken(user, user, "kerberos");
token.setExpires(System.currentTimeMillis() + TIMEOUT);
return new AuthenticatedURL.Token(signer.sign(token.toString()));
}
private Signer getSignerToEncrypt() throws Exception {
SignerSecretProvider secretProvider =
StringSignerSecretProviderCreator.newStringSignerSecretProvider();
Properties secretProviderProps = new Properties();
secretProviderProps.setProperty(
AuthenticationFilter.SIGNATURE_SECRET, SECRET_STR);
secretProvider.init(secretProviderProps, null, TIMEOUT);
return new Signer(secretProvider);
}
private Configuration getSpnegoConf(Configuration conf) {
conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
ProxyUserAuthenticationFilterInitializer.class.getName());
conf.set(PREFIX + "type", "kerberos");
conf.setBoolean(PREFIX + "simple.anonymous.allowed", false);
conf.set(PREFIX + "signature.secret.file",
secretFile.getAbsolutePath());
conf.set(PREFIX + "kerberos.keytab",
httpSpnegoKeytabFile.getAbsolutePath());
conf.set(PREFIX + "kerberos.principal", httpSpnegoPrincipal);
conf.set(PREFIX + "cookie.domain", realm);
conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
true);
return conf;
}
private HttpServer2.Builder getCommonBuilder() throws Exception {
return new HttpServer2.Builder().setName("test")
.addEndpoint(new URI("http://localhost:0"))
.setFindPort(true);
}
}
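As a side note on the helpers above: getEncryptedAuthToken() fabricates the hadoop.auth cookie value that AuthenticationFilter would normally issue after a successful SPNEGO handshake. A self-contained sketch of the same recipe (the cookie layout shown in the comment is approximate):

import java.util.Properties;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.AuthenticationToken;
import org.apache.hadoop.security.authentication.util.Signer;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import org.apache.hadoop.security.authentication.util.StringSignerSecretProviderCreator;

public class SignedCookieDemo {
  public static void main(String[] args) throws Exception {
    // Same recipe as the test's getSignerToEncrypt()/getEncryptedAuthToken().
    SignerSecretProvider provider =
        StringSignerSecretProviderCreator.newStringSignerSecretProvider();
    Properties props = new Properties();
    props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, "secret");
    provider.init(props, null, 20000);
    AuthenticationToken token =
        new AuthenticationToken("userA", "userA", "kerberos");
    token.setExpires(System.currentTimeMillis() + 20000);
    // Prints the hadoop.auth cookie value, roughly:
    //   u=userA&p=userA&t=kerberos&e=<expiryMillis>&s=<signature>
    System.out.println(new Signer(provider).sign(token.toString()));
  }
}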

View File

@@ -35,7 +35,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -102,7 +101,6 @@ static void access(String urlstring) throws IOException {
}
}
@Test
public void testPathSpecFilters() throws Exception {
Configuration conf = new Configuration();

View File

@@ -100,7 +100,6 @@ static void access(String urlstring) throws IOException {
}
}
@Test
public void testServletFilter() throws Exception {
Configuration conf = new Configuration();

View File

@@ -34,6 +34,7 @@
import org.apache.hadoop.log.LogLevel.CLI;
import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.KerberosTestUtils;
import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
@@ -73,6 +74,7 @@ public class TestLogLevel extends KerberosSecurityTestcase {
private final Logger log = ((Log4JLogger)testlog).getLogger();
private final static String PRINCIPAL = "loglevel.principal";
private final static String KEYTAB = "loglevel.keytab";
private static final String PREFIX = "hadoop.http.authentication.";
@BeforeClass
public static void setUp() throws Exception {
@@ -262,6 +264,13 @@ private void testDynamicLogLevel(final String bindProtocol,
conf.set(KEYTAB, KerberosTestUtils.getKeytabFile());
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
"kerberos");
conf.set(PREFIX + "type", "kerberos");
conf.set(PREFIX + "kerberos.keytab", KerberosTestUtils.getKeytabFile());
conf.set(PREFIX + "kerberos.principal",
KerberosTestUtils.getServerPrincipal());
conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
AuthenticationFilterInitializer.class.getName());
conf.setBoolean(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION,
true);
UserGroupInformation.setConfiguration(conf);

View File

@@ -91,19 +91,7 @@ public static void initWebHdfs(Configuration conf, String hostname,
HdfsClientConfigKeys.DFS_WEBHDFS_ACL_PERMISSION_PATTERN_KEY,
HdfsClientConfigKeys.DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT));
// add authentication filter for webhdfs
final String className = conf.get(
DFSConfigKeys.DFS_WEBHDFS_AUTHENTICATION_FILTER_KEY,
DFSConfigKeys.DFS_WEBHDFS_AUTHENTICATION_FILTER_DEFAULT);
final String name = className;
final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
Map<String, String> params = getAuthFilterParams(conf, hostname,
httpKeytab);
HttpServer2.defineFilter(httpServer2.getWebAppContext(), name, className,
params, new String[] { pathSpec });
HttpServer2.LOG.info("Added filter '" + name + "' (class=" + className
+ ")");
// add REST CSRF prevention filter
if (conf.getBoolean(DFS_WEBHDFS_REST_CSRF_ENABLED_KEY,

View File

@@ -26,8 +26,10 @@
import org.apache.hadoop.hdfs.qjournal.MiniQJMHACluster;
import org.apache.hadoop.hdfs.qjournal.TestSecureNNWithQJM;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.ipc.Client;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
@@ -74,6 +76,7 @@ public class TestDFSInotifyEventInputStreamKerberized {
private static final Logger LOG =
LoggerFactory.getLogger(TestDFSInotifyEventInputStreamKerberized.class);
private static final String PREFIX = "hadoop.http.authentication.";
private File baseDir;
private String keystoresDir;
@@ -183,6 +186,12 @@ public void initKerberizedCluster() throws Exception {
userName + "/" + krbInstance + "@" + kdc.getRealm();
final String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();
baseConf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
AuthenticationFilterInitializer.class.getName());
baseConf.set(PREFIX + "type", "kerberos");
baseConf.set(PREFIX + "kerberos.keytab", nnKeytabFile.getAbsolutePath());
baseConf.set(PREFIX + "kerberos.principal", "HTTP/" + krbInstance);
baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);

View File

@@ -49,8 +49,10 @@
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
@@ -68,6 +70,7 @@ public class TestSecureNNWithQJM {
private static final Path TEST_PATH = new Path("/test-dir");
private static final Path TEST_PATH_2 = new Path("/test-dir-2");
private static final String PREFIX = "hadoop.http.authentication.";
private static HdfsConfiguration baseConf;
private static File baseDir;
@@ -112,6 +115,11 @@ public static void init() throws Exception {
String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();
baseConf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
AuthenticationFilterInitializer.class.getName());
baseConf.set(PREFIX + "type", "kerberos");
baseConf.set(PREFIX + "kerberos.keytab", keytab);
baseConf.set(PREFIX + "kerberos.principal", spnegoPrincipal);
baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);

View File

@@ -63,12 +63,14 @@
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.web.resources.*;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.test.GenericTestUtils;
@@ -80,6 +82,7 @@
import org.junit.Test;
public class TestWebHdfsTokens {
private static final String PREFIX = "hadoop.http.authentication.";
private static Configuration conf;
URI uri = null;
@@ -142,6 +145,11 @@ private static void initSecureConf(Configuration secureConf)
kdc.createPrincipal(keytabFile, username, username + "/" + krbInstance,
"HTTP/" + krbInstance);
secureConf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
AuthenticationFilterInitializer.class.getName());
secureConf.set(PREFIX + "type", "kerberos");
secureConf.set(PREFIX + "kerberos.keytab", keytab);
secureConf.set(PREFIX + "kerberos.principal", spnegoPrincipal);
secureConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, principal);
secureConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
secureConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, principal);

View File

@@ -34,6 +34,9 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.http.FilterContainer;
import org.apache.hadoop.http.FilterInitializer;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.net.NetUtils;
import org.junit.AfterClass;
import org.junit.Assert;
@@ -63,6 +66,17 @@ public void doFilter(ServletRequest request, ServletResponse response,
public void destroy() {
}
/** Initializer for Custom Filter. */
public static class Initializer extends FilterInitializer {
public Initializer() {}
@Override
public void initFilter(FilterContainer container, Configuration config) {
container.addFilter("customFilter",
TestWebHdfsWithAuthenticationFilter.CustomizedFilter.class.
getName(), null);
}
}
}
private static Configuration conf;
@@ -72,8 +86,8 @@ public void destroy() {
@BeforeClass
public static void setUp() throws IOException {
conf = new Configuration();
conf.set(DFSConfigKeys.DFS_WEBHDFS_AUTHENTICATION_FILTER_KEY,
CustomizedFilter.class.getName());
conf.set(HttpServer2.FILTER_INITIALIZER_PROPERTY,
CustomizedFilter.Initializer.class.getName());
conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "localhost:0");
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
InetSocketAddress addr = cluster.getNameNode().getHttpAddress();

View File

@@ -95,6 +95,15 @@ public void service(HttpServletRequest req, HttpServletResponse res)
if (uri.equals("/")) {
String redirectPath = webApp.getRedirectPath();
if (redirectPath != null && !redirectPath.isEmpty()) {
if (req.getQueryString() != null) {
StringBuilder query = new StringBuilder();
query.append(redirectPath);
query.append("?");
// Prevent HTTP response splitting vulnerability
query.append(req.getQueryString().replaceAll("\r", "")
.replaceAll("\n", ""));
redirectPath = query.toString();
}
res.sendRedirect(redirectPath);
return;
}
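Why the CR/LF stripping above matters, in a small illustration (the attacker-controlled query string is hypothetical):

public class ResponseSplittingDemo {
  public static void main(String[] args) {
    // A query string containing raw CR/LF could terminate the redirect's
    // headers early and inject attacker-controlled ones.
    String query = "x=1\r\nSet-Cookie: pwned=1";
    String sanitized = query.replaceAll("\r", "").replaceAll("\n", "");
    // Prints "x=1Set-Cookie: pwned=1"; still ugly, but it can no longer
    // split the HTTP response into extra header lines.
    System.out.println(sanitized);
  }
}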

View File

@@ -21,6 +21,7 @@
import java.util.LinkedHashSet;
import java.util.Set;
import org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilterInitializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -50,12 +51,17 @@ private TimelineServerUtils() {
*/
public static void setTimelineFilters(Configuration conf,
String configuredInitializers, Set<String> defaultInitializers) {
Set<String> ignoreInitializers = new LinkedHashSet<>();
ignoreInitializers.add(AuthenticationFilterInitializer.class.getName());
ignoreInitializers.add(
ProxyUserAuthenticationFilterInitializer.class.getName());
String[] parts = configuredInitializers.split(",");
Set<String> target = new LinkedHashSet<String>();
for (String filterInitializer : parts) {
filterInitializer = filterInitializer.trim();
if (filterInitializer.equals(
AuthenticationFilterInitializer.class.getName()) ||
if (ignoreInitializers.contains(filterInitializer) ||
filterInitializer.isEmpty()) {
continue;
}

View File

@@ -28,6 +28,7 @@
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilterInitializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
@@ -145,6 +146,9 @@ public static void setupSecurityAndFilters(Configuration conf,
}
target.add(filterInitializer.getName());
}
target.remove(ProxyUserAuthenticationFilterInitializer.class.getName());
actualInitializers = StringUtils.join(",", target);
LOG.info("Using RM authentication filter(kerberos/delegation-token)"

View File

@@ -31,6 +31,7 @@
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.HttpCrossOriginFilterInitializer;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.authentication.server.ProxyUserAuthenticationFilterInitializer;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.util.ReflectionUtils;
@@ -159,9 +160,15 @@ protected void addFilters(Configuration conf) {
String initializers = conf.get("hadoop.http.filter.initializers", "");
Set<String> defaultInitializers = new LinkedHashSet<String>();
if (!initializers.contains(
TimelineReaderAuthenticationFilterInitializer.class.getName())) {
defaultInitializers.add(
TimelineReaderAuthenticationFilterInitializer.class.getName());
ProxyUserAuthenticationFilterInitializer.class.getName())) {
if (!initializers.contains(
TimelineReaderAuthenticationFilterInitializer.class.getName())) {
defaultInitializers.add(
TimelineReaderAuthenticationFilterInitializer.class.getName());
} else {
defaultInitializers.add(
ProxyUserAuthenticationFilterInitializer.class.getName());
}
}
defaultInitializers.add(

View File

@@ -36,9 +36,9 @@
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.KerberosTestUtils;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.security.http.RMAuthenticationFilterInitializer;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -54,6 +54,7 @@ public class TestSecureAmFilter {
private String proxyHost = "localhost";
private static final File TEST_ROOT_DIR = new File("target",
TestSecureAmFilter.class.getName() + "-root");
private static final String PREFIX = "hadoop.http.authentication.";
private static File httpSpnegoKeytabFile = new File(
KerberosTestUtils.getKeytabFile());
private static Configuration rmconf = new Configuration();
@@ -70,7 +71,12 @@ public static void setUp() {
rmconf.setBoolean(YarnConfiguration.RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER,
true);
rmconf.set("hadoop.http.filter.initializers",
RMAuthenticationFilterInitializer.class.getName());
AuthenticationFilterInitializer.class.getName());
rmconf.set(PREFIX + "type", "kerberos");
rmconf.set(PREFIX + "kerberos.keytab",
httpSpnegoKeytabFile.getAbsolutePath());
rmconf.set(PREFIX + "kerberos.principal", httpSpnegoPrincipal);
rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY,
httpSpnegoPrincipal);
rmconf.set(YarnConfiguration.RM_KEYTAB,