HDFS-16129. Fixing the signature secret file misusage in HttpFS. Contributed by Tamas Domok

* HDFS-16129. Fixing the signature secret file misusage in HttpFS.

The signature secret file was not used in HttpFS.
 - if the configuration did not contain the deprecated
httpfs.authentication.signature.secret.file option, then it
used the random secret provider
 - if both options (httpfs. and hadoop.http.) were set, then
the HttpFSAuthenticationFilter could not read the file
because the file path was not substituted properly

!NOTE! behavioral change: the deprecated httpfs. configuration
values are overwritten with the hadoop.http. values.

The commit also contains a follow-up change to YARN-10814:
empty secret files will result in a random secret provider.

Co-authored-by: Tamas Domok <tdomok@cloudera.com>
This commit is contained in:
Tamas Domok 2021-09-20 14:29:50 +02:00 committed by GitHub
parent f92c6750ec
commit f93e8fbf2d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 258 additions and 135 deletions

View File

@ -243,7 +243,9 @@ public static class Builder {
private String hostName; private String hostName;
private boolean disallowFallbackToRandomSignerSecretProvider; private boolean disallowFallbackToRandomSignerSecretProvider;
private String authFilterConfigurationPrefix = "hadoop.http.authentication."; private final List<String> authFilterConfigurationPrefixes =
new ArrayList<>(Collections.singletonList(
"hadoop.http.authentication."));
private String excludeCiphers; private String excludeCiphers;
private boolean xFrameEnabled; private boolean xFrameEnabled;
@ -365,8 +367,15 @@ public Builder disallowFallbackToRandomSingerSecretProvider(boolean value) {
return this; return this;
} }
public Builder authFilterConfigurationPrefix(String value) { public Builder setAuthFilterConfigurationPrefix(String value) {
this.authFilterConfigurationPrefix = value; this.authFilterConfigurationPrefixes.clear();
this.authFilterConfigurationPrefixes.add(value);
return this;
}
public Builder setAuthFilterConfigurationPrefixes(String[] prefixes) {
this.authFilterConfigurationPrefixes.clear();
Collections.addAll(this.authFilterConfigurationPrefixes, prefixes);
return this; return this;
} }
@ -473,8 +482,10 @@ public HttpServer2 build() throws IOException {
HttpServer2 server = new HttpServer2(this); HttpServer2 server = new HttpServer2(this);
if (this.securityEnabled && if (this.securityEnabled &&
!this.conf.get(authFilterConfigurationPrefix + "type"). authFilterConfigurationPrefixes.stream().noneMatch(
equals(PseudoAuthenticationHandler.TYPE)) { prefix -> this.conf.get(prefix + "type")
.equals(PseudoAuthenticationHandler.TYPE))
) {
server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey); server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
} }
@ -811,18 +822,25 @@ private static SignerSecretProvider constructSecretProvider(final Builder b,
throws Exception { throws Exception {
final Configuration conf = b.conf; final Configuration conf = b.conf;
Properties config = getFilterProperties(conf, Properties config = getFilterProperties(conf,
b.authFilterConfigurationPrefix); b.authFilterConfigurationPrefixes);
return AuthenticationFilter.constructSecretProvider( return AuthenticationFilter.constructSecretProvider(
ctx, config, b.disallowFallbackToRandomSignerSecretProvider); ctx, config, b.disallowFallbackToRandomSignerSecretProvider);
} }
private static Properties getFilterProperties(Configuration conf, String public static Properties getFilterProperties(Configuration conf, List<String> prefixes) {
prefix) { Properties props = new Properties();
Properties prop = new Properties(); for (String prefix : prefixes) {
Map<String, String> filterConfig = AuthenticationFilterInitializer Map<String, String> filterConfigMap =
.getFilterConfigMap(conf, prefix); AuthenticationFilterInitializer.getFilterConfigMap(conf, prefix);
prop.putAll(filterConfig); for (Map.Entry<String, String> entry : filterConfigMap.entrySet()) {
return prop; Object previous = props.setProperty(entry.getKey(), entry.getValue());
if (previous != null && !previous.equals(entry.getValue())) {
LOG.warn("Overwriting configuration for key='{}' with value='{}' " +
"previous value='{}'", entry.getKey(), entry.getValue(), previous);
}
}
}
return props;
} }
private static void addNoCacheFilter(ServletContextHandler ctxt) { private static void addNoCacheFilter(ServletContextHandler ctxt) {

View File

@ -118,7 +118,7 @@ public class KMSWebServer {
.setName(NAME) .setName(NAME)
.setConf(conf) .setConf(conf)
.setSSLConf(sslConf) .setSSLConf(sslConf)
.authFilterConfigurationPrefix(KMSAuthenticationFilter.CONFIG_PREFIX) .setAuthFilterConfigurationPrefix(KMSAuthenticationFilter.CONFIG_PREFIX)
.setACL(new AccessControlList(conf.get( .setACL(new AccessControlList(conf.get(
KMSConfiguration.HTTP_ADMINS_KEY, " "))) KMSConfiguration.HTTP_ADMINS_KEY, " ")))
.addEndpoint(endpoint) .addEndpoint(endpoint)

View File

@ -20,6 +20,7 @@
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.web.WebHdfsConstants; import org.apache.hadoop.hdfs.web.WebHdfsConstants;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider; import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider; import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
@ -35,6 +36,8 @@
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Map; import java.util.Map;
import java.util.Properties; import java.util.Properties;
@ -46,9 +49,9 @@
public class HttpFSAuthenticationFilter public class HttpFSAuthenticationFilter
extends DelegationTokenAuthenticationFilter { extends DelegationTokenAuthenticationFilter {
static final String CONF_PREFIX = "httpfs.authentication."; public static final String CONF_PREFIX = "httpfs.authentication.";
public static final String HADOOP_HTTP_CONF_PREFIX = "hadoop.http.authentication.";
static final String HADOOP_HTTP_CONF_PREFIX = "hadoop.http.authentication."; static final String[] CONF_PREFIXES = {CONF_PREFIX, HADOOP_HTTP_CONF_PREFIX};
private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET
+ ".file"; + ".file";
@ -69,27 +72,9 @@ public class HttpFSAuthenticationFilter
@Override @Override
protected Properties getConfiguration(String configPrefix, protected Properties getConfiguration(String configPrefix,
FilterConfig filterConfig) throws ServletException{ FilterConfig filterConfig) throws ServletException{
Properties props = new Properties();
Configuration conf = HttpFSServerWebApp.get().getConfig(); Configuration conf = HttpFSServerWebApp.get().getConfig();
Properties props = HttpServer2.getFilterProperties(conf,
props.setProperty(AuthenticationFilter.COOKIE_PATH, "/"); new ArrayList<>(Arrays.asList(CONF_PREFIXES)));
for (Map.Entry<String, String> entry : conf) {
String name = entry.getKey();
if (name.startsWith(HADOOP_HTTP_CONF_PREFIX)) {
name = name.substring(HADOOP_HTTP_CONF_PREFIX.length());
props.setProperty(name, entry.getValue());
}
}
// Replace Hadoop Http Authentication Configs with HttpFS specific Configs
for (Map.Entry<String, String> entry : conf) {
String name = entry.getKey();
if (name.startsWith(CONF_PREFIX)) {
String value = conf.get(name);
name = name.substring(CONF_PREFIX.length());
props.setProperty(name, value);
}
}
String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null); String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
if (signatureSecretFile == null) { if (signatureSecretFile == null) {
@ -106,8 +91,16 @@ protected Properties getConfiguration(String configPrefix,
secret.append((char) c); secret.append((char) c);
c = reader.read(); c = reader.read();
} }
String secretString = secret.toString();
if (secretString.isEmpty()) {
throw new RuntimeException(
"No secret in HttpFs signature secret file: "
+ signatureSecretFile);
}
props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, props.setProperty(AuthenticationFilter.SIGNATURE_SECRET,
secret.toString()); secretString);
} catch (IOException ex) { } catch (IOException ex) {
throw new RuntimeException("Could not read HttpFS signature " throw new RuntimeException("Could not read HttpFS signature "
+ "secret file: " + signatureSecretFile); + "secret file: " + signatureSecretFile);

View File

@ -17,6 +17,14 @@
*/ */
package org.apache.hadoop.fs.http.server; package org.apache.hadoop.fs.http.server;
import com.google.common.annotations.VisibleForTesting;
import static org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter.CONF_PREFIX;
import static org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX;
import static org.apache.hadoop.security.authentication.server.AuthenticationFilter.AUTH_TYPE;
import static org.apache.hadoop.security.authentication.server.AuthenticationFilter.SIGNATURE_SECRET_FILE;
import static org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler.KEYTAB;
import static org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler.PRINCIPAL;
import static org.apache.hadoop.util.StringUtils.startupShutdownMessage; import static org.apache.hadoop.util.StringUtils.startupShutdownMessage;
import java.io.IOException; import java.io.IOException;
@ -65,6 +73,7 @@ public class HttpFSServerWebServer {
private static final String SERVLET_PATH = "/webhdfs"; private static final String SERVLET_PATH = "/webhdfs";
static { static {
addDeprecatedKeys();
Configuration.addDefaultResource(HTTPFS_DEFAULT_XML); Configuration.addDefaultResource(HTTPFS_DEFAULT_XML);
Configuration.addDefaultResource(HTTPFS_SITE_XML); Configuration.addDefaultResource(HTTPFS_SITE_XML);
} }
@ -124,7 +133,8 @@ public class HttpFSServerWebServer {
.setName(NAME) .setName(NAME)
.setConf(conf) .setConf(conf)
.setSSLConf(sslConf) .setSSLConf(sslConf)
.authFilterConfigurationPrefix(HttpFSAuthenticationFilter.CONF_PREFIX) .setAuthFilterConfigurationPrefixes(
HttpFSAuthenticationFilter.CONF_PREFIXES)
.setACL(new AccessControlList(conf.get(HTTP_ADMINS_KEY, " "))) .setACL(new AccessControlList(conf.get(HTTP_ADMINS_KEY, " ")))
.addEndpoint(endpoint) .addEndpoint(endpoint)
.build(); .build();
@ -178,6 +188,11 @@ public URL getUrl() {
} }
} }
@VisibleForTesting
HttpServer2 getHttpServer() {
return httpServer;
}
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
startupShutdownMessage(HttpFSServerWebServer.class, args, LOG); startupShutdownMessage(HttpFSServerWebServer.class, args, LOG);
Configuration conf = new Configuration(true); Configuration conf = new Configuration(true);
@ -187,4 +202,17 @@ public static void main(String[] args) throws Exception {
webServer.start(); webServer.start();
webServer.join(); webServer.join();
} }
public static void addDeprecatedKeys() {
Configuration.addDeprecations(new Configuration.DeprecationDelta[]{
new Configuration.DeprecationDelta(CONF_PREFIX + KEYTAB,
HADOOP_HTTP_CONF_PREFIX + KEYTAB),
new Configuration.DeprecationDelta(CONF_PREFIX + PRINCIPAL,
HADOOP_HTTP_CONF_PREFIX + PRINCIPAL),
new Configuration.DeprecationDelta(CONF_PREFIX + SIGNATURE_SECRET_FILE,
HADOOP_HTTP_CONF_PREFIX + SIGNATURE_SECRET_FILE),
new Configuration.DeprecationDelta(CONF_PREFIX + AUTH_TYPE,
HADOOP_HTTP_CONF_PREFIX + AUTH_TYPE)
});
}
} }

View File

@ -158,8 +158,8 @@
If multiple HttpFS servers are used in a load-balancer/round-robin fashion, If multiple HttpFS servers are used in a load-balancer/round-robin fashion,
they should share the secret file. they should share the secret file.
If the secret file specified here does not exist, random secret is If the secret file specified here does not exist or it is empty, a random
generated at startup time. secret is generated at startup time.
httpfs.authentication.signature.secret.file is deprecated. Instead use httpfs.authentication.signature.secret.file is deprecated. Instead use
hadoop.http.authentication.signature.secret.file. hadoop.http.authentication.signature.secret.file.

View File

@ -33,6 +33,7 @@
import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.contract.ContractTestUtils; import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter;
import org.apache.hadoop.fs.http.server.HttpFSServerWebApp; import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus; import org.apache.hadoop.fs.permission.AclStatus;
@ -58,6 +59,7 @@
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.test.HFSTestCase; import org.apache.hadoop.test.HFSTestCase;
import org.apache.hadoop.test.HadoopUsersConfTestHelper; import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.test.LambdaTestUtils; import org.apache.hadoop.test.LambdaTestUtils;
@ -148,7 +150,8 @@ private void createHttpFSServer() throws Exception {
HadoopUsersConfTestHelper.getHadoopProxyUserGroups()); HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
HadoopUsersConfTestHelper.getHadoopProxyUserHosts()); HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath()); conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.SIGNATURE_SECRET_FILE, secretFile.getAbsolutePath());
File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml"); File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
os = new FileOutputStream(httpfsSite); os = new FileOutputStream(httpfsSite);
conf.writeXml(os); conf.writeXml(os);

View File

@ -23,6 +23,7 @@
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.test.HTestCase; import org.apache.hadoop.test.HTestCase;
import org.apache.hadoop.test.HadoopUsersConfTestHelper; import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.test.TestDir; import org.apache.hadoop.test.TestDir;
@ -128,8 +129,9 @@ private void createHttpFSServer() throws Exception {
conf.set("httpfs.proxyuser." + conf.set("httpfs.proxyuser." +
HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
HadoopUsersConfTestHelper.getHadoopProxyUserHosts()); HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
conf.set("httpfs.authentication.signature.secret.file", conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
secretFile.getAbsolutePath()); AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());
File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml"); File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
os = new FileOutputStream(httpfsSite); os = new FileOutputStream(httpfsSite);

View File

@ -231,8 +231,9 @@ private Configuration createHttpFSConf(boolean addDelegationTokenAuthHandler,
// HTTPFS configuration // HTTPFS configuration
conf = new Configuration(false); conf = new Configuration(false);
if (addDelegationTokenAuthHandler) { if (addDelegationTokenAuthHandler) {
conf.set("httpfs.authentication.type", conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
HttpFSKerberosAuthenticationHandlerForTesting.class.getName()); AuthenticationFilter.AUTH_TYPE,
HttpFSKerberosAuthenticationHandlerForTesting.class.getName());
} }
conf.set("httpfs.services.ext", MockGroups.class.getName()); conf.set("httpfs.services.ext", MockGroups.class.getName());
conf.set("httpfs.admin.group", HadoopUsersConfTestHelper. conf.set("httpfs.admin.group", HadoopUsersConfTestHelper.
@ -243,8 +244,9 @@ private Configuration createHttpFSConf(boolean addDelegationTokenAuthHandler,
conf.set("httpfs.proxyuser." + conf.set("httpfs.proxyuser." +
HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
HadoopUsersConfTestHelper.getHadoopProxyUserHosts()); HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
conf.set("httpfs.authentication.signature.secret.file", conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
secretFile.getAbsolutePath()); AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());
conf.set("httpfs.hadoop.config.dir", hadoopConfDir.toString()); conf.set("httpfs.hadoop.config.dir", hadoopConfDir.toString());
if (sslEnabled) { if (sslEnabled) {
conf.set("httpfs.ssl.enabled", "true"); conf.set("httpfs.ssl.enabled", "true");

View File

@ -23,6 +23,7 @@
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.test.HTestCase; import org.apache.hadoop.test.HTestCase;
import org.apache.hadoop.test.HadoopUsersConfTestHelper; import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.test.TestDir; import org.apache.hadoop.test.TestDir;
@ -136,8 +137,9 @@ private void createHttpFSServer() throws Exception {
conf.set("httpfs.proxyuser." + conf.set("httpfs.proxyuser." +
HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
HadoopUsersConfTestHelper.getHadoopProxyUserHosts()); HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
conf.set("httpfs.authentication.signature.secret.file", conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
secretFile.getAbsolutePath()); AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());
File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml"); File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
os = new FileOutputStream(httpfsSite); os = new FileOutputStream(httpfsSite);

View File

@ -23,6 +23,7 @@
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.test.HTestCase; import org.apache.hadoop.test.HTestCase;
import org.apache.hadoop.test.HadoopUsersConfTestHelper; import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.test.TestDir; import org.apache.hadoop.test.TestDir;
@ -137,8 +138,9 @@ private void createHttpFSServer() throws Exception {
conf.set("httpfs.proxyuser." + conf.set("httpfs.proxyuser." +
HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
HadoopUsersConfTestHelper.getHadoopProxyUserHosts()); HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
conf.set("httpfs.authentication.signature.secret.file", conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
secretFile.getAbsolutePath()); AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());
File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml"); File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
os = new FileOutputStream(httpfsSite); os = new FileOutputStream(httpfsSite);

View File

@ -17,9 +17,11 @@
*/ */
package org.apache.hadoop.fs.http.server; package org.apache.hadoop.fs.http.server;
import java.io.BufferedReader;
import java.io.File; import java.io.File;
import java.io.FileOutputStream;
import java.io.BufferedReader;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.io.IOException;
import java.net.HttpURLConnection; import java.net.HttpURLConnection;
import java.net.URL; import java.net.URL;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
@ -27,17 +29,23 @@
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.util.FileSignerSecretProvider;
import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.HadoopUsersConfTestHelper; import org.apache.hadoop.test.HadoopUsersConfTestHelper;
import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Shell;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.BeforeClass; import org.junit.After;
import org.junit.Rule; import org.junit.Rule;
import org.junit.Test; import org.junit.Test;
import org.junit.rules.Timeout; import org.junit.rules.Timeout;
import static org.apache.hadoop.security.authentication.server.AuthenticationFilter.SIGNER_SECRET_PROVIDER_ATTRIBUTE;
/** /**
* Test {@link HttpFSServerWebServer}. * Test {@link HttpFSServerWebServer}.
*/ */
@ -45,11 +53,13 @@ public class TestHttpFSServerWebServer {
@Rule @Rule
public Timeout timeout = new Timeout(30000); public Timeout timeout = new Timeout(30000);
private File secretFile;
private HttpFSServerWebServer webServer; private HttpFSServerWebServer webServer;
@BeforeClass @Before
public static void beforeClass() throws Exception { public void init() throws Exception {
File homeDir = GenericTestUtils.getTestDir(); File homeDir = GenericTestUtils.setupTestRootDir(TestHttpFSServerWebServer.class);
File confDir = new File(homeDir, "etc/hadoop"); File confDir = new File(homeDir, "etc/hadoop");
File logsDir = new File(homeDir, "logs"); File logsDir = new File(homeDir, "logs");
File tempDir = new File(homeDir, "temp"); File tempDir = new File(homeDir, "temp");
@ -71,43 +81,33 @@ public static void beforeClass() throws Exception {
System.setProperty("httpfs.home.dir", homeDir.getAbsolutePath()); System.setProperty("httpfs.home.dir", homeDir.getAbsolutePath());
System.setProperty("httpfs.log.dir", logsDir.getAbsolutePath()); System.setProperty("httpfs.log.dir", logsDir.getAbsolutePath());
System.setProperty("httpfs.config.dir", confDir.getAbsolutePath()); System.setProperty("httpfs.config.dir", confDir.getAbsolutePath());
FileUtils.writeStringToFile(new File(confDir, "httpfs-signature.secret"), secretFile = new File(System.getProperty("httpfs.config.dir"),
"foo", StandardCharsets.UTF_8); "httpfs-signature-custom.secret");
} }
@Before @After
public void setUp() throws Exception { public void teardown() throws Exception {
Configuration conf = new Configuration(); if (webServer != null) {
conf.set(HttpFSServerWebServer.HTTP_HOSTNAME_KEY, "localhost"); webServer.stop();
conf.setInt(HttpFSServerWebServer.HTTP_PORT_KEY, 0); }
conf.set(AuthenticationFilter.SIGNATURE_SECRET_FILE,
"httpfs-signature.secret");
Configuration sslConf = new Configuration();
webServer = new HttpFSServerWebServer(conf, sslConf);
} }
@Test @Test
public void testStartStop() throws Exception { public void testStartStop() throws Exception {
webServer = createWebServer(createConfigurationWithRandomSecret());
webServer.start(); webServer.start();
String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
URL url = new URL(webServer.getUrl(), MessageFormat.format(
"/webhdfs/v1/?user.name={0}&op=liststatus", user));
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
BufferedReader reader = new BufferedReader(
new InputStreamReader(conn.getInputStream()));
reader.readLine();
reader.close();
webServer.stop(); webServer.stop();
} }
@Test @Test
public void testJustStop() throws Exception { public void testJustStop() throws Exception {
webServer = createWebServer(createConfigurationWithRandomSecret());
webServer.stop(); webServer.stop();
} }
@Test @Test
public void testDoubleStop() throws Exception { public void testDoubleStop() throws Exception {
webServer = createWebServer(createConfigurationWithRandomSecret());
webServer.start(); webServer.start();
webServer.stop(); webServer.stop();
webServer.stop(); webServer.stop();
@ -115,9 +115,140 @@ public void testDoubleStop() throws Exception {
@Test @Test
public void testDoubleStart() throws Exception { public void testDoubleStart() throws Exception {
webServer = createWebServer(createConfigurationWithRandomSecret());
webServer.start(); webServer.start();
webServer.start(); webServer.start();
webServer.stop(); webServer.stop();
} }
@Test
public void testServiceWithSecretFile() throws Exception {
createSecretFile("foo");
webServer = createWebServer(createConfigurationWithSecretFile());
webServer.start();
assertServiceRespondsWithOK(webServer.getUrl());
assertSignerSecretProviderType(webServer.getHttpServer(),
FileSignerSecretProvider.class);
webServer.stop();
}
@Test
public void testServiceWithSecretFileWithDeprecatedConfigOnly()
throws Exception {
createSecretFile("foo");
Configuration conf = createConfiguration();
setDeprecatedSecretFile(conf, secretFile.getAbsolutePath());
webServer = createWebServer(conf);
webServer.start();
assertServiceRespondsWithOK(webServer.getUrl());
assertSignerSecretProviderType(webServer.getHttpServer(),
FileSignerSecretProvider.class);
webServer.stop();
}
@Test
public void testServiceWithSecretFileWithBothConfigOptions() throws Exception {
createSecretFile("foo");
Configuration conf = createConfigurationWithSecretFile();
setDeprecatedSecretFile(conf, secretFile.getAbsolutePath());
webServer = createWebServer(conf);
webServer.start();
assertServiceRespondsWithOK(webServer.getUrl());
assertSignerSecretProviderType(webServer.getHttpServer(),
FileSignerSecretProvider.class);
webServer.stop();
}
@Test
public void testServiceWithMissingSecretFile() throws Exception {
webServer = createWebServer(createConfigurationWithSecretFile());
webServer.start();
assertServiceRespondsWithOK(webServer.getUrl());
assertSignerSecretProviderType(webServer.getHttpServer(),
RandomSignerSecretProvider.class);
webServer.stop();
}
@Test
public void testServiceWithEmptySecretFile() throws Exception {
// The AuthenticationFilter.constructSecretProvider will do the fallback
// to the random secrets not the HttpFSAuthenticationFilter.
createSecretFile("");
webServer = createWebServer(createConfigurationWithSecretFile());
webServer.start();
assertServiceRespondsWithOK(webServer.getUrl());
assertSignerSecretProviderType(webServer.getHttpServer(),
RandomSignerSecretProvider.class);
webServer.stop();
}
private <T extends SignerSecretProvider> void assertSignerSecretProviderType(
HttpServer2 server, Class<T> expected) {
SignerSecretProvider secretProvider = (SignerSecretProvider)
server.getWebAppContext().getServletContext()
.getAttribute(SIGNER_SECRET_PROVIDER_ATTRIBUTE);
Assert.assertNotNull("The secret provider must not be null", secretProvider);
Assert.assertEquals("The secret provider must match the following", expected, secretProvider.getClass());
}
private void assertServiceRespondsWithOK(URL serviceURL)
throws Exception {
String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
URL url = new URL(serviceURL, MessageFormat.format(
"/webhdfs/v1/?user.name={0}&op=liststatus", user));
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(conn.getInputStream()))) {
reader.readLine();
}
}
private void setDeprecatedSecretFile(Configuration conf, String path) {
conf.set(HttpFSAuthenticationFilter.CONF_PREFIX +
AuthenticationFilter.SIGNATURE_SECRET_FILE,
path);
}
private Configuration createConfigurationWithRandomSecret() {
Configuration conf = createConfiguration();
conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.SIGNER_SECRET_PROVIDER, "random");
return conf;
}
private Configuration createConfigurationWithSecretFile() {
Configuration conf = createConfiguration();
conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
AuthenticationFilter.SIGNATURE_SECRET_FILE,
secretFile.getAbsolutePath());
return conf;
}
private Configuration createConfiguration() {
Configuration conf = new Configuration(false);
conf.set(HttpFSServerWebServer.HTTP_HOSTNAME_KEY, "localhost");
conf.setInt(HttpFSServerWebServer.HTTP_PORT_KEY, 0);
return conf;
}
private HttpFSServerWebServer createWebServer(Configuration conf)
throws Exception {
Configuration sslConf = new Configuration(false);
// The configuration must be stored for the HttpFSAuthenticatorFilter, because
// it accesses the configuration from the webapp: HttpFSServerWebApp.get().getConfig()
try (FileOutputStream os = new FileOutputStream(
new File(System.getProperty("httpfs.config.dir"), "httpfs-site.xml"))) {
conf.writeXml(os);
}
return new HttpFSServerWebServer(conf, sslConf);
}
private void createSecretFile(String content) throws IOException {
Assert.assertTrue(secretFile.createNewFile());
FileUtils.writeStringToFile(secretFile, content, StandardCharsets.UTF_8);
}
} }

View File

@ -1,58 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.http.server;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Shell;
import org.junit.BeforeClass;
import java.io.File;
/**
* Unlike {@link TestHttpFSServerWebServer}, httpfs-signature.secret doesn't
* exist. In this case, a random secret is used.
*/
public class TestHttpFSServerWebServerWithRandomSecret extends
TestHttpFSServerWebServer {
@BeforeClass
public static void beforeClass() throws Exception {
File homeDir = GenericTestUtils.getTestDir();
File confDir = new File(homeDir, "etc/hadoop");
File logsDir = new File(homeDir, "logs");
File tempDir = new File(homeDir, "temp");
confDir.mkdirs();
logsDir.mkdirs();
tempDir.mkdirs();
if (Shell.WINDOWS) {
File binDir = new File(homeDir, "bin");
binDir.mkdirs();
File winutils = Shell.getWinUtilsFile();
if (winutils.exists()) {
FileUtils.copyFileToDirectory(winutils, binDir);
}
}
System.setProperty("hadoop.home.dir", homeDir.getAbsolutePath());
System.setProperty("hadoop.log.dir", logsDir.getAbsolutePath());
System.setProperty("httpfs.home.dir", homeDir.getAbsolutePath());
System.setProperty("httpfs.log.dir", logsDir.getAbsolutePath());
System.setProperty("httpfs.config.dir", confDir.getAbsolutePath());
}
}