HDFS-16129. Fixing the signature secret file misusage in HttpFS. Contributed by Tamas Domok
HDFS-16129. Fixing the signature secret file misusage in HttpFS.

The signature secret file was not used in HttpFS:

- If the configuration did not contain the deprecated
  httpfs.authentication.signature.secret.file option, the random secret
  provider was used instead.
- If both options (httpfs.* and hadoop.http.*) were set, the
  HttpFSAuthenticationFilter could not read the file because the file
  path was not substituted properly.

!NOTE! Behavioral change: the deprecated httpfs.* configuration values
are overwritten with the hadoop.http.* values.

The commit also contains a follow-up change to YARN-10814: empty secret
files now result in a random secret provider.

Co-authored-by: Tamas Domok <tdomok@cloudera.com>
parent f92c6750ec
commit f93e8fbf2d
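In practice the fix makes the hadoop.http.authentication.* keys the authoritative ones for HttpFS, with the httpfs.authentication.* keys kept only as deprecated aliases. A minimal sketch of the now-preferred configuration, using the real org.apache.hadoop.conf.Configuration API (the secret file path is hypothetical):

    import org.apache.hadoop.conf.Configuration;

    public class HttpFSSecretConfigSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        // Preferred key after this change; the httpfs.* variant is deprecated.
        conf.set("hadoop.http.authentication.signature.secret.file",
            "/etc/security/httpfs-signature.secret");   // hypothetical path
        System.out.println(
            conf.get("hadoop.http.authentication.signature.secret.file"));
      }
    }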
HttpServer2.java:

@@ -243,7 +243,9 @@ public static class Builder {
     private String hostName;
     private boolean disallowFallbackToRandomSignerSecretProvider;
-    private String authFilterConfigurationPrefix = "hadoop.http.authentication.";
+    private final List<String> authFilterConfigurationPrefixes =
+        new ArrayList<>(Collections.singletonList(
+            "hadoop.http.authentication."));
     private String excludeCiphers;

     private boolean xFrameEnabled;
@@ -365,8 +367,15 @@ public Builder disallowFallbackToRandomSingerSecretProvider(boolean value) {
       return this;
     }

-    public Builder authFilterConfigurationPrefix(String value) {
-      this.authFilterConfigurationPrefix = value;
+    public Builder setAuthFilterConfigurationPrefix(String value) {
+      this.authFilterConfigurationPrefixes.clear();
+      this.authFilterConfigurationPrefixes.add(value);
       return this;
     }

+    public Builder setAuthFilterConfigurationPrefixes(String[] prefixes) {
+      this.authFilterConfigurationPrefixes.clear();
+      Collections.addAll(this.authFilterConfigurationPrefixes, prefixes);
+      return this;
+    }
+
@@ -473,8 +482,10 @@ public HttpServer2 build() throws IOException {
       HttpServer2 server = new HttpServer2(this);

       if (this.securityEnabled &&
-          !this.conf.get(authFilterConfigurationPrefix + "type").
-              equals(PseudoAuthenticationHandler.TYPE)) {
+          authFilterConfigurationPrefixes.stream().noneMatch(
+              prefix -> this.conf.get(prefix + "type")
+                  .equals(PseudoAuthenticationHandler.TYPE))
+          ) {
         server.initSpnego(conf, hostName, usernameConfKey, keytabConfKey);
       }
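For context, PseudoAuthenticationHandler.TYPE is the string "simple", so SPNEGO is now initialized only when none of the registered prefixes selects pseudo authentication. A standalone sketch of the same stream predicate (the prefixes and settings below are hypothetical stand-ins, not Hadoop code):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class NoneMatchSketch {
      public static void main(String[] args) {
        List<String> prefixes = Arrays.asList(
            "httpfs.authentication.", "hadoop.http.authentication.");
        Map<String, String> conf = new HashMap<>();
        conf.put("hadoop.http.authentication.type", "kerberos");
        // True when no prefix configures the "simple" (pseudo) handler;
        // unset prefixes yield null, which safely compares as non-"simple".
        boolean noPseudoAuth = prefixes.stream().noneMatch(
            prefix -> "simple".equals(conf.get(prefix + "type")));
        System.out.println(noPseudoAuth);   // prints: true
      }
    }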
@@ -811,18 +822,25 @@ private static SignerSecretProvider constructSecretProvider(final Builder b,
       throws Exception {
     final Configuration conf = b.conf;
     Properties config = getFilterProperties(conf,
-        b.authFilterConfigurationPrefix);
+        b.authFilterConfigurationPrefixes);
     return AuthenticationFilter.constructSecretProvider(
         ctx, config, b.disallowFallbackToRandomSignerSecretProvider);
   }

-  private static Properties getFilterProperties(Configuration conf, String
-      prefix) {
-    Properties prop = new Properties();
-    Map<String, String> filterConfig = AuthenticationFilterInitializer
-        .getFilterConfigMap(conf, prefix);
-    prop.putAll(filterConfig);
-    return prop;
+  public static Properties getFilterProperties(Configuration conf, List<String> prefixes) {
+    Properties props = new Properties();
+    for (String prefix : prefixes) {
+      Map<String, String> filterConfigMap =
+          AuthenticationFilterInitializer.getFilterConfigMap(conf, prefix);
+      for (Map.Entry<String, String> entry : filterConfigMap.entrySet()) {
+        Object previous = props.setProperty(entry.getKey(), entry.getValue());
+        if (previous != null && !previous.equals(entry.getValue())) {
+          LOG.warn("Overwriting configuration for key='{}' with value='{}' " +
+              "previous value='{}'", entry.getKey(), entry.getValue(), previous);
+        }
+      }
+    }
+    return props;
   }

   private static void addNoCacheFilter(ServletContextHandler ctxt) {
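Because each later prefix overwrites values gathered under an earlier one, and CONF_PREFIXES lists httpfs.authentication. before hadoop.http.authentication., the hadoop.http.* value wins on a collision; this is exactly the behavioral change called out in the commit message. A standalone sketch of that merge order with plain java.util.Properties (the key and paths are hypothetical):

    import java.util.Properties;

    public class PrefixMergeSketch {
      public static void main(String[] args) {
        Properties props = new Properties();
        // First pass: value gathered under the deprecated httpfs.* prefix.
        props.setProperty("signature.secret.file", "/etc/httpfs/old.secret");
        // Second pass: the hadoop.http.* value overwrites it; setProperty
        // returns the previous value, which is what triggers the LOG.warn.
        Object previous = props.setProperty(
            "signature.secret.file", "/etc/hadoop/new.secret");
        System.out.println("previous  = " + previous);
        System.out.println("effective = "
            + props.getProperty("signature.secret.file"));
      }
    }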
KMSWebServer.java:

@@ -118,7 +118,7 @@ public class KMSWebServer {
         .setName(NAME)
         .setConf(conf)
         .setSSLConf(sslConf)
-        .authFilterConfigurationPrefix(KMSAuthenticationFilter.CONFIG_PREFIX)
+        .setAuthFilterConfigurationPrefix(KMSAuthenticationFilter.CONFIG_PREFIX)
         .setACL(new AccessControlList(conf.get(
             KMSConfiguration.HTTP_ADMINS_KEY, " ")))
         .addEndpoint(endpoint)
HttpFSAuthenticationFilter.java:

@@ -20,6 +20,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.web.WebHdfsConstants;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
 import org.apache.hadoop.security.authentication.util.SignerSecretProvider;

@@ -35,6 +36,8 @@
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Map;
 import java.util.Properties;

@@ -46,9 +49,9 @@
 public class HttpFSAuthenticationFilter
     extends DelegationTokenAuthenticationFilter {

-  static final String CONF_PREFIX = "httpfs.authentication.";
-
-  static final String HADOOP_HTTP_CONF_PREFIX = "hadoop.http.authentication.";
+  public static final String CONF_PREFIX = "httpfs.authentication.";
+  public static final String HADOOP_HTTP_CONF_PREFIX = "hadoop.http.authentication.";
+  static final String[] CONF_PREFIXES = {CONF_PREFIX, HADOOP_HTTP_CONF_PREFIX};

   private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET
       + ".file";
@@ -69,27 +72,9 @@ public class HttpFSAuthenticationFilter
   @Override
   protected Properties getConfiguration(String configPrefix,
       FilterConfig filterConfig) throws ServletException{
-    Properties props = new Properties();
     Configuration conf = HttpFSServerWebApp.get().getConfig();

-    props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
-    for (Map.Entry<String, String> entry : conf) {
-      String name = entry.getKey();
-      if (name.startsWith(HADOOP_HTTP_CONF_PREFIX)) {
-        name = name.substring(HADOOP_HTTP_CONF_PREFIX.length());
-        props.setProperty(name, entry.getValue());
-      }
-    }
-
-    // Replace Hadoop Http Authentication Configs with HttpFS specific Configs
-    for (Map.Entry<String, String> entry : conf) {
-      String name = entry.getKey();
-      if (name.startsWith(CONF_PREFIX)) {
-        String value = conf.get(name);
-        name = name.substring(CONF_PREFIX.length());
-        props.setProperty(name, value);
-      }
-    }
+    Properties props = HttpServer2.getFilterProperties(conf,
+        new ArrayList<>(Arrays.asList(CONF_PREFIXES)));

     String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
     if (signatureSecretFile == null) {
@@ -106,8 +91,16 @@ protected Properties getConfiguration(String configPrefix,
         secret.append((char) c);
         c = reader.read();
       }
+
+      String secretString = secret.toString();
+      if (secretString.isEmpty()) {
+        throw new RuntimeException(
+            "No secret in HttpFs signature secret file: "
+                + signatureSecretFile);
+      }
+
       props.setProperty(AuthenticationFilter.SIGNATURE_SECRET,
-          secret.toString());
+          secretString);
     } catch (IOException ex) {
       throw new RuntimeException("Could not read HttpFS signature "
           + "secret file: " + signatureSecretFile);
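The new guard makes a readable but empty secret file fail fast in the filter instead of signing cookies with an empty secret; per the tests further down, the fallback to a random secret for empty files happens in AuthenticationFilter.constructSecretProvider. A plain-Java approximation of the read-and-validate step (the file name is hypothetical):

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class SecretFileCheckSketch {
      public static void main(String[] args) throws IOException {
        String signatureSecretFile = "/tmp/httpfs-signature.secret";
        String secret = new String(
            Files.readAllBytes(Paths.get(signatureSecretFile)),
            StandardCharsets.UTF_8);
        // Mirrors the check added above: reject an empty secret up front.
        if (secret.isEmpty()) {
          throw new RuntimeException(
              "No secret in HttpFs signature secret file: "
                  + signatureSecretFile);
        }
      }
    }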
HttpFSServerWebServer.java:

@@ -17,6 +17,14 @@
  */
 package org.apache.hadoop.fs.http.server;

+import com.google.common.annotations.VisibleForTesting;
+
+import static org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter.CONF_PREFIX;
+import static org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX;
+import static org.apache.hadoop.security.authentication.server.AuthenticationFilter.AUTH_TYPE;
+import static org.apache.hadoop.security.authentication.server.AuthenticationFilter.SIGNATURE_SECRET_FILE;
+import static org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler.KEYTAB;
+import static org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler.PRINCIPAL;
 import static org.apache.hadoop.util.StringUtils.startupShutdownMessage;

 import java.io.IOException;

@@ -65,6 +73,7 @@ public class HttpFSServerWebServer {
   private static final String SERVLET_PATH = "/webhdfs";

   static {
+    addDeprecatedKeys();
     Configuration.addDefaultResource(HTTPFS_DEFAULT_XML);
     Configuration.addDefaultResource(HTTPFS_SITE_XML);
   }

@@ -124,7 +133,8 @@ public class HttpFSServerWebServer {
         .setName(NAME)
         .setConf(conf)
         .setSSLConf(sslConf)
-        .authFilterConfigurationPrefix(HttpFSAuthenticationFilter.CONF_PREFIX)
+        .setAuthFilterConfigurationPrefixes(
+            HttpFSAuthenticationFilter.CONF_PREFIXES)
         .setACL(new AccessControlList(conf.get(HTTP_ADMINS_KEY, " ")))
         .addEndpoint(endpoint)
         .build();

@@ -178,6 +188,11 @@ public URL getUrl() {
     }
   }

+  @VisibleForTesting
+  HttpServer2 getHttpServer() {
+    return httpServer;
+  }
+
   public static void main(String[] args) throws Exception {
     startupShutdownMessage(HttpFSServerWebServer.class, args, LOG);
     Configuration conf = new Configuration(true);

@@ -187,4 +202,17 @@ public static void main(String[] args) throws Exception {
     webServer.start();
     webServer.join();
   }
+
+  public static void addDeprecatedKeys() {
+    Configuration.addDeprecations(new Configuration.DeprecationDelta[]{
+        new Configuration.DeprecationDelta(CONF_PREFIX + KEYTAB,
+            HADOOP_HTTP_CONF_PREFIX + KEYTAB),
+        new Configuration.DeprecationDelta(CONF_PREFIX + PRINCIPAL,
+            HADOOP_HTTP_CONF_PREFIX + PRINCIPAL),
+        new Configuration.DeprecationDelta(CONF_PREFIX + SIGNATURE_SECRET_FILE,
+            HADOOP_HTTP_CONF_PREFIX + SIGNATURE_SECRET_FILE),
+        new Configuration.DeprecationDelta(CONF_PREFIX + AUTH_TYPE,
+            HADOOP_HTTP_CONF_PREFIX + AUTH_TYPE)
+    });
+  }
 }
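addDeprecatedKeys() wires the old keys to the new ones through Configuration's standard deprecation mechanism, so a value set under a deprecated key stays readable under its replacement. A small sketch against that same API (the secret path is hypothetical):

    import org.apache.hadoop.conf.Configuration;

    public class DeprecationSketch {
      public static void main(String[] args) {
        Configuration.addDeprecations(new Configuration.DeprecationDelta[]{
            new Configuration.DeprecationDelta(
                "httpfs.authentication.signature.secret.file",
                "hadoop.http.authentication.signature.secret.file")
        });
        Configuration conf = new Configuration(false);
        // Setting the deprecated key transparently populates the new key.
        conf.set("httpfs.authentication.signature.secret.file",
            "/path/to/secret");
        System.out.println(conf.get(
            "hadoop.http.authentication.signature.secret.file"));
        // prints: /path/to/secret
      }
    }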
httpfs-default.xml:

@@ -158,8 +158,8 @@
       If multiple HttpFS servers are used in a load-balancer/round-robin fashion,
       they should share the secret file.

-      If the secret file specified here does not exist, random secret is
-      generated at startup time.
+      If the secret file specified here does not exist or it is empty, a random
+      secret is generated at startup time.

       httpfs.authentication.signature.secret.file is deprecated. Instead use
       hadoop.http.authentication.signature.secret.file.
The HttpFS tests now configure the secret file (and, where applicable, the auth type) through the hadoop.http.* keys:

@@ -33,6 +33,7 @@
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter;
 import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
 import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.AclStatus;

@@ -58,6 +59,7 @@
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.test.HFSTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.LambdaTestUtils;

@@ -148,7 +150,8 @@ private void createHttpFSServer() throws Exception {
         HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
     conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
         HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
-    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+        AuthenticationFilter.SIGNATURE_SECRET_FILE, secretFile.getAbsolutePath());
     File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     os = new FileOutputStream(httpfsSite);
     conf.writeXml(os);
@@ -23,6 +23,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.test.HTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.TestDir;

@@ -128,8 +129,9 @@ private void createHttpFSServer() throws Exception {
     conf.set("httpfs.proxyuser." +
         HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
         HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
-    conf.set("httpfs.authentication.signature.secret.file",
-        secretFile.getAbsolutePath());
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+        AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        secretFile.getAbsolutePath());

     File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     os = new FileOutputStream(httpfsSite);
@@ -231,8 +231,9 @@ private Configuration createHttpFSConf(boolean addDelegationTokenAuthHandler,
     // HTTPFS configuration
     conf = new Configuration(false);
     if (addDelegationTokenAuthHandler) {
-      conf.set("httpfs.authentication.type",
-          HttpFSKerberosAuthenticationHandlerForTesting.class.getName());
+      conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+          AuthenticationFilter.AUTH_TYPE,
+          HttpFSKerberosAuthenticationHandlerForTesting.class.getName());
     }
     conf.set("httpfs.services.ext", MockGroups.class.getName());
     conf.set("httpfs.admin.group", HadoopUsersConfTestHelper.

@@ -243,8 +244,9 @@ private Configuration createHttpFSConf(boolean addDelegationTokenAuthHandler,
     conf.set("httpfs.proxyuser." +
         HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
         HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
-    conf.set("httpfs.authentication.signature.secret.file",
-        secretFile.getAbsolutePath());
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+        AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        secretFile.getAbsolutePath());
     conf.set("httpfs.hadoop.config.dir", hadoopConfDir.toString());
     if (sslEnabled) {
       conf.set("httpfs.ssl.enabled", "true");
@@ -23,6 +23,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.test.HTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.TestDir;

@@ -136,8 +137,9 @@ private void createHttpFSServer() throws Exception {
     conf.set("httpfs.proxyuser." +
         HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
         HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
-    conf.set("httpfs.authentication.signature.secret.file",
-        secretFile.getAbsolutePath());
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+        AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        secretFile.getAbsolutePath());

     File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     os = new FileOutputStream(httpfsSite);
@@ -23,6 +23,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.test.HTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.test.TestDir;

@@ -137,8 +138,9 @@ private void createHttpFSServer() throws Exception {
     conf.set("httpfs.proxyuser." +
         HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
         HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
-    conf.set("httpfs.authentication.signature.secret.file",
-        secretFile.getAbsolutePath());
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+        AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        secretFile.getAbsolutePath());

     File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     os = new FileOutputStream(httpfsSite);
TestHttpFSServerWebServer.java:

@@ -17,9 +17,11 @@
  */
 package org.apache.hadoop.fs.http.server;

+import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileOutputStream;
-import java.io.BufferedReader;
+import java.io.InputStreamReader;
 import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;

@@ -27,17 +29,23 @@

 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.util.FileSignerSecretProvider;
+import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
+import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
 import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
 import org.junit.Before;
-import org.junit.BeforeClass;
+import org.junit.After;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.Timeout;

+import static org.apache.hadoop.security.authentication.server.AuthenticationFilter.SIGNER_SECRET_PROVIDER_ATTRIBUTE;
+
 /**
  * Test {@link HttpFSServerWebServer}.
  */
@@ -45,11 +53,13 @@ public class TestHttpFSServerWebServer {

   @Rule
   public Timeout timeout = new Timeout(30000);
+
+  private File secretFile;
   private HttpFSServerWebServer webServer;

-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    File homeDir = GenericTestUtils.getTestDir();
+  @Before
+  public void init() throws Exception {
+    File homeDir = GenericTestUtils.setupTestRootDir(TestHttpFSServerWebServer.class);
     File confDir = new File(homeDir, "etc/hadoop");
     File logsDir = new File(homeDir, "logs");
     File tempDir = new File(homeDir, "temp");
@@ -71,43 +81,33 @@ public static void beforeClass() throws Exception {
     System.setProperty("httpfs.home.dir", homeDir.getAbsolutePath());
     System.setProperty("httpfs.log.dir", logsDir.getAbsolutePath());
     System.setProperty("httpfs.config.dir", confDir.getAbsolutePath());
-    FileUtils.writeStringToFile(new File(confDir, "httpfs-signature.secret"),
-        "foo", StandardCharsets.UTF_8);
+    secretFile = new File(System.getProperty("httpfs.config.dir"),
+        "httpfs-signature-custom.secret");
   }

-  @Before
-  public void setUp() throws Exception {
-    Configuration conf = new Configuration();
-    conf.set(HttpFSServerWebServer.HTTP_HOSTNAME_KEY, "localhost");
-    conf.setInt(HttpFSServerWebServer.HTTP_PORT_KEY, 0);
-    conf.set(AuthenticationFilter.SIGNATURE_SECRET_FILE,
-        "httpfs-signature.secret");
-    Configuration sslConf = new Configuration();
-    webServer = new HttpFSServerWebServer(conf, sslConf);
+  @After
+  public void teardown() throws Exception {
+    if (webServer != null) {
+      webServer.stop();
+    }
   }

   @Test
   public void testStartStop() throws Exception {
+    webServer = createWebServer(createConfigurationWithRandomSecret());
     webServer.start();
-    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
-    URL url = new URL(webServer.getUrl(), MessageFormat.format(
-        "/webhdfs/v1/?user.name={0}&op=liststatus", user));
-    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
-    BufferedReader reader = new BufferedReader(
-        new InputStreamReader(conn.getInputStream()));
-    reader.readLine();
-    reader.close();
+    assertServiceRespondsWithOK(webServer.getUrl());
     webServer.stop();
   }

   @Test
   public void testJustStop() throws Exception {
+    webServer = createWebServer(createConfigurationWithRandomSecret());
     webServer.stop();
   }

   @Test
   public void testDoubleStop() throws Exception {
+    webServer = createWebServer(createConfigurationWithRandomSecret());
     webServer.start();
     webServer.stop();
     webServer.stop();
@@ -115,9 +115,140 @@ public void testDoubleStop() throws Exception {

   @Test
   public void testDoubleStart() throws Exception {
+    webServer = createWebServer(createConfigurationWithRandomSecret());
     webServer.start();
     webServer.start();
     webServer.stop();
   }

+  @Test
+  public void testServiceWithSecretFile() throws Exception {
+    createSecretFile("foo");
+    webServer = createWebServer(createConfigurationWithSecretFile());
+    webServer.start();
+    assertServiceRespondsWithOK(webServer.getUrl());
+    assertSignerSecretProviderType(webServer.getHttpServer(),
+        FileSignerSecretProvider.class);
+    webServer.stop();
+  }
+
+  @Test
+  public void testServiceWithSecretFileWithDeprecatedConfigOnly()
+      throws Exception {
+    createSecretFile("foo");
+    Configuration conf = createConfiguration();
+    setDeprecatedSecretFile(conf, secretFile.getAbsolutePath());
+    webServer = createWebServer(conf);
+    webServer.start();
+    assertServiceRespondsWithOK(webServer.getUrl());
+    assertSignerSecretProviderType(webServer.getHttpServer(),
+        FileSignerSecretProvider.class);
+    webServer.stop();
+  }
+
+  @Test
+  public void testServiceWithSecretFileWithBothConfigOptions() throws Exception {
+    createSecretFile("foo");
+    Configuration conf = createConfigurationWithSecretFile();
+    setDeprecatedSecretFile(conf, secretFile.getAbsolutePath());
+    webServer = createWebServer(conf);
+    webServer.start();
+    assertServiceRespondsWithOK(webServer.getUrl());
+    assertSignerSecretProviderType(webServer.getHttpServer(),
+        FileSignerSecretProvider.class);
+    webServer.stop();
+  }
+
+  @Test
+  public void testServiceWithMissingSecretFile() throws Exception {
+    webServer = createWebServer(createConfigurationWithSecretFile());
+    webServer.start();
+    assertServiceRespondsWithOK(webServer.getUrl());
+    assertSignerSecretProviderType(webServer.getHttpServer(),
+        RandomSignerSecretProvider.class);
+    webServer.stop();
+  }
+
+  @Test
+  public void testServiceWithEmptySecretFile() throws Exception {
+    // The AuthenticationFilter.constructSecretProvider will do the fallback
+    // to the random secrets, not the HttpFSAuthenticationFilter.
+    createSecretFile("");
+    webServer = createWebServer(createConfigurationWithSecretFile());
+    webServer.start();
+    assertServiceRespondsWithOK(webServer.getUrl());
+    assertSignerSecretProviderType(webServer.getHttpServer(),
+        RandomSignerSecretProvider.class);
+    webServer.stop();
+  }
+
+  private <T extends SignerSecretProvider> void assertSignerSecretProviderType(
+      HttpServer2 server, Class<T> expected) {
+    SignerSecretProvider secretProvider = (SignerSecretProvider)
+        server.getWebAppContext().getServletContext()
+            .getAttribute(SIGNER_SECRET_PROVIDER_ATTRIBUTE);
+    Assert.assertNotNull("The secret provider must not be null", secretProvider);
+    Assert.assertEquals("The secret provider must match the following",
+        expected, secretProvider.getClass());
+  }
+
+  private void assertServiceRespondsWithOK(URL serviceURL)
+      throws Exception {
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
+    URL url = new URL(serviceURL, MessageFormat.format(
+        "/webhdfs/v1/?user.name={0}&op=liststatus", user));
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+    try (BufferedReader reader = new BufferedReader(
+        new InputStreamReader(conn.getInputStream()))) {
+      reader.readLine();
+    }
+  }
+
+  private void setDeprecatedSecretFile(Configuration conf, String path) {
+    conf.set(HttpFSAuthenticationFilter.CONF_PREFIX +
+        AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        path);
+  }
+
+  private Configuration createConfigurationWithRandomSecret() {
+    Configuration conf = createConfiguration();
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+        AuthenticationFilter.SIGNER_SECRET_PROVIDER, "random");
+    return conf;
+  }
+
+  private Configuration createConfigurationWithSecretFile() {
+    Configuration conf = createConfiguration();
+    conf.set(HttpFSAuthenticationFilter.HADOOP_HTTP_CONF_PREFIX +
+        AuthenticationFilter.SIGNATURE_SECRET_FILE,
+        secretFile.getAbsolutePath());
+    return conf;
+  }
+
+  private Configuration createConfiguration() {
+    Configuration conf = new Configuration(false);
+    conf.set(HttpFSServerWebServer.HTTP_HOSTNAME_KEY, "localhost");
+    conf.setInt(HttpFSServerWebServer.HTTP_PORT_KEY, 0);
+    return conf;
+  }
+
+  private HttpFSServerWebServer createWebServer(Configuration conf)
+      throws Exception {
+    Configuration sslConf = new Configuration(false);
+
+    // The configuration must be stored for the HttpFSAuthenticationFilter,
+    // because it accesses the configuration from the webapp:
+    // HttpFSServerWebApp.get().getConfig()
+    try (FileOutputStream os = new FileOutputStream(
+        new File(System.getProperty("httpfs.config.dir"), "httpfs-site.xml"))) {
+      conf.writeXml(os);
+    }
+
+    return new HttpFSServerWebServer(conf, sslConf);
+  }
+
+  private void createSecretFile(String content) throws IOException {
+    Assert.assertTrue(secretFile.createNewFile());
+    FileUtils.writeStringToFile(secretFile, content, StandardCharsets.UTF_8);
+  }
+
 }
TestHttpFSServerWebServerWithRandomSecret.java (deleted):

@@ -1,58 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.fs.http.server;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
-import org.junit.BeforeClass;
-
-import java.io.File;
-
-/**
- * Unlike {@link TestHttpFSServerWebServer}, httpfs-signature.secret doesn't
- * exist. In this case, a random secret is used.
- */
-public class TestHttpFSServerWebServerWithRandomSecret extends
-    TestHttpFSServerWebServer {
-  @BeforeClass
-  public static void beforeClass() throws Exception {
-    File homeDir = GenericTestUtils.getTestDir();
-    File confDir = new File(homeDir, "etc/hadoop");
-    File logsDir = new File(homeDir, "logs");
-    File tempDir = new File(homeDir, "temp");
-    confDir.mkdirs();
-    logsDir.mkdirs();
-    tempDir.mkdirs();
-
-    if (Shell.WINDOWS) {
-      File binDir = new File(homeDir, "bin");
-      binDir.mkdirs();
-      File winutils = Shell.getWinUtilsFile();
-      if (winutils.exists()) {
-        FileUtils.copyFileToDirectory(winutils, binDir);
-      }
-    }
-
-    System.setProperty("hadoop.home.dir", homeDir.getAbsolutePath());
-    System.setProperty("hadoop.log.dir", logsDir.getAbsolutePath());
-    System.setProperty("httpfs.home.dir", homeDir.getAbsolutePath());
-    System.setProperty("httpfs.log.dir", logsDir.getAbsolutePath());
-    System.setProperty("httpfs.config.dir", confDir.getAbsolutePath());
-  }
-}