HDFS-13654. Use a random secret when a secret file doesn't exist in HttpFS. This should be the default.
This commit is contained in:
parent
219e286722
commit
35f1014b3e
@ -304,7 +304,6 @@
|
||||
<configuration>
|
||||
<excludes>
|
||||
<exclude>src/test/resources/classutils.txt</exclude>
|
||||
<exclude>src/main/conf/httpfs-signature.secret</exclude>
|
||||
</excludes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
|
@ -1 +0,0 @@
|
||||
hadoop httpfs secret
|
@ -21,6 +21,8 @@
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hdfs.web.WebHdfsConstants;
|
||||
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
|
||||
import org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
|
||||
import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
|
||||
import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationFilter;
|
||||
import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticationHandler;
|
||||
|
||||
@ -37,8 +39,8 @@
|
||||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* Subclass of hadoop-auth <code>AuthenticationFilter</code> that obtains its configuration
|
||||
* from HttpFSServer's server configuration.
|
||||
* Subclass of hadoop-auth <code>AuthenticationFilter</code> that obtains its
|
||||
* configuration from HttpFSServer's server configuration.
|
||||
*/
|
||||
@InterfaceAudience.Private
|
||||
public class HttpFSAuthenticationFilter
|
||||
@ -46,7 +48,8 @@ public class HttpFSAuthenticationFilter
|
||||
|
||||
static final String CONF_PREFIX = "httpfs.authentication.";
|
||||
|
||||
private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + ".file";
|
||||
private static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET
|
||||
+ ".file";
|
||||
|
||||
/**
|
||||
* Returns the hadoop-auth configuration from HttpFSServer's configuration.
|
||||
@ -78,22 +81,25 @@ protected Properties getConfiguration(String configPrefix,
|
||||
|
||||
String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
|
||||
if (signatureSecretFile == null) {
|
||||
throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
|
||||
throw new RuntimeException("Undefined property: "
|
||||
+ SIGNATURE_SECRET_FILE);
|
||||
}
|
||||
|
||||
try {
|
||||
StringBuilder secret = new StringBuilder();
|
||||
Reader reader = new InputStreamReader(Files.newInputStream(Paths.get(
|
||||
signatureSecretFile)), StandardCharsets.UTF_8);
|
||||
int c = reader.read();
|
||||
while (c > -1) {
|
||||
secret.append((char)c);
|
||||
c = reader.read();
|
||||
if (!isRandomSecret(filterConfig)) {
|
||||
try (Reader reader = new InputStreamReader(Files.newInputStream(
|
||||
Paths.get(signatureSecretFile)), StandardCharsets.UTF_8)) {
|
||||
StringBuilder secret = new StringBuilder();
|
||||
int c = reader.read();
|
||||
while (c > -1) {
|
||||
secret.append((char) c);
|
||||
c = reader.read();
|
||||
}
|
||||
props.setProperty(AuthenticationFilter.SIGNATURE_SECRET,
|
||||
secret.toString());
|
||||
} catch (IOException ex) {
|
||||
throw new RuntimeException("Could not read HttpFS signature "
|
||||
+ "secret file: " + signatureSecretFile);
|
||||
}
|
||||
reader.close();
|
||||
props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, secret.toString());
|
||||
} catch (IOException ex) {
|
||||
throw new RuntimeException("Could not read HttpFS signature secret file: " + signatureSecretFile);
|
||||
}
|
||||
setAuthHandlerClass(props);
|
||||
String dtkind = WebHdfsConstants.WEBHDFS_TOKEN_KIND.toString();
|
||||
@ -115,4 +121,12 @@ protected Configuration getProxyuserConfiguration(FilterConfig filterConfig) {
|
||||
return conf;
|
||||
}
|
||||
|
||||
private boolean isRandomSecret(FilterConfig filterConfig) {
|
||||
SignerSecretProvider secretProvider = (SignerSecretProvider) filterConfig
|
||||
.getServletContext().getAttribute(SIGNER_SECRET_PROVIDER_ATTRIBUTE);
|
||||
if (secretProvider == null) {
|
||||
return false;
|
||||
}
|
||||
return secretProvider.getClass() == RandomSignerSecretProvider.class;
|
||||
}
|
||||
}
|
||||
|
@ -157,6 +157,9 @@
|
||||
|
||||
If multiple HttpFS servers are used in a load-balancer/round-robin fashion,
|
||||
they should share the secret file.
|
||||
|
||||
If the secret file specified here does not exist, a random secret is
|
||||
generated at startup time.
|
||||
</description>
|
||||
</property>
|
||||
|
||||
|
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs.http.server;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Shell;
import org.junit.BeforeClass;

import java.io.File;

/**
 * Unlike {@link TestHttpFSServerWebServer}, httpfs-signature.secret doesn't
 * exist. In this case, a random secret is used.
 */
public class TestHttpFSServerWebServerWithRandomSecret extends
    TestHttpFSServerWebServer {
  @BeforeClass
  public static void beforeClass() throws Exception {
    // Lay out a minimal HttpFS home (etc/hadoop, logs, temp) under the
    // test directory; no httpfs-signature.secret is written on purpose.
    final File home = GenericTestUtils.getTestDir();
    final File conf = new File(home, "etc/hadoop");
    final File logs = new File(home, "logs");
    final File temp = new File(home, "temp");
    for (File dir : new File[] {conf, logs, temp}) {
      dir.mkdirs();
    }

    // On Windows the Hadoop shell utilities need winutils.exe under bin/.
    if (Shell.WINDOWS) {
      final File bin = new File(home, "bin");
      bin.mkdirs();
      final File winutils = Shell.getWinUtilsFile();
      if (winutils.exists()) {
        FileUtils.copyFileToDirectory(winutils, bin);
      }
    }

    System.setProperty("hadoop.home.dir", home.getAbsolutePath());
    System.setProperty("hadoop.log.dir", logs.getAbsolutePath());
    System.setProperty("httpfs.home.dir", home.getAbsolutePath());
    System.setProperty("httpfs.log.dir", logs.getAbsolutePath());
    System.setProperty("httpfs.config.dir", conf.getAbsolutePath());
  }
}
|
Loading…
Reference in New Issue
Block a user