HDFS-12114. Consistent HttpFS property names. Contributed by John Zhuge.

This commit is contained in:
John Zhuge 2017-07-10 17:22:00 -07:00
parent 9144fd9e9b
commit ac0a04a6e1
5 changed files with 18 additions and 33 deletions

View File

@@ -46,17 +46,16 @@ public class HttpFSServerWebServer {
private static final String HTTPFS_SITE_XML = "httpfs-site.xml";
// HTTP properties
static final String HTTP_PORT_KEY = "hadoop.httpfs.http.port";
static final String HTTP_PORT_KEY = "httpfs.http.port";
private static final int HTTP_PORT_DEFAULT = 14000;
static final String HTTP_HOST_KEY = "hadoop.httpfs.http.host";
private static final String HTTP_HOST_DEFAULT = "0.0.0.0";
static final String HTTP_HOSTNAME_KEY = "httpfs.http.hostname";
private static final String HTTP_HOSTNAME_DEFAULT = "0.0.0.0";
// SSL properties
static final String SSL_ENABLED_KEY = "httpfs.ssl.enabled";
private static final boolean SSL_ENABLED_DEFAULT = false;
private static final String HTTP_ADMINS_KEY =
"hadoop.httpfs.http.administrators";
private static final String HTTP_ADMINS_KEY = "httpfs.http.administrators";
private static final String NAME = "webhdfs";
private static final String SERVLET_PATH = "/webhdfs";
@@ -74,6 +73,8 @@ public class HttpFSServerWebServer {
// Override configuration with deprecated environment variables.
deprecateEnv("HTTPFS_TEMP", conf, HttpServer2.HTTP_TEMP_DIR_KEY,
HTTPFS_SITE_XML);
deprecateEnv("HTTPFS_HTTP_HOSTNAME", conf, HTTP_HOSTNAME_KEY,
HTTPFS_SITE_XML);
deprecateEnv("HTTPFS_HTTP_PORT", conf, HTTP_PORT_KEY,
HTTPFS_SITE_XML);
deprecateEnv("HTTPFS_MAX_THREADS", conf,
@@ -95,7 +96,7 @@ public class HttpFSServerWebServer {
SSL_ENABLED_DEFAULT);
scheme = sslEnabled ? HttpServer2.HTTPS_SCHEME : HttpServer2.HTTP_SCHEME;
String host = conf.get(HTTP_HOST_KEY, HTTP_HOST_DEFAULT);
String host = conf.get(HTTP_HOSTNAME_KEY, HTTP_HOSTNAME_DEFAULT);
int port = conf.getInt(HTTP_PORT_KEY, HTTP_PORT_DEFAULT);
URI endpoint = new URI(scheme, null, host, port, null, null, null);

View File

@@ -30,17 +30,6 @@ function hdfs_subcommand_httpfs
. "${HADOOP_CONF_DIR}/httpfs-env.sh"
fi
hadoop_deprecate_envvar HTTPFS_CONFIG HADOOP_CONF_DIR
hadoop_deprecate_envvar HTTPFS_LOG HADOOP_LOG_DIR
hadoop_using_envvar HTTPFS_HTTP_HOSTNAME
hadoop_using_envvar HTTPFS_HTTP_PORT
hadoop_using_envvar HTTPFS_MAX_HTTP_HEADER_SIZE
hadoop_using_envvar HTTPFS_MAX_THREADS
hadoop_using_envvar HTTPFS_SSL_ENABLED
hadoop_using_envvar HTTPFS_SSL_KEYSTORE_FILE
hadoop_using_envvar HTTPFS_TEMP
# shellcheck disable=SC2034
HADOOP_SUBCMD_SUPPORTDAEMONIZATION=true
# shellcheck disable=SC2034
@@ -53,12 +42,6 @@ function hdfs_subcommand_httpfs
"-Dhttpfs.config.dir=${HTTPFS_CONFIG:-${HADOOP_CONF_DIR}}"
hadoop_add_param HADOOP_OPTS "-Dhttpfs.log.dir" \
"-Dhttpfs.log.dir=${HTTPFS_LOG:-${HADOOP_LOG_DIR}}"
hadoop_add_param HADOOP_OPTS "-Dhttpfs.http.hostname" \
"-Dhttpfs.http.hostname=${HTTPFS_HOST_NAME:-$(hostname -f)}"
if [[ -n "${HTTPFS_SSL_ENABLED}" ]]; then
hadoop_add_param HADOOP_OPTS "-Dhttpfs.ssl.enabled" \
"-Dhttpfs.ssl.enabled=${HTTPFS_SSL_ENABLED}"
fi
if [[ "${HADOOP_DAEMON_MODE}" == "default" ]] ||
[[ "${HADOOP_DAEMON_MODE}" == "start" ]]; then

View File

@@ -16,7 +16,7 @@
<configuration>
<property>
<name>hadoop.httpfs.http.port</name>
<name>httpfs.http.port</name>
<value>14000</value>
<description>
The HTTP port for HttpFS REST API.
@@ -24,7 +24,7 @@
</property>
<property>
<name>hadoop.httpfs.http.host</name>
<name>httpfs.http.hostname</name>
<value>0.0.0.0</value>
<description>
The bind host for HttpFS REST API.
@@ -32,7 +32,7 @@
</property>
<property>
<name>hadoop.httpfs.http.administrators</name>
<name>httpfs.http.administrators</name>
<value></value>
<description>ACL for the admins; this configuration is used to control
who can access the default servlets for the HttpFS server. The value
@@ -46,7 +46,7 @@
</property>
<property>
<name>hadoop.httpfs.ssl.enabled</name>
<name>httpfs.ssl.enabled</name>
<value>false</value>
<description>
Whether SSL is enabled. Default is false, i.e. disabled.

View File

@@ -82,7 +82,7 @@ Enable SSL in `etc/hadoop/httpfs-site.xml`:
```xml
<property>
<name>hadoop.httpfs.ssl.enabled</name>
<name>httpfs.ssl.enabled</name>
<value>true</value>
<description>
Whether SSL is enabled. Default is false, i.e. disabled.
@@ -142,13 +142,14 @@ configuration properties instead.
Environment Variable | Configuration Property | Configuration File
----------------------------|------------------------------|--------------------
HTTPFS_TEMP | hadoop.http.temp.dir | httpfs-site.xml
HTTPFS_HTTP_PORT | hadoop.httpfs.http.port | httpfs-site.xml
HTTPFS_HTTP_HOSTNAME | httpfs.http.hostname | httpfs-site.xml
HTTPFS_HTTP_PORT | httpfs.http.port | httpfs-site.xml
HTTPFS_MAX_HTTP_HEADER_SIZE | hadoop.http.max.request.header.size and hadoop.http.max.response.header.size | httpfs-site.xml
HTTPFS_MAX_THREADS | hadoop.http.max.threads | httpfs-site.xml
HTTPFS_SSL_ENABLED | hadoop.httpfs.ssl.enabled | httpfs-site.xml
HTTPFS_SSL_ENABLED | httpfs.ssl.enabled | httpfs-site.xml
HTTPFS_SSL_KEYSTORE_FILE | ssl.server.keystore.location | ssl-server.xml
HTTPFS_SSL_KEYSTORE_PASS | ssl.server.keystore.password | ssl-server.xml
HTTPFS_TEMP | hadoop.http.temp.dir | httpfs-site.xml
HTTP Default Services
---------------------
@@ -182,7 +183,7 @@ and `/stacks`, configure the following properties in `httpfs-site.xml`:
</property>
<property>
<name>hadoop.httpfs.http.administrators</name>
<name>httpfs.http.administrators</name>
<value></value>
<description>ACL for the admins; this configuration is used to control
who can access the default servlets for the HttpFS server. The value

View File

@@ -63,7 +63,7 @@ public static void beforeClass() throws Exception {
@Before
public void setUp() throws Exception {
Configuration conf = new Configuration();
conf.set(HttpFSServerWebServer.HTTP_HOST_KEY, "localhost");
conf.set(HttpFSServerWebServer.HTTP_HOSTNAME_KEY, "localhost");
conf.setInt(HttpFSServerWebServer.HTTP_PORT_KEY, 0);
Configuration sslConf = new Configuration();
webServer = new HttpFSServerWebServer(conf, sslConf);