From 0a74610d1c7c7f183d2b2d0b7a775add53cf6c94 Mon Sep 17 00:00:00 2001
From: Allen Wittenauer
Date: Thu, 24 Mar 2016 08:47:00 -0700
Subject: [PATCH] HADOOP-11393. Revert HADOOP_PREFIX, go back to HADOOP_HOME
 (aw)

---
 .../hadoop-common/src/main/bin/hadoop         | 12 +++---
 .../src/main/bin/hadoop-config.sh             |  6 ++-
 .../src/main/bin/hadoop-daemon.sh             |  6 +--
 .../src/main/bin/hadoop-daemons.sh            |  6 +--
 .../src/main/bin/hadoop-functions.sh          | 37 +++++++++--------
 .../src/main/bin/hadoop-layout.sh.example     | 14 +++----
 .../hadoop-common/src/main/bin/slaves.sh      |  6 +--
 .../hadoop-common/src/main/bin/start-all.sh   |  4 +-
 .../hadoop-common/src/main/bin/stop-all.sh    |  4 +-
 .../hadoop-common/src/main/conf/hadoop-env.sh | 10 ++---
 .../org/apache/hadoop/tracing/TraceUtils.java |  4 +-
 .../src/site/markdown/ClusterSetup.md         | 40 +++++++++----------
 .../src/site/markdown/CommandsManual.md       |  2 +-
 .../src/site/markdown/UnixShellGuide.md       |  2 +-
 .../scripts/hadoop-functions_test_helper.bash |  3 +-
 .../src/test/scripts/hadoop_basic_init.bats   |  2 +-
 .../src/test/scripts/hadoop_bootstrap.bats    |  4 +-
 .../src/test/scripts/hadoop_confdir.bats      | 24 +++++------
 .../src/test/scripts/hadoop_finalize.bats     |  2 +-
 .../hadoop-kms/src/main/conf/kms-env.sh       |  4 +-
 .../hadoop-kms/src/main/libexec/kms-config.sh |  8 ++--
 .../hadoop-kms/src/main/sbin/kms.sh           |  4 +-
 .../src/main/conf/httpfs-env.sh               |  4 +-
 .../src/main/libexec/httpfs-config.sh         |  8 ++--
 .../src/main/sbin/httpfs.sh                   |  4 +-
 .../src/main/native/fuse-dfs/doc/README       |  6 +--
 .../main/native/fuse-dfs/fuse_dfs_wrapper.sh  | 12 +++---
 .../src/main/bin/distribute-exclude.sh        |  4 +-
 .../hadoop-hdfs/src/main/bin/hdfs             |  4 +-
 .../hadoop-hdfs/src/main/bin/hdfs-config.sh   |  6 +--
 .../src/main/bin/refresh-namenodes.sh         |  4 +-
 .../src/main/bin/start-balancer.sh            |  4 +-
 .../hadoop-hdfs/src/main/bin/start-dfs.sh     |  4 +-
 .../src/main/bin/start-secure-dns.sh          |  4 +-
 .../hadoop-hdfs/src/main/bin/stop-balancer.sh |  4 +-
 .../hadoop-hdfs/src/main/bin/stop-dfs.sh      |  4 +-
 .../src/main/bin/stop-secure-dns.sh           |  4 +-
 .../src/site/markdown/Federation.md           | 18 ++++-----
 .../markdown/HDFSHighAvailabilityWithNFS.md   |  4 +-
 .../markdown/HDFSHighAvailabilityWithQJM.md   |  4 +-
 .../src/site/markdown/HdfsNfsGateway.md       |  8 ++--
 .../apache/hadoop/tracing/TestTraceAdmin.java |  2 +-
 .../TestTracingShortCircuitLocalRead.java     |  4 +-
 hadoop-mapreduce-project/bin/mapred           |  4 +-
 hadoop-mapreduce-project/bin/mapred-config.sh |  6 +--
 .../bin/mr-jobhistory-daemon.sh               |  4 +-
 .../apache/hadoop/mapred/pipes/Submitter.java |  2 +-
 .../java/org/apache/hadoop/fs/DFSCIOTest.java |  2 +-
 .../apache/hadoop/mapred/ReliabilityTest.java |  2 +-
 .../hadoop/tools/HadoopArchiveLogs.java       |  4 +-
 .../hadoop/tools/TestHadoopArchiveLogs.java   |  4 +-
 .../hadoop/contrib/utils/join/README.txt      |  2 +-
 .../native/pipes/debug/pipes-default-script   |  5 ++-
 .../hadoop-sls/src/main/bin/rumen2sls.sh      |  4 +-
 .../hadoop-sls/src/main/bin/slsrun.sh         |  8 ++--
 .../hadoop/streaming/DumpTypedBytes.java      |  2 +-
 .../hadoop/streaming/HadoopStreaming.java     |  2 +-
 .../hadoop/streaming/LoadTypedBytes.java      |  2 +-
 .../apache/hadoop/streaming/StreamJob.java    | 16 ++++----
 .../hadoop-yarn/bin/start-yarn.sh             |  4 +-
 .../hadoop-yarn/bin/stop-yarn.sh              |  4 +-
 hadoop-yarn-project/hadoop-yarn/bin/yarn      |  4 +-
 .../hadoop-yarn/bin/yarn-config.sh            |  6 +--
 .../hadoop-yarn/bin/yarn-daemon.sh            |  4 +-
 .../hadoop-yarn/bin/yarn-daemons.sh           |  4 +-
 .../TestDockerContainerExecutorWithMocks.java |  2 +-
 .../markdown/DockerContainerExecutor.md.vm    |  2 +-
 67 files changed, 211 insertions(+), 208 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index 46eaf27368..07569872e7 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -47,8 +47,8 @@ function hadoop_usage
 # This script runs the hadoop core commands.
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
@@ -84,9 +84,9 @@ case ${COMMAND} in
       # shellcheck disable=SC2086
       exec "${HADOOP_HDFS_HOME}/bin/hdfs" \
       --config "${HADOOP_CONF_DIR}" "${COMMAND}" "$@"
-    elif [[ -f "${HADOOP_PREFIX}/bin/hdfs" ]]; then
+    elif [[ -f "${HADOOP_HOME}/bin/hdfs" ]]; then
       # shellcheck disable=SC2086
-      exec "${HADOOP_PREFIX}/bin/hdfs" \
+      exec "${HADOOP_HOME}/bin/hdfs" \
       --config "${HADOOP_CONF_DIR}" "${COMMAND}" "$@"
     else
       hadoop_error "HADOOP_HDFS_HOME not found!"
@@ -104,8 +104,8 @@ case ${COMMAND} in
     if [[ -f "${HADOOP_MAPRED_HOME}/bin/mapred" ]]; then
       exec "${HADOOP_MAPRED_HOME}/bin/mapred" \
       --config "${HADOOP_CONF_DIR}" "${COMMAND}" "$@"
-    elif [[ -f "${HADOOP_PREFIX}/bin/mapred" ]]; then
-      exec "${HADOOP_PREFIX}/bin/mapred" \
+    elif [[ -f "${HADOOP_HOME}/bin/mapred" ]]; then
+      exec "${HADOOP_HOME}/bin/mapred" \
       --config "${HADOOP_CONF_DIR}" "${COMMAND}" "$@"
     else
       hadoop_error "HADOOP_MAPRED_HOME not found!"
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
index 0b52895aec..fd2c83e236 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
@@ -63,6 +63,8 @@ else
   exit 1
 fi
 
+hadoop_deprecate_envvar HADOOP_PREFIX HADOOP_HOME
+
 # allow overrides of the above and pre-defines of the below
 if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
    [[ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-layout.sh" ]]; then
@@ -128,8 +130,8 @@ fi
 hadoop_shellprofiles_init
 
 # get the native libs in there pretty quick
-hadoop_add_javalibpath "${HADOOP_PREFIX}/build/native"
-hadoop_add_javalibpath "${HADOOP_PREFIX}/${HADOOP_COMMON_LIB_NATIVE_DIR}"
+hadoop_add_javalibpath "${HADOOP_HOME}/build/native"
+hadoop_add_javalibpath "${HADOOP_HOME}/${HADOOP_COMMON_LIB_NATIVE_DIR}"
 
 hadoop_shellprofiles_nativelib
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
index 5f094d631b..8118f54145 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
@@ -21,8 +21,8 @@ function hadoop_usage
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
@@ -47,7 +47,7 @@ daemonmode=$1
 shift
 
 if [[ -z "${HADOOP_HDFS_HOME}" ]]; then
-  hdfsscript="${HADOOP_PREFIX}/bin/hdfs"
+  hdfsscript="${HADOOP_HOME}/bin/hdfs"
 else
   hdfsscript="${HADOOP_HDFS_HOME}/bin/hdfs"
 fi
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemons.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemons.sh
index 604eb7eb33..ae1e324823 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemons.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemons.sh
@@ -27,8 +27,8 @@ this="${BASH_SOURCE-$0}"
 bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi
@@ -51,7 +51,7 @@ daemonmode=$1
 shift
 
 if [[ -z "${HADOOP_HDFS_HOME}" ]]; then
-  hdfsscript="${HADOOP_PREFIX}/bin/hdfs"
+  hdfsscript="${HADOOP_HOME}/bin/hdfs"
 else
   hdfsscript="${HADOOP_HDFS_HOME}/bin/hdfs"
 fi
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index 7f293b6475..6c4c3459f3 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -278,7 +278,7 @@ function hadoop_bootstrap
   # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
   # We can piggyback off of that to figure out where the default
   # HADOOP_FREFIX should be. This allows us to run without
-  # HADOOP_PREFIX ever being defined by a human! As a consequence
+  # HADOOP_HOME ever being defined by a human! As a consequence
   # HADOOP_LIBEXEC_DIR now becomes perhaps the single most powerful
   # env var within Hadoop.
   if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
@@ -286,8 +286,8 @@ function hadoop_bootstrap
     exit 1
   fi
   HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
-  HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_DEFAULT_PREFIX}
-  export HADOOP_PREFIX
+  HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DEFAULT_PREFIX}
+  export HADOOP_HOME
 
   #
   # short-cuts. vendors may redefine these as well, preferably
@@ -302,7 +302,7 @@ function hadoop_bootstrap
   YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
   MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
   MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
-  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_PREFIX}}
+  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_HOME}}
   HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
   HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}
 
@@ -326,12 +326,12 @@ function hadoop_find_confdir
 
   # An attempt at compatibility with some Hadoop 1.x
   # installs.
-  if [[ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]]; then
+  if [[ -e "${HADOOP_HOME}/conf/hadoop-env.sh" ]]; then
     conf_dir="conf"
   else
     conf_dir="etc/hadoop"
   fi
-  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_PREFIX}/${conf_dir}}"
+  export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_HOME}/${conf_dir}}"
   hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
 }
 
@@ -524,8 +524,8 @@ function hadoop_basic_init
   hadoop_debug "Initialize CLASSPATH"
 
   if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
-     [[ -d "${HADOOP_PREFIX}/${HADOOP_COMMON_DIR}" ]]; then
-    export HADOOP_COMMON_HOME="${HADOOP_PREFIX}"
+     [[ -d "${HADOOP_HOME}/${HADOOP_COMMON_DIR}" ]]; then
+    export HADOOP_COMMON_HOME="${HADOOP_HOME}"
   fi
 
   # default policy file for service-level authorization
@@ -533,20 +533,20 @@
 
   # define HADOOP_HDFS_HOME
   if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
-     [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
-    export HADOOP_HDFS_HOME="${HADOOP_PREFIX}"
+     [[ -d "${HADOOP_HOME}/${HDFS_DIR}" ]]; then
+    export HADOOP_HDFS_HOME="${HADOOP_HOME}"
   fi
 
   # define HADOOP_YARN_HOME
   if [[ -z "${HADOOP_YARN_HOME}" ]] &&
-     [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
-    export HADOOP_YARN_HOME="${HADOOP_PREFIX}"
+     [[ -d "${HADOOP_HOME}/${YARN_DIR}" ]]; then
+    export HADOOP_YARN_HOME="${HADOOP_HOME}"
   fi
 
   # define HADOOP_MAPRED_HOME
   if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
-     [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
-    export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
+     [[ -d "${HADOOP_HOME}/${MAPRED_DIR}" ]]; then
+    export HADOOP_MAPRED_HOME="${HADOOP_HOME}"
   fi
 
   if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
@@ -573,7 +573,7 @@
   # let's define it as 'hadoop'
   HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
   HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-hadoop}
-  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
+  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_HOME}/logs"}
   HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
   HADOOP_LOGLEVEL=${HADOOP_LOGLEVEL:-INFO}
   HADOOP_NICENESS=${HADOOP_NICENESS:-0}
@@ -1219,7 +1219,6 @@ function hadoop_finalize_hadoop_opts
   hadoop_translate_cygwin_path HADOOP_LOG_DIR
   hadoop_add_param HADOOP_OPTS hadoop.log.dir "-Dhadoop.log.dir=${HADOOP_LOG_DIR}"
   hadoop_add_param HADOOP_OPTS hadoop.log.file "-Dhadoop.log.file=${HADOOP_LOGFILE}"
-  HADOOP_HOME=${HADOOP_PREFIX}
   hadoop_translate_cygwin_path HADOOP_HOME
   export HADOOP_HOME
   hadoop_add_param HADOOP_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
@@ -1252,11 +1251,11 @@ function hadoop_finalize_catalina_opts
 
   local prefix=${HADOOP_CATALINA_PREFIX}
 
-  hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_PREFIX}"
+  hadoop_add_param CATALINA_OPTS hadoop.home.dir "-Dhadoop.home.dir=${HADOOP_HOME}"
   if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
     hadoop_add_param CATALINA_OPTS java.library.path "-Djava.library.path=${JAVA_LIBRARY_PATH}"
   fi
-  hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_PREFIX}"
+  hadoop_add_param CATALINA_OPTS "${prefix}.home.dir" "-D${prefix}.home.dir=${HADOOP_HOME}"
   hadoop_add_param CATALINA_OPTS "${prefix}.config.dir" "-D${prefix}.config.dir=${HADOOP_CATALINA_CONFIG}"
   hadoop_add_param CATALINA_OPTS "${prefix}.log.dir" "-D${prefix}.log.dir=${HADOOP_CATALINA_LOG}"
   hadoop_add_param CATALINA_OPTS "${prefix}.temp.dir" "-D${prefix}.temp.dir=${HADOOP_CATALINA_TEMP}"
@@ -1282,7 +1281,7 @@ function hadoop_finalize
   hadoop_finalize_hadoop_heap
   hadoop_finalize_hadoop_opts
 
-  hadoop_translate_cygwin_path HADOOP_PREFIX
+  hadoop_translate_cygwin_path HADOOP_HOME
   hadoop_translate_cygwin_path HADOOP_CONF_DIR
   hadoop_translate_cygwin_path HADOOP_COMMON_HOME
   hadoop_translate_cygwin_path HADOOP_HDFS_HOME
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-layout.sh.example b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-layout.sh.example
index faa431740c..efba10cc1e 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-layout.sh.example
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-layout.sh.example
@@ -26,8 +26,8 @@
 ##
 ## If you move HADOOP_LIBEXEC_DIR from some location that
 ## isn't bin/../libexec, you MUST define either HADOOP_LIBEXEC_DIR
-## or have HADOOP_PREFIX/libexec/hadoop-config.sh and
-## HADOOP_PREFIX/libexec/hadoop-layout.sh (this file) exist.
+## or have HADOOP_HOME/libexec/hadoop-config.sh and
+## HADOOP_HOME/libexec/hadoop-layout.sh (this file) exist.
 
 ## NOTE:
 ##
@@ -44,7 +44,7 @@
 ####
 
 # Default location for the common/core Hadoop project
-# export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
+# export HADOOP_COMMON_HOME=${HADOOP_HOME}
 
 # Relative locations where components under HADOOP_COMMON_HOME are located
 # export HADOOP_COMMON_DIR="share/hadoop/common"
@@ -56,7 +56,7 @@
 ####
 
 # Default location for the HDFS subproject
-# export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
+# export HADOOP_HDFS_HOME=${HADOOP_HOME}
 
 # Relative locations where components under HADOOP_HDFS_HOME are located
 # export HDFS_DIR="share/hadoop/hdfs"
@@ -67,7 +67,7 @@
 ####
 
 # Default location for the YARN subproject
-# export HADOOP_YARN_HOME=${HADOOP_PREFIX}
+# export HADOOP_YARN_HOME=${HADOOP_HOME}
 
 # Relative locations where components under HADOOP_YARN_HOME are located
 # export YARN_DIR="share/hadoop/yarn"
@@ -78,7 +78,7 @@
 ####
 
 # Default location for the MapReduce subproject
-# export HADOOP_MAPRED_HOME=${HADOOP_PREFIX}
+# export HADOOP_MAPRED_HOME=${HADOOP_HOME}
 
 # Relative locations where components under HADOOP_MAPRED_HOME are located
 # export MAPRED_DIR="share/hadoop/mapreduce"
@@ -92,6 +92,6 @@
 # note that this path only gets added for certain commands and not
 # part of the general classpath unless HADOOP_OPTIONAL_TOOLS is used
 # to configure them in
-# export HADOOP_TOOLS_HOME=${HADOOP_PREFIX}
+# export HADOOP_TOOLS_HOME=${HADOOP_HOME}
 # export HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
 # export HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh b/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
index 5859da03b2..34bf0ebb2b 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
@@ -22,7 +22,7 @@
 #
 #   HADOOP_SLAVES    File naming remote hosts.
 #     Default is ${HADOOP_CONF_DIR}/slaves.
-#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_PREFIX}/conf.
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
 #   HADOOP_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
 #   HADOOP_SSH_OPTS Options passed to ssh when running remote commands.
 ##
@@ -33,8 +33,8 @@ function hadoop_usage
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh b/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh
index 845ca378cc..142064209d 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/start-all.sh
@@ -21,8 +21,8 @@
 exit 1
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh b/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh
index df7ae8d7c5..ee1f6eb569 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/stop-all.sh
@@ -22,8 +22,8 @@
 echo "This script is deprecated. Use stop-dfs.sh and stop-yarn.sh instead."
 exit 1
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
index 3c554aa8d0..3f19e459c4 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
@@ -55,14 +55,14 @@
 # Location of Hadoop.  By default, Hadoop will attempt to determine
 # this location based upon its execution path.
-# export HADOOP_PREFIX=
+# export HADOOP_HOME=
 
 # Location of Hadoop's configuration information.  i.e., where this
 # file is probably living. Many sites will also set this in the
 # same location where JAVA_HOME is defined.  If this is not defined
 # Hadoop will attempt to locate it based upon its execution
 # path.
-# export HADOOP_CONF_DIR=$HADOOP_PREFIX/etc/hadoop
+# export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop
 
 # The maximum amount of heap to use (Java -Xmx).  If no unit
 # is provided, it will be converted to MB.  Daemons will
@@ -186,10 +186,10 @@ esac
 # non-secure)
 #
 
-# Where (primarily) daemon log files are stored. # $HADOOP_PREFIX/logs
-# by default.
+# Where (primarily) daemon log files are stored.
+# ${HADOOP_HOME}/logs by default.
 # Java property: hadoop.log.dir
-# export HADOOP_LOG_DIR=${HADOOP_PREFIX}/logs
+# export HADOOP_LOG_DIR=${HADOOP_HOME}/logs
 
 # A string representing this instance of hadoop. $USER by default.
 # This is used in writing log and pid files, so keep that in mind!
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
index 09acb35bcd..0ae6d03933 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
@@ -32,7 +32,7 @@
 @InterfaceAudience.Private
 public class TraceUtils {
   private static List<ConfigurationPair> EMPTY = Collections.emptyList();
-  static final String DEFAULT_HADOOP_PREFIX = "hadoop.htrace.";
+  static final String DEFAULT_HADOOP_TRACE_PREFIX = "hadoop.htrace.";
 
   public static HTraceConfiguration wrapHadoopConf(final String prefix,
         final Configuration conf) {
@@ -52,7 +52,7 @@ public String get(String key) {
         if (ret != null) {
           return ret;
         }
-        return getInternal(DEFAULT_HADOOP_PREFIX + key);
+        return getInternal(DEFAULT_HADOOP_TRACE_PREFIX + key);
       }
 
       @Override
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/ClusterSetup.md b/hadoop-common-project/hadoop-common/src/site/markdown/ClusterSetup.md
index bf9419a7ae..d2479e7611 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/ClusterSetup.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/ClusterSetup.md
@@ -86,10 +86,10 @@ Other useful configuration parameters that you can customize include:
 
 In most cases, you should specify the `HADOOP_PID_DIR` and `HADOOP_LOG_DIR` directories such that they can only be written to by the users that are going to run the hadoop daemons. Otherwise there is the potential for a symlink attack.
 
-It is also traditional to configure `HADOOP_PREFIX` in the system-wide shell environment configuration. For example, a simple script inside `/etc/profile.d`:
+It is also traditional to configure `HADOOP_HOME` in the system-wide shell environment configuration. For example, a simple script inside `/etc/profile.d`:
 
-    HADOOP_PREFIX=/path/to/hadoop
-    export HADOOP_PREFIX
+    HADOOP_HOME=/path/to/hadoop
+    export HADOOP_HOME
 
 | Daemon | Environment Variable |
 |:---- |:---- |
@@ -243,73 +243,73 @@ To start a Hadoop cluster you will need to start both the HDFS and YARN cluster.
 
 The first time you bring up HDFS, it must be formatted. Format a new distributed filesystem as *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs namenode -format <cluster_name>
+    [hdfs]$ $HADOOP_HOME/bin/hdfs namenode -format <cluster_name>
 
 Start the HDFS NameNode with the following command on the designated node as *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start namenode
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start namenode
 
 Start a HDFS DataNode with the following command on each designated node as *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start datanode
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start datanode
 
 If `etc/hadoop/slaves` and ssh trusted access is configured (see [Single Node Setup](./SingleCluster.html)), all of the HDFS processes can be started with a utility script. As *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/start-dfs.sh
+    [hdfs]$ $HADOOP_HOME/sbin/start-dfs.sh
 
 Start the YARN with the following command, run on the designated ResourceManager as *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn --daemon start resourcemanager
+    [yarn]$ $HADOOP_HOME/bin/yarn --daemon start resourcemanager
 
 Run a script to start a NodeManager on each designated host as *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn --daemon start nodemanager
+    [yarn]$ $HADOOP_HOME/bin/yarn --daemon start nodemanager
 
 Start a standalone WebAppProxy server. Run on the WebAppProxy server as *yarn*. If multiple servers are used with load balancing it should be run on each of them:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn --daemon start proxyserver
+    [yarn]$ $HADOOP_HOME/bin/yarn --daemon start proxyserver
 
 If `etc/hadoop/slaves` and ssh trusted access is configured (see [Single Node Setup](./SingleCluster.html)), all of the YARN processes can be started with a utility script. As *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/sbin/start-yarn.sh
+    [yarn]$ $HADOOP_HOME/sbin/start-yarn.sh
 
 Start the MapReduce JobHistory Server with the following command, run on the designated server as *mapred*:
 
-    [mapred]$ $HADOOP_PREFIX/bin/mapred --daemon start historyserver
+    [mapred]$ $HADOOP_HOME/bin/mapred --daemon start historyserver
 
 ### Hadoop Shutdown
 
 Stop the NameNode with the following command, run on the designated NameNode as *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon stop namenode
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon stop namenode
 
 Run a script to stop a DataNode as *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon stop datanode
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon stop datanode
 
 If `etc/hadoop/slaves` and ssh trusted access is configured (see [Single Node Setup](./SingleCluster.html)), all of the HDFS processes may be stopped with a utility script. As *hdfs*:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/stop-dfs.sh
+    [hdfs]$ $HADOOP_HOME/sbin/stop-dfs.sh
 
 Stop the ResourceManager with the following command, run on the designated ResourceManager as *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn --daemon stop resourcemanager
+    [yarn]$ $HADOOP_HOME/bin/yarn --daemon stop resourcemanager
 
 Run a script to stop a NodeManager on a slave as *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn --daemon stop nodemanager
+    [yarn]$ $HADOOP_HOME/bin/yarn --daemon stop nodemanager
 
 If `etc/hadoop/slaves` and ssh trusted access is configured (see [Single Node Setup](./SingleCluster.html)), all of the YARN processes can be stopped with a utility script. As *yarn*:
 
-    [yarn]$ $HADOOP_PREFIX/sbin/stop-yarn.sh
+    [yarn]$ $HADOOP_HOME/sbin/stop-yarn.sh
 
 Stop the WebAppProxy server. Run on the WebAppProxy server as *yarn*. If multiple servers are used with load balancing it should be run on each of them:
 
-    [yarn]$ $HADOOP_PREFIX/bin/yarn stop proxyserver
+    [yarn]$ $HADOOP_HOME/bin/yarn stop proxyserver
 
 Stop the MapReduce JobHistory Server with the following command, run on the designated server as *mapred*:
 
-    [mapred]$ $HADOOP_PREFIX/bin/mapred --daemon stop historyserver
+    [mapred]$ $HADOOP_HOME/bin/mapred --daemon stop historyserver
 
 Web Interfaces
 --------------
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md b/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
index 59ea19896b..365a8448e6 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
@@ -39,7 +39,7 @@ All of the shell commands will accept a common set of options. For some commands
 | SHELL\_OPTION | Description |
 |:---- |:---- |
 | `--buildpaths` | Enables developer versions of jars. |
-| `--config confdir` | Overwrites the default Configuration directory. Default is `$HADOOP_PREFIX/etc/hadoop`. |
+| `--config confdir` | Overwrites the default Configuration directory. Default is `$HADOOP_HOME/etc/hadoop`. |
 | `--daemon mode` | If the command supports daemonization (e.g., `hdfs namenode`), execute in the appropriate mode. Supported modes are `start` to start the process in daemon mode, `stop` to stop the process, and `status` to determine the active status of the process. `status` will return an [LSB-compliant](http://refspecs.linuxbase.org/LSB_3.0.0/LSB-generic/LSB-generic/iniscrptact.html) result code. If no option is provided, commands that support daemonization will run in the foreground. For commands that do not support daemonization, this option is ignored. |
 | `--debug` | Enables shell level configuration debugging information |
 | `--help` | Shell script usage information. |
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
index ffab5a0a93..caa3aa761a 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
@@ -83,7 +83,7 @@ Apache Hadoop allows for third parties to easily add new features through a vari
 
 Core to this functionality is the concept of a shell profile. Shell profiles are shell snippets that can do things such as add jars to the classpath, configure Java system properties and more.
 
-Shell profiles may be installed in either `${HADOOP_CONF_DIR}/shellprofile.d` or `${HADOOP_PREFIX}/libexec/shellprofile.d`. Shell profiles in the `libexec` directory are part of the base installation and cannot be overriden by the user. Shell profiles in the configuration directory may be ignored if the end user changes the configuration directory at runtime.
+Shell profiles may be installed in either `${HADOOP_CONF_DIR}/shellprofile.d` or `${HADOOP_HOME}/libexec/shellprofile.d`. Shell profiles in the `libexec` directory are part of the base installation and cannot be overriden by the user. Shell profiles in the configuration directory may be ignored if the end user changes the configuration directory at runtime.
 
 An example of a shell profile is in the libexec directory.
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
index f7183455e9..be2d7f58b2 100755
--- a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
@@ -27,6 +27,7 @@ setup() {
   # shellcheck disable=SC2034
   HADOOP_SHELL_SCRIPT_DEBUG=true
   unset HADOOP_CONF_DIR
+  # we unset both of these for bw compat
   unset HADOOP_HOME
   unset HADOOP_PREFIX
 
@@ -53,4 +54,4 @@ strstr() {
   else
     echo false
   fi
-}
\ No newline at end of file
+}
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
index ae20248dad..79ede4273f 100644
--- a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
@@ -45,7 +45,7 @@ basicinitsetup () {
     unset ${j}
   done
 
-  HADOOP_PREFIX=${TMP}
+  HADOOP_HOME=${TMP}
 }
 
 check_var_values () {
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
index 9114c70733..de4edd493a 100644
--- a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
@@ -22,7 +22,7 @@ load hadoop-functions_test_helper
 }
 
 @test "hadoop_bootstrap (libexec)" {
-  unset HADOOP_PREFIX
+  unset HADOOP_HOME
   unset HADOOP_COMMON_DIR
   unset HADOOP_COMMON_LIB_JARS_DIR
   unset HDFS_DIR
@@ -39,7 +39,7 @@ load hadoop-functions_test_helper
   hadoop_bootstrap
 
   # all of these should be set
-  [ -n ${HADOOP_PREFIX} ]
+  [ -n ${HADOOP_HOME} ]
   [ -n ${HADOOP_COMMON_DIR} ]
   [ -n ${HADOOP_COMMON_LIB_JARS_DIR} ]
   [ -n ${HDFS_DIR} ]
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
index 3e42da936c..1f0c706ca7 100644
--- a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
@@ -16,10 +16,10 @@
 load hadoop-functions_test_helper
 
 create_fake_dirs () {
-  HADOOP_PREFIX=${TMP}
+  HADOOP_HOME=${TMP}
   for j in conf etc/hadoop; do
-    mkdir -p "${HADOOP_PREFIX}/${j}"
-    echo "unittest=${j}" > "${HADOOP_PREFIX}/${j}/hadoop-env.sh"
+    mkdir -p "${HADOOP_HOME}/${j}"
+    echo "unittest=${j}" > "${HADOOP_HOME}/${j}/hadoop-env.sh"
   done
 }
 
@@ -32,27 +32,27 @@ create_fake_dirs () {
 @test "hadoop_find_confdir (bw compat: conf)" {
   create_fake_dirs
   hadoop_find_confdir
-  echo ">${HADOOP_CONF_DIR}< >${HADOOP_PREFIX}/conf<"
-  [ "${HADOOP_CONF_DIR}" = ${HADOOP_PREFIX}/conf ]
+  echo ">${HADOOP_CONF_DIR}< >${HADOOP_HOME}/conf<"
+  [ "${HADOOP_CONF_DIR}" = ${HADOOP_HOME}/conf ]
 }
 
 @test "hadoop_find_confdir (etc/hadoop)" {
   create_fake_dirs
-  rm -rf "${HADOOP_PREFIX}/conf"
+  rm -rf "${HADOOP_HOME}/conf"
   hadoop_find_confdir
-  [ "${HADOOP_CONF_DIR}" = ${HADOOP_PREFIX}/etc/hadoop ]
+  [ "${HADOOP_CONF_DIR}" = ${HADOOP_HOME}/etc/hadoop ]
 }
 
 @test "hadoop_verify_confdir (negative) " {
   create_fake_dirs
-  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_CONF_DIR=${HADOOP_HOME}/conf
   run hadoop_verify_confdir
   [ -n "${output}" ]
 }
 
 @test "hadoop_verify_confdir (positive) " {
   create_fake_dirs
-  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_CONF_DIR=${HADOOP_HOME}/conf
   touch "${HADOOP_CONF_DIR}/log4j.properties"
   run hadoop_verify_confdir
   [ -z "${output}" ]
@@ -60,7 +60,7 @@ create_fake_dirs () {
 
 @test "hadoop_exec_hadoopenv (positive) " {
   create_fake_dirs
-  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_CONF_DIR=${HADOOP_HOME}/conf
   hadoop_exec_hadoopenv
   [ -n "${HADOOP_ENV_PROCESSED}" ]
   [ "${unittest}" = conf ]
@@ -68,7 +68,7 @@ create_fake_dirs () {
 
 @test "hadoop_exec_hadoopenv (negative) " {
   create_fake_dirs
-  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_CONF_DIR=${HADOOP_HOME}/conf
   HADOOP_ENV_PROCESSED=true
   hadoop_exec_hadoopenv
   [ -z "${unittest}" ]
@@ -76,7 +76,7 @@ create_fake_dirs () {
 
 @test "hadoop_exec_userfuncs" {
   create_fake_dirs
-  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_CONF_DIR=${HADOOP_HOME}/conf
   echo "unittest=userfunc" > "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
   hadoop_exec_userfuncs
   [ "${unittest}" = "userfunc" ]
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
index 668c115825..b9339f3c56 100644
--- a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
@@ -100,7 +100,7 @@ load hadoop-functions_test_helper
   hadoop_finalize_hadoop_heap () { true; }
   hadoop_finalize_hadoop_opts () { true; }
   hadoop_translate_cygwin_path () {
-    if [ $1 = HADOOP_PREFIX ]; then
+    if [ $1 = HADOOP_HOME ]; then
       testvar=prefix;
     fi
   }
diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
index c3bc772b26..7044fa8670 100644
--- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
@@ -20,7 +20,7 @@
 
 # KMS temporary directory
 #
-# export KMS_TEMP=${HADOOP_PREFIX}/temp
+# export KMS_TEMP=${HADOOP_HOME}/temp
 
 # The HTTP port used by KMS
 #
@@ -59,7 +59,7 @@
 #
 # Location of tomcat
 #
-# export KMS_CATALINA_HOME=${HADOOP_PREFIX}/share/hadoop/kms/tomcat
+# export KMS_CATALINA_HOME=${HADOOP_HOME}/share/hadoop/kms/tomcat
 
 # Java System properties for KMS should be specified in this variable.
 # The java.library.path and hadoop.home.dir properties are automatically
diff --git a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh b/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
index c88aa87962..5e1ffa40c9 100644
--- a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
@@ -28,7 +28,7 @@ function hadoop_subproject_init
 
   export HADOOP_CATALINA_PREFIX=kms
 
-  export HADOOP_CATALINA_TEMP="${KMS_TEMP:-${HADOOP_PREFIX}/temp}"
+  export HADOOP_CATALINA_TEMP="${KMS_TEMP:-${HADOOP_HOME}/temp}"
 
   hadoop_deprecate_envvar KMS_CONFIG HADOOP_CONF_DIR
 
@@ -49,7 +49,7 @@ function hadoop_subproject_init
   # shellcheck disable=SC2086
   export KMS_SSL_TRUSTSTORE_PASS=${KMS_SSL_TRUSTSTORE_PASS:-"$(echo ${CATALINA_OPTS} | grep -o 'trustStorePassword=[^ ]*' | cut -f2 -d= )"}
 
-  export CATALINA_BASE="${CATALINA_BASE:-${HADOOP_PREFIX}/share/hadoop/kms/tomcat}"
+  export CATALINA_BASE="${CATALINA_BASE:-${HADOOP_HOME}/share/hadoop/kms/tomcat}"
   export HADOOP_CATALINA_HOME="${KMS_CATALINA_HOME:-${CATALINA_BASE}}"
 
   export CATALINA_OUT="${CATALINA_OUT:-${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-kms-${HOSTNAME}.out}"
@@ -69,8 +69,8 @@ if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
   . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh"
"${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" -elif [[ -e "${HADOOP_PREFIX}/libexec/hadoop-config.sh" ]]; then - . "${HADOOP_PREFIX}/libexec/hadoop-config.sh" +elif [[ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]]; then + . "${HADOOP_HOME}/libexec/hadoop-config.sh" else echo "ERROR: Hadoop common not found." 2>&1 exit 1 diff --git a/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh b/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh index 4ed4725aee..6708cd917d 100755 --- a/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh +++ b/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh @@ -30,8 +30,8 @@ function hadoop_usage } # let's locate libexec... -if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P) HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec" diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh index a4edef625f..f01245372f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh @@ -20,7 +20,7 @@ # HTTPFS temporary directory # -# export HTTPFS_TEMP=${HADOOP_PREFIX}/temp +# export HTTPFS_TEMP=${HADOOP_HOME}/temp # The HTTP port used by HTTPFS # @@ -53,7 +53,7 @@ # # Location of tomcat # -# export HTTPFS_CATALINA_HOME=${HADOOP_PREFIX}/share/hadoop/httpfs/tomcat +# export HTTPFS_CATALINA_HOME=${HADOOP_HOME}/share/hadoop/httpfs/tomcat # Java System properties for HTTPFS should be specified in this variable. # The java.library.path and hadoop.home.dir properties are automatically diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh index 767bd6e70d..ba4b4068df 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh @@ -28,7 +28,7 @@ function hadoop_subproject_init export HADOOP_CATALINA_PREFIX=httpfs - export HADOOP_CATALINA_TEMP="${HTTPFS_TEMP:-${HADOOP_PREFIX}/temp}" + export HADOOP_CATALINA_TEMP="${HTTPFS_TEMP:-${HADOOP_HOME}/temp}" hadoop_deprecate_envvar HTTPFS_CONFIG HADOOP_CONF_DIR @@ -47,7 +47,7 @@ function hadoop_subproject_init export HADOOP_CATALINA_SSL_KEYSTORE_FILE="${HTTPFS_SSL_KEYSTORE_FILE:-${HOME}/.keystore}" - export CATALINA_BASE="${CATALINA_BASE:-${HADOOP_PREFIX}/share/hadoop/httpfs/tomcat}" + export CATALINA_BASE="${CATALINA_BASE:-${HADOOP_HOME}/share/hadoop/httpfs/tomcat}" export HADOOP_CATALINA_HOME="${HTTPFS_CATALINA_HOME:-${CATALINA_BASE}}" export CATALINA_OUT="${CATALINA_OUT:-${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-httpfs-${HOSTNAME}.out}" @@ -67,8 +67,8 @@ if [[ -n "${HADOOP_COMMON_HOME}" ]] && . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" -elif [[ -e "${HADOOP_PREFIX}/libexec/hadoop-config.sh" ]]; then - . "${HADOOP_PREFIX}/libexec/hadoop-config.sh" +elif [[ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]]; then + . "${HADOOP_HOME}/libexec/hadoop-config.sh" else echo "ERROR: Hadoop common not found." 
   exit 1
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
index 18c1af0a38..3e7cdf8d69 100755
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
@@ -30,8 +30,8 @@ function hadoop_usage
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/doc/README b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/doc/README
index 672265e1a7..e8cc0e509f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/doc/README
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/doc/README
@@ -35,9 +35,9 @@ Requirements
 BUILDING
 
    fuse-dfs executable can be built by setting `require.fuse` option to true using Maven. For example:
-   in HADOOP_PREFIX: `mvn package -Pnative -Drequire.fuse=true -DskipTests -Dmaven.javadoc.skip=true`
+   in HADOOP_HOME: `mvn package -Pnative -Drequire.fuse=true -DskipTests -Dmaven.javadoc.skip=true`
 
-   The executable `fuse_dfs` will be located at HADOOP_PREFIX/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs/
+   The executable `fuse_dfs` will be located at HADOOP_HOME/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs/
 
    Common build problems include not finding the libjvm.so in JAVA_HOME/jre/lib/OS_ARCH/server or not finding fuse in FUSE_HOME or /usr/local.
 
@@ -109,7 +109,7 @@ NOTE - you cannot export this with a FUSE module built into the kernel
 
 RECOMMENDATIONS
 
-1. From /bin, `ln -s HADOOP_PREFIX/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs/fuse_dfs* .`
+1. From /bin, `ln -s HADOOP_HOME/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs/fuse_dfs* .`
 
 2. Always start with debug on so you can see if you are missing a classpath or something like that.
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/fuse_dfs_wrapper.sh b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/fuse_dfs_wrapper.sh
index 26dfd19005..c52c5f9606 100755
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/fuse_dfs_wrapper.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/fuse-dfs/fuse_dfs_wrapper.sh
@@ -16,12 +16,12 @@
 # limitations under the License.
 #
 
-if [ "$HADOOP_PREFIX" = "" ]; then
-  echo "HADOOP_PREFIX is empty. Set it to the root directory of Hadoop source code"
+if [ "$HADOOP_HOME" = "" ]; then
+  echo "HADOOP_HOME is empty. Set it to the root directory of Hadoop source code"
   exit 1
 fi
-export FUSEDFS_PATH="$HADOOP_PREFIX/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs"
-export LIBHDFS_PATH="$HADOOP_PREFIX/hadoop-hdfs-project/hadoop-hdfs-native-client/target/usr/local/lib"
+export FUSEDFS_PATH="$HADOOP_HOME/hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs"
+export LIBHDFS_PATH="$HADOOP_HOME/hadoop-hdfs-project/hadoop-hdfs-native-client/target/usr/local/lib"
 
 if [ "$OS_ARCH" = "" ]; then
   export OS_ARCH=amd64
@@ -38,12 +38,12 @@ fi
 while IFS= read -r -d '' file
 do
   export CLASSPATH=$CLASSPATH:$file
-done < <(find "$HADOOP_PREFIX/hadoop-client" -name "*.jar" -print0)
+done < <(find "$HADOOP_HOME/hadoop-client" -name "*.jar" -print0)
 
 while IFS= read -r -d '' file
 do
   export CLASSPATH=$CLASSPATH:$file
-done < <(find "$HADOOP_PREFIX/hhadoop-hdfs-project" -name "*.jar" -print0)
+done < <(find "$HADOOP_HOME/hhadoop-hdfs-project" -name "*.jar" -print0)
 
 export CLASSPATH=$HADOOP_CONF_DIR:$CLASSPATH
 export PATH=$FUSEDFS_PATH:$PATH
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/distribute-exclude.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/distribute-exclude.sh
index cfd44e390a..97f04f708a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/distribute-exclude.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/distribute-exclude.sh
@@ -52,8 +52,8 @@ if [ ! -f "$excludeFilenameLocal" ] ; then
   exit 1
 fi
 
-namenodes=$("$HADOOP_PREFIX/bin/hdfs" getconf -namenodes)
-excludeFilenameRemote=$("$HADOOP_PREFIX/bin/hdfs" getconf -excludeFile)
+namenodes=$("$HADOOP_HOME/bin/hdfs" getconf -namenodes)
+excludeFilenameRemote=$("$HADOOP_HOME/bin/hdfs" getconf -excludeFile)
 
 if [ "$excludeFilenameRemote" = '' ] ; then
   echo \
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index bcd04d1608..c365250f59 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -60,8 +60,8 @@ function hadoop_usage
 }
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh
index 244e5a9da5..d440210992 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs-config.sh
@@ -49,7 +49,7 @@ function hadoop_subproject_init
 
   hadoop_deprecate_envvar HADOOP_HDFS_IDENT_STRING HADOOP_IDENT_STRING
 
-  HADOOP_HDFS_HOME="${HADOOP_HDFS_HOME:-$HADOOP_PREFIX}"
+  HADOOP_HDFS_HOME="${HADOOP_HDFS_HOME:-$HADOOP_HOME}"
 
   # turn on the defaults
   export HDFS_AUDIT_LOGGER=${HDFS_AUDIT_LOGGER:-INFO,NullAppender}
@@ -71,8 +71,8 @@ if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
   . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh"
 elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
   . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
-elif [ -e "${HADOOP_PREFIX}/libexec/hadoop-config.sh" ]; then
-  . "${HADOOP_PREFIX}/libexec/hadoop-config.sh"
+elif [ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]; then
+  . "${HADOOP_HOME}/libexec/hadoop-config.sh"
 else
   echo "ERROR: Hadoop common not found." 2>&1
   exit 1
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/refresh-namenodes.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/refresh-namenodes.sh
index 318a2826a0..f51dd0fc57 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/refresh-namenodes.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/refresh-namenodes.sh
@@ -21,8 +21,8 @@
 # for dfsadmin to support multiple namenodes.
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh
index 32ca2b23d4..df044fe48b 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh
@@ -30,8 +30,8 @@ function hadoop_usage
 bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-dfs.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-dfs.sh
index 9c5a172c87..1e35e7d8a6 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-dfs.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-dfs.sh
@@ -29,8 +29,8 @@ this="${BASH_SOURCE-$0}"
 bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-secure-dns.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-secure-dns.sh
index f904640d54..3fce34572d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-secure-dns.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-secure-dns.sh
@@ -26,8 +26,8 @@ this="${BASH_SOURCE-$0}"
 bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh
index bb51a8a5a1..ec94080ece 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh
@@ -28,8 +28,8 @@ function hadoop_usage
 bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-dfs.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-dfs.sh
index cc0d11d652..e693374211 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-dfs.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-dfs.sh
@@ -28,8 +28,8 @@ this="${BASH_SOURCE-$0}"
 bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-secure-dns.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-secure-dns.sh
index 816a3e31c9..2a973b1e83 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-secure-dns.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-secure-dns.sh
@@ -26,8 +26,8 @@ this="${BASH_SOURCE-$0}"
 bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/Federation.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/Federation.md
index 38c10703a8..99a41a2901 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/Federation.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/Federation.md
@@ -150,13 +150,13 @@ Here is an example configuration with two Namenodes:
 
 **Step 1**: Format a Namenode using the following command:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs namenode -format [-clusterId <cluster_id>]
+    [hdfs]$ $HADOOP_HOME/bin/hdfs namenode -format [-clusterId <cluster_id>]
 
 Choose a unique cluster\_id which will not conflict other clusters in your environment. If a cluster\_id is not provided, then a unique one is auto generated.
 
 **Step 2**: Format additional Namenodes using the following command:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs namenode -format -clusterId <cluster_id>
+    [hdfs]$ $HADOOP_HOME/bin/hdfs namenode -format -clusterId <cluster_id>
 
 Note that the cluster\_id in step 2 must be same as that of the cluster\_id in step 1. If they are different, the additional Namenodes will not be part of the federated cluster.
 
@@ -164,7 +164,7 @@
 Older releases only support a single Namenode. Upgrade the cluster to newer release in order to enable federation During upgrade you can provide a ClusterID as follows:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start namenode -upgrade -clusterId <cluster_ID>
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start namenode -upgrade -clusterId <cluster_ID>
 
 If cluster\_id is not provided, it is auto generated.
@@ -187,7 +187,7 @@ Perform the following steps:
 
 * Refresh the Datanodes to pickup the newly added Namenode by running the following command against all the Datanodes in the cluster:
 
-        [hdfs]$ $HADOOP_PREFIX/bin/hdfs dfsadmin -refreshNamenodes <datanode_host_name>:<datanode_rpc_port>
+        [hdfs]$ $HADOOP_HOME/bin/hdfs dfsadmin -refreshNamenodes <datanode_host_name>:<datanode_rpc_port>
 
 Managing the cluster
 --------------------
@@ -196,11 +196,11 @@ Managing the cluster
 
 To start the cluster run the following command:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/start-dfs.sh
+    [hdfs]$ $HADOOP_HOME/sbin/start-dfs.sh
 
 To stop the cluster run the following command:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/stop-dfs.sh
+    [hdfs]$ $HADOOP_HOME/sbin/stop-dfs.sh
 
 These commands can be run from any node where the HDFS configuration is available. The command uses the configuration to determine the Namenodes in the cluster and then starts the Namenode process on those nodes. The Datanodes are started on the nodes specified in the `slaves` file. The script can be used as a reference for building your own scripts to start and stop the cluster.
 
@@ -208,7 +208,7 @@ These commands can be run from any node where the HDFS configuration is availabl
 The Balancer has been changed to work with multiple Namenodes. The Balancer can be run using the command:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start balancer [-policy <policy>]
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start balancer [-policy <policy>]
 
 The policy parameter can be any of the following:
 
@@ -227,11 +227,11 @@ Decommissioning is similar to prior releases. The nodes that need to be decomiss
 
 **Step 1**: To distribute an exclude file to all the Namenodes, use the following command:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/distribute-exclude.sh <exclude_file>
+    [hdfs]$ $HADOOP_HOME/sbin/distribute-exclude.sh <exclude_file>
 
 **Step 2**: Refresh all the Namenodes to pick up the new exclude file:
 
-    [hdfs]$ $HADOOP_PREFIX/sbin/refresh-namenodes.sh
+    [hdfs]$ $HADOOP_HOME/sbin/refresh-namenodes.sh
 
 The above command uses HDFS configuration to determine the configured Namenodes in the cluster and refreshes them to pick up the new exclude file.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithNFS.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithNFS.md
index 51a88c9fba..f888966ed0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithNFS.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithNFS.md
@@ -475,7 +475,7 @@ There are also several other configuration parameters which may be set to contro
 
 After the configuration keys have been added, the next step is to initialize required state in ZooKeeper. You can do so by running the following command from one of the NameNode hosts.
 
-    [hdfs]$ $HADOOP_PREFIX/bin/zkfc -formatZK
+    [hdfs]$ $HADOOP_HOME/bin/zkfc -formatZK
 
 This will create a znode in ZooKeeper inside of which the automatic failover system stores its data.
 
@@ -487,7 +487,7 @@ Since automatic failover has been enabled in the configuration, the `start-dfs.s
 
 If you manually manage the services on your cluster, you will need to manually start the `zkfc` daemon on each of the machines that runs a NameNode. You can start the daemon by running:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start zkfc
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start zkfc
 
 ### Securing access to ZooKeeper
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithQJM.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithQJM.md
index 8b42386046..9a97add2b2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithQJM.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HDFSHighAvailabilityWithQJM.md
@@ -523,7 +523,7 @@ There are also several other configuration parameters which may be set to contro
 
 After the configuration keys have been added, the next step is to initialize required state in ZooKeeper. You can do so by running the following command from one of the NameNode hosts.
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs zkfc -formatZK
+    [hdfs]$ $HADOOP_HOME/bin/hdfs zkfc -formatZK
 
 This will create a znode in ZooKeeper inside of which the automatic failover system stores its data.
 
@@ -535,7 +535,7 @@ Since automatic failover has been enabled in the configuration, the `start-dfs.s
 
 If you manually manage the services on your cluster, you will need to manually start the `zkfc` daemon on each of the machines that runs a NameNode. You can start the daemon by running:
 
-    [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start zkfc
+    [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start zkfc
 
 ### Securing access to ZooKeeper
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsNfsGateway.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsNfsGateway.md
index 7dc2fe46fe..6731189158 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsNfsGateway.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/HdfsNfsGateway.md
@@ -215,7 +215,7 @@ Three daemons are required to provide NFS service: rpcbind (or portmap), mountd
 
 2.  Start Hadoop's portmap (needs root privileges):
 
-        [root]> $HADOOP_PREFIX/bin/hdfs --daemon start portmap
+        [root]> $HADOOP_HOME/bin/hdfs --daemon start portmap
 
 3.  Start mountd and nfsd.
 
@@ -224,12 +224,12 @@ Three daemons are required to provide NFS service: rpcbind (or portmap), mountd
     While in secure mode, any user can start NFS gateway as long as the user has read access to the Kerberos keytab defined in "nfs.keytab.file".
 
-        [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon start nfs3
+        [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon start nfs3
 
 4.  Stop NFS gateway services.
 
-        [hdfs]$ $HADOOP_PREFIX/bin/hdfs --daemon stop nfs3
-        [root]> $HADOOP_PREFIX/bin/hdfs --daemon stop portmap
+        [hdfs]$ $HADOOP_HOME/bin/hdfs --daemon stop nfs3
+        [root]> $HADOOP_HOME/bin/hdfs --daemon stop portmap
 
 Optionally, you can forgo running the Hadoop-provided portmap daemon and instead use the system portmap daemon on all operating systems if you start the NFS Gateway as root. This will allow the HDFS NFS Gateway to work around the aforementioned bug and still register using the system portmap daemon. To do so, just start the NFS gateway daemon as you normally would, but make sure to do so as the "root" user, and also set the "HADOOP\_PRIVILEGED\_NFS\_USER" environment variable to an unprivileged user. In this mode the NFS Gateway will start as root to perform its initial registration with the system portmap, and then will drop privileges back to the user specified by the HADOOP\_PRIVILEGED\_NFS\_USER afterward and for the rest of the duration of the lifetime of the NFS Gateway process. Note that if you choose this route, you should skip steps 1 and 2 above.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java
index 198dafb341..71c9c56712 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java
@@ -62,7 +62,7 @@ private String getHostPortForNN(MiniDFSCluster cluster) {
   public void testCreateAndDestroySpanReceiver() throws Exception {
     Configuration conf = new Configuration();
     conf = new Configuration();
-    conf.set(TraceUtils.DEFAULT_HADOOP_PREFIX +
+    conf.set(TraceUtils.DEFAULT_HADOOP_TRACE_PREFIX +
         Tracer.SPAN_RECEIVER_CLASSES_KEY, "");
     MiniDFSCluster cluster =
         new MiniDFSCluster.Builder(conf).numDataNodes(3).build();

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java
index 37c09d1890..b3cf402110 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java
@@ -65,10 +65,10 @@ public static void shutdown() throws IOException {
   public void testShortCircuitTraceHooks() throws IOException {
     assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS);
     conf = new Configuration();
-    conf.set(TraceUtils.DEFAULT_HADOOP_PREFIX +
+    conf.set(TraceUtils.DEFAULT_HADOOP_TRACE_PREFIX +
         Tracer.SPAN_RECEIVER_CLASSES_KEY,
         SetSpanReceiver.class.getName());
-    conf.set(TraceUtils.DEFAULT_HADOOP_PREFIX +
+    conf.set(TraceUtils.DEFAULT_HADOOP_TRACE_PREFIX +
         Tracer.SAMPLER_CLASSES_KEY, "AlwaysSampler");
     conf.setLong("dfs.blocksize", 100 * 1024);

diff --git a/hadoop-mapreduce-project/bin/mapred b/hadoop-mapreduce-project/bin/mapred
index fab5b87f2e..f280f311f2 100755
--- a/hadoop-mapreduce-project/bin/mapred
+++ b/hadoop-mapreduce-project/bin/mapred
@@ -37,8 +37,8 @@ function hadoop_usage
 bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)

 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi
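The same libexec-locating preamble recurs in every launcher this patch touches. A standalone sketch of the idiom; the final default-and-source lines follow the convention visible in the config scripts below, and the exact override handling in the real scripts may differ:

```bash
#!/usr/bin/env bash
# Sketch of the shared bootstrap idiom used by the launchers in this patch.
MYNAME="${BASH_SOURCE-$0}"

# Prefer an explicit HADOOP_HOME; otherwise derive libexec from this
# script's physical location.
if [[ -n "${HADOOP_HOME}" ]]; then
  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
else
  bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
  HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi

# The environment may still override the computed default.
HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$HADOOP_DEFAULT_LIBEXEC_DIR}"
. "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
```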
"${HADOOP_HOME}/libexec/hadoop-config.sh" else echo "ERROR: Hadoop common not found." 2>&1 exit 1 diff --git a/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh b/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh index 57b1ebd01d..998ca90dca 100644 --- a/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh +++ b/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh @@ -21,8 +21,8 @@ function hadoop_usage } # let's locate libexec... -if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else this="${BASH_SOURCE-$0}" bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java index 4f5b6a1c9c..ae457829ad 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java @@ -315,7 +315,7 @@ private static void setupPipesJob(JobConf conf) throws IOException { // # if (exec.contains("#")) { // set default gdb commands for map and reduce task - String defScript = "$HADOOP_PREFIX/src/c++/pipes/debug/pipes-default-script"; + String defScript = "$HADOOP_HOME/src/c++/pipes/debug/pipes-default-script"; setIfUnset(conf, MRJobConfig.MAP_DEBUG_SCRIPT,defScript); setIfUnset(conf, MRJobConfig.REDUCE_DEBUG_SCRIPT,defScript); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java index 12bec0869f..b01954e1c8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java @@ -450,7 +450,7 @@ public static void main(String[] args) { } //Copy the executables over to the remote filesystem - String hadoopHome = System.getenv("HADOOP_PREFIX"); + String hadoopHome = System.getenv("HADOOP_HOME"); fs.copyFromLocalFile(new Path(hadoopHome + "/libhdfs/libhdfs.so." 
diff --git a/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh b/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh
index 57b1ebd01d..998ca90dca 100644
--- a/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh
+++ b/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh
@@ -21,8 +21,8 @@ function hadoop_usage
 }

 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   this="${BASH_SOURCE-$0}"
   bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
index 4f5b6a1c9c..ae457829ad 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/pipes/Submitter.java
@@ -315,7 +315,7 @@ private static void setupPipesJob(JobConf conf) throws IOException {
     // #
     if (exec.contains("#")) {
       // set default gdb commands for map and reduce task
-      String defScript = "$HADOOP_PREFIX/src/c++/pipes/debug/pipes-default-script";
+      String defScript = "$HADOOP_HOME/src/c++/pipes/debug/pipes-default-script";
       setIfUnset(conf, MRJobConfig.MAP_DEBUG_SCRIPT,defScript);
       setIfUnset(conf, MRJobConfig.REDUCE_DEBUG_SCRIPT,defScript);
     }

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
index 12bec0869f..b01954e1c8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DFSCIOTest.java
@@ -450,7 +450,7 @@ public static void main(String[] args) {
       }

       //Copy the executables over to the remote filesystem
-      String hadoopHome = System.getenv("HADOOP_PREFIX");
+      String hadoopHome = System.getenv("HADOOP_HOME");
       fs.copyFromLocalFile(new Path(hadoopHome + "/libhdfs/libhdfs.so." +
           HDFS_LIB_VERSION), HDFS_SHLIB);
       fs.copyFromLocalFile(new Path(hadoopHome + "/libhdfs/hdfs_read"), HDFS_READ);

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
index e6e12ebbe4..ecac83af5b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
@@ -179,7 +179,7 @@ private void runSortValidatorTest(final JobClient jc,

   private String normalizeCommandPath(String command) {
     final String hadoopHome;
-    if ((hadoopHome = System.getenv("HADOOP_PREFIX")) != null) {
+    if ((hadoopHome = System.getenv("HADOOP_HOME")) != null) {
       command = hadoopHome + "/" + command;
     }
     return command;

diff --git a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
index 6b8af97e9c..c502ffd173 100644
--- a/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
+++ b/hadoop-tools/hadoop-archive-logs/src/main/java/org/apache/hadoop/tools/HadoopArchiveLogs.java
@@ -450,7 +450,7 @@ public int compare(AppInfo o1, AppInfo o2) {
    fi
    export HADOOP_CLIENT_OPTS="-Xmx1024m"
    export HADOOP_CLASSPATH=/dist/share/hadoop/tools/lib/hadoop-archive-logs-2.8.0-SNAPSHOT.jar:/dist/share/hadoop/tools/lib/hadoop-archives-2.8.0-SNAPSHOT.jar
-   "$HADOOP_PREFIX"/bin/hadoop org.apache.hadoop.tools.HadoopArchiveLogsRunner -appId "$appId" -user "$user" -workingDir /tmp/logs/archive-logs-work -remoteRootLogDir /tmp/logs -suffix logs
+   "$HADOOP_HOME"/bin/hadoop org.apache.hadoop.tools.HadoopArchiveLogsRunner -appId "$appId" -user "$user" -workingDir /tmp/logs/archive-logs-work -remoteRootLogDir /tmp/logs -suffix logs
    */
   @VisibleForTesting
   void generateScript(File localScript, Path workingDir,
@@ -484,7 +484,7 @@ void generateScript(File localScript, Path workingDir,
       fw.write("m\"\n");
       fw.write("export HADOOP_CLASSPATH=");
       fw.write(classpath);
-      fw.write("\n\"$HADOOP_PREFIX\"/bin/hadoop ");
+      fw.write("\n\"$HADOOP_HOME\"/bin/hadoop ");
       fw.write(HadoopArchiveLogsRunner.class.getName());
       fw.write(" -appId \"$appId\" -user \"$user\" -workingDir ");
       fw.write(workingDir.toString());
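Piecing together the javadoc sample embedded in the hunk above, the script that generateScript() emits comes out roughly as follows; the jar paths and versions are the sample's, $appId and $user are filled in by the generator, and the proxy-user block that precedes it (note the stray `fi` in the context) is elided:

```bash
#!/bin/bash
# Approximate shape of the generated archive-logs wrapper, per the
# javadoc sample above; not a verbatim copy of the generator's output.
export HADOOP_CLIENT_OPTS="-Xmx1024m"
export HADOOP_CLASSPATH=/dist/share/hadoop/tools/lib/hadoop-archive-logs-2.8.0-SNAPSHOT.jar:/dist/share/hadoop/tools/lib/hadoop-archives-2.8.0-SNAPSHOT.jar
"$HADOOP_HOME"/bin/hadoop org.apache.hadoop.tools.HadoopArchiveLogsRunner \
  -appId "$appId" -user "$user" \
  -workingDir /tmp/logs/archive-logs-work \
  -remoteRootLogDir /tmp/logs -suffix logs
```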
+ "HadoopArchiveLogsRunner -appId \"$appId\" -user \"$user\" " + "-workingDir " + workingDir.toString() + " -remoteRootLogDir " + remoteRootLogDir.toString() + " -suffix " + suffix, lines[15]); } else { Assert.assertEquals( - "\"$HADOOP_PREFIX\"/bin/hadoop org.apache.hadoop.tools." + + "\"$HADOOP_HOME\"/bin/hadoop org.apache.hadoop.tools." + "HadoopArchiveLogsRunner -appId \"$appId\" -user \"$user\" " + "-workingDir " + workingDir.toString() + " -remoteRootLogDir " + remoteRootLogDir.toString() + " -suffix " + suffix + " -noProxy", diff --git a/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt b/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt index 73fd6ef15a..47ef31c467 100644 --- a/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt +++ b/hadoop-tools/hadoop-datajoin/src/test/java/org/apache/hadoop/contrib/utils/join/README.txt @@ -20,7 +20,7 @@ B.a31 B.a32 ***************************** *** Invoke SampleDataJoin *** ***************************** -[:~]$ $HADOOP_PREFIX/bin/hadoop jar hadoop-datajoin-examples.jar org.apache.hadoop.contrib.utils.join.DataJoinJob datajoin/input datajoin/output Text 1 org.apache.hadoop.contrib.utils.join.SampleDataJoinMapper org.apache.hadoop.contrib.utils.join.SampleDataJoinReducer org.apache.hadoop.contrib.utils.join.SampleTaggedMapOutput Text +[:~]$ $HADOOP_HOME/bin/hadoop jar hadoop-datajoin-examples.jar org.apache.hadoop.contrib.utils.join.DataJoinJob datajoin/input datajoin/output Text 1 org.apache.hadoop.contrib.utils.join.SampleDataJoinMapper org.apache.hadoop.contrib.utils.join.SampleDataJoinReducer org.apache.hadoop.contrib.utils.join.SampleTaggedMapOutput Text Using TextInputFormat: Text Using TextOutputFormat: Text 07/06/01 19:58:23 INFO mapred.FileInputFormat: Total input paths to process : 2 diff --git a/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script b/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script index 6bacc437e4..7b74fb67a1 100644 --- a/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script +++ b/hadoop-tools/hadoop-pipes/src/main/native/pipes/debug/pipes-default-script @@ -1,3 +1,4 @@ +#!/usr/bin/env bash # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at @@ -9,6 +10,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -core=`find . -name 'core*'` +core=$(find . -name 'core*') #Only pipes programs have 5th argument as program name. -gdb -quiet $5 -c $core -x $HADOOP_PREFIX/src/c++/pipes/debug/pipes-default-gdb-commands.txt +gdb -quiet "${5}" -c "${core}" -x "${HADOOP_HOME}/src/c++/pipes/debug/pipes-default-gdb-commands.txt" diff --git a/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh b/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh index f9bfaef2db..0bd291bb8f 100644 --- a/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh +++ b/hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh @@ -77,8 +77,8 @@ function run_sls_generator() } # let's locate libexec... 
-if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else this="${BASH_SOURCE-$0}" bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P) diff --git a/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh b/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh index 30fd60a444..403c4bb05f 100644 --- a/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh +++ b/hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh @@ -71,8 +71,8 @@ function parse_args() function calculate_classpath { hadoop_add_to_classpath_tools hadoop-sls - hadoop_debug "Injecting ${HADOOP_PREFIX}/share/hadoop/tools/sls/html into CLASSPATH" - hadoop_add_classpath "${HADOOP_PREFIX}/share/hadoop/tools/sls/html" + hadoop_debug "Injecting ${HADOOP_TOOLS_DIR}/sls/html into CLASSPATH" + hadoop_add_classpath "${HADOOP_TOOLS_DIR}/sls/html" } function run_simulation() { @@ -105,8 +105,8 @@ function run_simulation() { } # let's locate libexec... -if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else this="${BASH_SOURCE-$0}" bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P) diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java index 42007a0756..5a07cc325c 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/DumpTypedBytes.java @@ -91,7 +91,7 @@ public int run(String[] args) throws Exception { } private void printUsage() { - System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar" + System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar" + " dumptb "); System.out.println(" Dumps all files that match the given pattern to " + "standard output as typed bytes."); diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java index 5d0112458f..eabf46c83c 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/HadoopStreaming.java @@ -56,7 +56,7 @@ public static void main(String[] args) throws Exception { } private static void printUsage() { - System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar" + System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar" + " [options]"); System.out.println("Options:"); System.out.println(" dumptb Dumps all files that match the" diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/LoadTypedBytes.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/LoadTypedBytes.java index 6470393cba..a7a001cff6 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/LoadTypedBytes.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/LoadTypedBytes.java @@ -89,7 +89,7 @@ public int run(String[] args) throws Exception { } private void printUsage() { - System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar" + 
System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar" + " loadtb "); System.out.println(" Reads typed bytes from standard input" + " and stores them in a sequence file in"); diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java index 118e0fb899..9f5b293b36 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java @@ -502,7 +502,7 @@ public void exitUsage(boolean detailed) { } private void printUsage(boolean detailed) { - System.out.println("Usage: $HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar" + System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar" + " [options]"); System.out.println("Options:"); System.out.println(" -input DFS input file(s) for the Map" @@ -551,7 +551,7 @@ private void printUsage(boolean detailed) { System.out.println(); System.out.println("For more details about these options:"); System.out.println("Use " + - "$HADOOP_PREFIX/bin/hadoop jar hadoop-streaming.jar -info"); + "$HADOOP_HOME/bin/hadoop jar hadoop-streaming.jar -info"); return; } System.out.println(); @@ -611,7 +611,7 @@ private void printUsage(boolean detailed) { System.out.println(" -D stream.non.zero.exit.is.failure=false"); System.out.println("Use a custom hadoop streaming build along with standard" + " hadoop install:"); - System.out.println(" $HADOOP_PREFIX/bin/hadoop jar " + + System.out.println(" $HADOOP_HOME/bin/hadoop jar " + "/path/my-hadoop-streaming.jar [...]\\"); System.out.println(" [...] -D stream.shipped.hadoopstreaming=" + "/path/my-hadoop-streaming.jar"); @@ -625,7 +625,7 @@ private void printUsage(boolean detailed) { System.out.println(" -cmdenv EXAMPLE_DIR=/home/example/dictionaries/"); System.out.println(); System.out.println("Shortcut:"); - System.out.println(" setenv HSTREAMING \"$HADOOP_PREFIX/bin/hadoop jar " + + System.out.println(" setenv HSTREAMING \"$HADOOP_HOME/bin/hadoop jar " + "hadoop-streaming.jar\""); System.out.println(); System.out.println("Example: $HSTREAMING -mapper " + @@ -648,9 +648,9 @@ public void fail(String message) { // -------------------------------------------- protected String getHadoopClientHome() { - String h = env_.getProperty("HADOOP_PREFIX"); // standard Hadoop + String h = env_.getProperty("HADOOP_HOME"); // standard Hadoop if (h == null) { - //fail("Missing required environment variable: HADOOP_PREFIX"); + //fail("Missing required environment variable: HADOOP_HOME"); h = "UNDEF"; } return h; @@ -674,8 +674,8 @@ protected String packageJobJar() throws IOException { // usually found in: build/contrib or build/hadoop--dev-streaming.jar // First try an explicit spec: it's too hard to find our own location in this case: - // $HADOOP_PREFIX/bin/hadoop jar /not/first/on/classpath/custom-hadoop-streaming.jar - // where findInClasspath() would find the version of hadoop-streaming.jar in $HADOOP_PREFIX + // $HADOOP_HOME/bin/hadoop jar /not/first/on/classpath/custom-hadoop-streaming.jar + // where findInClasspath() would find the version of hadoop-streaming.jar in $HADOOP_HOME String runtimeClasses = config_.get("stream.shipped.hadoopstreaming"); // jar or class dir if (runtimeClasses == null) { diff --git a/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh b/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh index f462fad61d..3b41299630 100755 --- 
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh b/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh
index f462fad61d..3b41299630 100755
--- a/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh
+++ b/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh
@@ -26,8 +26,8 @@ function hadoop_usage
 bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)

 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi

diff --git a/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh b/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh
index 33059894dc..358f0c9011 100755
--- a/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh
+++ b/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh
@@ -26,8 +26,8 @@ function hadoop_usage
 bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)

 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi

diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn b/hadoop-yarn-project/hadoop-yarn/bin/yarn
index cb2364b395..cac3bb6c0c 100755
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn
@@ -51,8 +51,8 @@ function hadoop_usage

 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then
-  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+if [[ -n "${HADOOP_HOME}" ]]; then
+  HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec"
 else
   bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
   HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec"

diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh b/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh
index 7df9fa1019..d7fa4066f6 100644
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh
@@ -53,7 +53,7 @@ function hadoop_subproject_init

   hadoop_deprecate_envvar YARN_SLAVES HADOOP_SLAVES

-  HADOOP_YARN_HOME="${HADOOP_YARN_HOME:-$HADOOP_PREFIX}"
+  HADOOP_YARN_HOME="${HADOOP_YARN_HOME:-$HADOOP_HOME}"

   # YARN-1429 added the completely superfluous YARN_USER_CLASSPATH
   # env var.  We're going to override HADOOP_USER_CLASSPATH to keep
@@ -74,8 +74,8 @@ if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
   . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh"
 elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
   . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
-elif [ -e "${HADOOP_PREFIX}/libexec/hadoop-config.sh" ]; then
-  . "${HADOOP_PREFIX}/libexec/hadoop-config.sh"
+elif [ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]; then
+  . "${HADOOP_HOME}/libexec/hadoop-config.sh"
 else
   echo "ERROR: Hadoop common not found." 2>&1
   exit 1

diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh
index 2f886f2a47..a195c60d00 100644
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh
@@ -21,8 +21,8 @@ function hadoop_usage
 }

 # let's locate libexec...
-if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else this="${BASH_SOURCE-$0}" bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P) diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh index 4ef0864824..958c8bd975 100644 --- a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh +++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh @@ -25,8 +25,8 @@ this="${BASH_SOURCE-$0}" bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P) # let's locate libexec... -if [[ -n "${HADOOP_PREFIX}" ]]; then - HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec" +if [[ -n "${HADOOP_HOME}" ]]; then + HADOOP_DEFAULT_LIBEXEC_DIR="${HADOOP_HOME}/libexec" else HADOOP_DEFAULT_LIBEXEC_DIR="${bin}/../libexec" fi diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutorWithMocks.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutorWithMocks.java index c4500c4f88..27ff4383e2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutorWithMocks.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutorWithMocks.java @@ -169,7 +169,7 @@ public void testContainerLaunchInvalidImage() throws IOException { String appSubmitter = "nobody"; String appId = "APP_ID"; String containerId = "CONTAINER_ID"; - String testImage = "testrepo.com/test-image rm -rf $HADOOP_PREFIX/*"; + String testImage = "testrepo.com/test-image rm -rf $HADOOP_HOME/*"; Container container = mock(Container.class, RETURNS_DEEP_STUBS); ContainerId cId = mock(ContainerId.class, RETURNS_DEEP_STUBS); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/DockerContainerExecutor.md.vm b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/DockerContainerExecutor.md.vm index fbfe04b8c2..6a7db436ec 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/DockerContainerExecutor.md.vm +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/DockerContainerExecutor.md.vm @@ -142,7 +142,7 @@ Step 2. Pick a custom Docker image if you want. In this example, we'll use seque Step 3. Run. ```bash -hadoop jar $HADOOP_PREFIX/share/hadoop/mapreduce/hadoop-mapreduce-examples-${project.version}.jar \ +hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-${project.version}.jar \ teragen \ -Dmapreduce.map.env="yarn.nodemanager.docker-container-executor.image-name=sequenceiq/hadoop-docker:2.4.1" \ -Dyarn.app.mapreduce.am.env="yarn.nodemanager.docker-container-executor.image-name=sequenceiq/hadoop-docker:2.4.1" \