HADOOP-11013. CLASSPATH handling should be consolidated, debuggable (aw)
commit d8774cc577
parent 7119bd49c8
@@ -125,6 +125,8 @@ Trunk (Unreleased)
 
     HADOOP-10485. Remove dead classes in hadoop-streaming. (wheat9)
 
+    HADOOP-11013. CLASSPATH handling should be consolidated, debuggable (aw)
+
   BUG FIXES
 
     HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -114,6 +114,7 @@ case ${COMMAND} in
   ;;
   archive)
     CLASS=org.apache.hadoop.tools.HadoopArchives
+    hadoop_debug "Injecting TOOL_PATH into CLASSPATH"
    hadoop_add_classpath "${TOOL_PATH}"
   ;;
   checknative)
@@ -136,10 +137,12 @@ case ${COMMAND} in
   ;;
   distch)
     CLASS=org.apache.hadoop.tools.DistCh
+    hadoop_debug "Injecting TOOL_PATH into CLASSPATH"
    hadoop_add_classpath "${TOOL_PATH}"
   ;;
   distcp)
     CLASS=org.apache.hadoop.tools.DistCp
+    hadoop_debug "Injecting TOOL_PATH into CLASSPATH"
    hadoop_add_classpath "${TOOL_PATH}"
   ;;
   fs)
@@ -168,11 +171,11 @@ case ${COMMAND} in
 esac
 
 # Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
+hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
 HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 
 hadoop_add_param HADOOP_OPTS Xmx "${JAVA_HEAP_MAX}"
 
 hadoop_finalize
-export CLASSPATH
 hadoop_java_exec "${COMMAND}" "${CLASS}" "$@"
 
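A note on the ordering the hunk above preserves: command-specific options are folded in first and HADOOP_CLIENT_OPTS is appended last, so user-level client settings win when the JVM sees a flag twice. A hedged illustration (values are made up, not defaults):

    # For most JVMs, the last occurrence of a repeated flag wins,
    # so a user-level -Xmx set here overrides any earlier -Xmx.
    export HADOOP_CLIENT_OPTS="-Xmx512m"
    hadoop fs -ls /   # FsShell JVM starts with -Xmx512m appended last
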
@@ -129,6 +129,11 @@ while [[ -z "${_hadoop_common_done}" ]]; do
         hadoop_exit_with_usage 1
       fi
     ;;
+    --debug)
+      shift
+      # shellcheck disable=SC2034
+      HADOOP_SHELL_SCRIPT_DEBUG=true
+    ;;
     --help|-help|-h|help|--h|--\?|-\?|\?)
       hadoop_exit_with_usage 0
     ;;
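With this arm in place, any entry point that routes its arguments through hadoop-config.sh accepts --debug ahead of the subcommand. A sketch of the intended workflow (the exact DEBUG lines depend on your layout; the file name is arbitrary):

    # hadoop_debug writes to stderr with a "DEBUG:" prefix, so the
    # trace can be captured separately from normal command output.
    hadoop --debug fs -ls /tmp 2>classpath-trace.txt
    grep '^DEBUG:' classpath-trace.txt
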
@@ -21,6 +21,13 @@ function hadoop_error
   echo "$*" 1>&2
 }
 
+function hadoop_debug
+{
+  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
+    echo "DEBUG: $*" 1>&2
+  fi
+}
+
 function hadoop_bootstrap_init
 {
   # NOTE: This function is not user replaceable.
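The helper is small enough to try in isolation. A minimal sketch, lifted straight from the hunk above, showing the gating variable at work:

    function hadoop_debug
    {
      if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
        echo "DEBUG: $*" 1>&2
      fi
    }

    HADOOP_SHELL_SCRIPT_DEBUG=true
    hadoop_debug "Initial CLASSPATH=/etc/hadoop"   # prints "DEBUG: ..." to stderr
    unset HADOOP_SHELL_SCRIPT_DEBUG
    hadoop_debug "now silent"                      # prints nothing
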
@@ -62,6 +69,7 @@ function hadoop_bootstrap_init
 
   # defaults
   export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
+  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
 }
 
 function hadoop_find_confdir
@@ -80,6 +88,8 @@ function hadoop_find_confdir
     conf_dir="etc/hadoop"
   fi
   export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-${HADOOP_PREFIX}/${conf_dir}}"
+
+  hadoop_debug "HADOOP_CONF_DIR=${HADOOP_CONF_DIR}"
 }
 
 function hadoop_exec_hadoopenv
@@ -105,6 +115,7 @@ function hadoop_basic_init
 
   # CLASSPATH initially contains $HADOOP_CONF_DIR
   CLASSPATH="${HADOOP_CONF_DIR}"
+  hadoop_debug "Initial CLASSPATH=${HADOOP_CONF_DIR}"
 
   if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
      [[ -d "${HADOOP_PREFIX}/${HADOOP_COMMON_DIR}" ]]; then
@@ -116,19 +127,19 @@ function hadoop_basic_init
 
   # define HADOOP_HDFS_HOME
   if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
-   [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
+     [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
     export HADOOP_HDFS_HOME="${HADOOP_PREFIX}"
   fi
 
   # define HADOOP_YARN_HOME
   if [[ -z "${HADOOP_YARN_HOME}" ]] &&
-   [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
+     [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
     export HADOOP_YARN_HOME="${HADOOP_PREFIX}"
   fi
 
   # define HADOOP_MAPRED_HOME
   if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
-   [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
+     [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
     export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
   fi
 
@@ -274,6 +285,9 @@ function hadoop_add_param
   if [[ ! ${!1} =~ $2 ]] ; then
     # shellcheck disable=SC2086
     eval $1="'${!1} $3'"
+    hadoop_debug "$1 accepted $3"
+  else
+    hadoop_debug "$1 declined $3"
   fi
 }
 
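hadoop_add_param appends $3 to the variable named by $1 only when that variable does not already match the regex in $2; the new branches report which way the call went. A standalone sketch (assumes the hadoop_debug helper above is defined and the debug switch is on):

    HADOOP_OPTS=""
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx1g"   # DEBUG: HADOOP_OPTS accepted -Xmx1g
    hadoop_add_param HADOOP_OPTS Xmx "-Xmx2g"   # DEBUG: HADOOP_OPTS declined -Xmx2g
    echo "${HADOOP_OPTS}"                       # only the first -Xmx survives
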
@@ -283,8 +297,8 @@ function hadoop_add_classpath
   # $1 = directory, file, wildcard, whatever to add
   # $2 = before or after, which determines where in the
   #      classpath this object should go. default is after
-  # return 0 = success
-  # return 1 = failure (duplicate, doesn't exist, whatever)
+  # return 0 = success (added or duplicate)
+  # return 1 = failure (doesn't exist, whatever)
 
   # However, with classpath (& JLP), we can do dedupe
   # along with some sanity checking (e.g., missing directories)
@@ -295,23 +309,29 @@ function hadoop_add_classpath
   if [[ $1 =~ ^.*\*$ ]]; then
     local mp=$(dirname "$1")
     if [[ ! -d "${mp}" ]]; then
+      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
       return 1
     fi
 
     # no wildcard in the middle, so check existence
     # (doesn't matter *what* it is)
   elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
+    hadoop_debug "Rejected CLASSPATH: $1 (does not exist)"
     return 1
   fi
 
   if [[ -z "${CLASSPATH}" ]]; then
     CLASSPATH=$1
+    hadoop_debug "Initial CLASSPATH=$1"
   elif [[ ":${CLASSPATH}:" != *":$1:"* ]]; then
     if [[ "$2" = "before" ]]; then
       CLASSPATH="$1:${CLASSPATH}"
+      hadoop_debug "Prepend CLASSPATH: $1"
     else
       CLASSPATH+=:$1
+      hadoop_debug "Append CLASSPATH: $1"
     fi
   else
-    return 1
+    hadoop_debug "Dupe CLASSPATH: $1"
   fi
   return 0
 }
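Two behavioral points from this hunk, matching the rewritten return-code comment: a duplicate entry is now logged and treated as success, and rejections are finally visible. An illustrative session (paths are made up; assumes the patched hadoop-functions.sh is sourced and HADOOP_SHELL_SCRIPT_DEBUG is set):

    CLASSPATH=""
    hadoop_add_classpath /etc/hadoop             # DEBUG: Initial CLASSPATH=/etc/hadoop
    hadoop_add_classpath /tmp/extra.jar before   # DEBUG: Prepend CLASSPATH (if the jar exists)
    hadoop_add_classpath /etc/hadoop             # DEBUG: Dupe CLASSPATH; returns 0
    hadoop_add_classpath /no/such/dir/'*'        # DEBUG: Rejected CLASSPATH (not a dir); returns 1
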
@@ -331,14 +351,20 @@ function hadoop_add_colonpath
     if [[ -z "${!1}" ]]; then
       # shellcheck disable=SC2086
       eval $1="'$2'"
+      hadoop_debug "Initial colonpath($1): $2"
     elif [[ "$3" = "before" ]]; then
       # shellcheck disable=SC2086
       eval $1="'$2:${!1}'"
+      hadoop_debug "Prepend colonpath($1): $2"
     else
       # shellcheck disable=SC2086
       eval $1+="'$2'"
+      hadoop_debug "Append colonpath($1): $2"
     fi
     return 0
   fi
+  hadoop_debug "Rejected colonpath($1): $2"
   return 1
 }
 
 function hadoop_add_javalibpath
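hadoop_add_colonpath does the same bookkeeping for any colon-separated variable named by $1, with the optional third argument choosing prepend versus append; the reject path at the bottom implies an existence/dedupe guard earlier in the function, outside this hunk. A hedged usage sketch (the native-library paths are examples, not defaults):

    JAVA_LIBRARY_PATH=""
    hadoop_add_colonpath JAVA_LIBRARY_PATH /usr/lib/hadoop/native     # DEBUG: Initial colonpath(...)
    hadoop_add_colonpath JAVA_LIBRARY_PATH /opt/native/custom before  # DEBUG: Prepend colonpath(...)
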
@@ -397,6 +423,7 @@ function hadoop_add_to_classpath_hdfs
 
 function hadoop_add_to_classpath_yarn
 {
+  local i
   #
   # get all of the yarn jars+config in the path
   #
@@ -459,7 +486,7 @@ function hadoop_add_to_classpath_userpath
   local i
   local j
   let c=0
 
   if [[ -n "${HADOOP_CLASSPATH}" ]]; then
     # I wonder if Java runs on VMS.
     for i in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
@@ -715,6 +742,11 @@ function hadoop_java_exec
   local command=$1
   local class=$2
   shift 2
+
+  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
+  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
+
+  export CLASSPATH
   #shellcheck disable=SC2086
   exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
 }
@@ -727,6 +759,11 @@ function hadoop_start_daemon
   local command=$1
   local class=$2
   shift 2
+
+  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
+  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
+
+  export CLASSPATH
   #shellcheck disable=SC2086
   exec "${JAVA}" "-Dproc_${command}" ${HADOOP_OPTS} "${class}" "$@"
 }
@@ -807,6 +844,9 @@ function hadoop_start_secure_daemon
   # note that shellcheck will throw a
   # bogus for-our-use-case 2086 here.
   # it doesn't properly support multi-line situations
+
+  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
+  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
 
   exec "${jsvc}" \
     "-Dproc_${daemonname}" \
@@ -23,6 +23,7 @@ this="$bin/$script"
 
 DEFAULT_LIBEXEC_DIR="$bin"/../libexec
 HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
+# shellcheck disable=SC2034
 HADOOP_NEW_CONFIG=true
 . "$HADOOP_LIBEXEC_DIR/hadoop-config.sh"
 
@@ -33,10 +34,10 @@ fi
 CLASS='org.apache.hadoop.record.compiler.generated.Rcc'
 
 # Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
-HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
+hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 
-hadoop_add_param HADOOP_OPTS Xmx "$JAVA_HEAP_MAX"
+hadoop_add_param HADOOP_OPTS Xmx "${JAVA_HEAP_MAX}"
 
 hadoop_finalize
-export CLASSPATH
 hadoop_java_exec rcc "${CLASS}" "$@"
@@ -80,6 +80,7 @@ shift
 case ${COMMAND} in
   balancer)
     CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
+    hadoop_debug "Appending HADOOP_BALANCER_OPTS onto HADOOP_OPTS"
    HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_BALANCER_OPTS}"
   ;;
   cacheadmin)
@@ -105,19 +106,24 @@ case ${COMMAND} in
       HADOOP_SECURE_PID_DIR="${HADOOP_SECURE_PID_DIR:-$HADOOP_SECURE_DN_PID_DIR}"
       HADOOP_SECURE_LOG_DIR="${HADOOP_SECURE_LOG_DIR:-$HADOOP_SECURE_DN_LOG_DIR}"
 
-      HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_DN_SECURE_EXTRA_OPTS} ${HADOOP_DATANODE_OPTS}"
+      hadoop_debug "Appending HADOOP_DATANODE_OPTS onto HADOOP_OPTS"
+      hadoop_debug "Appending HADOOP_DN_SECURE_EXTRA_OPTS onto HADOOP_OPTS"
+      HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_DATANODE_OPTS} ${HADOOP_DN_SECURE_EXTRA_OPTS}"
       CLASS="org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter"
     else
+      hadoop_debug "Appending HADOOP_DATANODE_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_DATANODE_OPTS}"
       CLASS='org.apache.hadoop.hdfs.server.datanode.DataNode'
     fi
   ;;
   dfs)
     CLASS=org.apache.hadoop.fs.FsShell
+    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
   dfsadmin)
     CLASS=org.apache.hadoop.hdfs.tools.DFSAdmin
+    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
   fetchdt)
@@ -125,6 +131,7 @@ case ${COMMAND} in
   ;;
   fsck)
     CLASS=org.apache.hadoop.hdfs.tools.DFSck
+    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
   getconf)
@@ -135,12 +142,15 @@ case ${COMMAND} in
   ;;
   haadmin)
     CLASS=org.apache.hadoop.hdfs.tools.DFSHAAdmin
-    CLASSPATH="${CLASSPATH}:${TOOL_PATH}"
+    hadoop_debug "Injecting TOOL_PATH into CLASSPATH"
+    hadoop_add_classpath "${TOOL_PATH}"
+    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
   journalnode)
     daemon="true"
     CLASS='org.apache.hadoop.hdfs.qjournal.server.JournalNode'
+    hadoop_debug "Appending HADOOP_JOURNALNODE_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_JOURNALNODE_OPTS}"
   ;;
   jmxget)
@@ -152,6 +162,7 @@ case ${COMMAND} in
   namenode)
     daemon="true"
     CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
+    hadoop_debug "Appending HADOOP_NAMENODE_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_NAMENODE_OPTS}"
   ;;
   nfs3)
@@ -164,9 +175,12 @@ case ${COMMAND} in
       HADOOP_SECURE_PID_DIR="${HADOOP_SECURE_PID_DIR:-$HADOOP_SECURE_NFS3_PID_DIR}"
       HADOOP_SECURE_LOG_DIR="${HADOOP_SECURE_LOG_DIR:-$HADOOP_SECURE_NFS3_LOG_DIR}"
 
-      HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_NFS3_SECURE_EXTRA_OPTS} ${HADOOP_NFS3_OPTS}"
+      hadoop_debug "Appending HADOOP_NFS3_OPTS onto HADOOP_OPTS"
+      hadoop_debug "Appending HADOOP_NFS3_SECURE_EXTRA_OPTS onto HADOOP_OPTS"
+      HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_NFS3_OPTS} ${HADOOP_NFS3_SECURE_EXTRA_OPTS}"
       CLASS=org.apache.hadoop.hdfs.nfs.nfs3.PrivilegedNfsGatewayStarter
     else
+      hadoop_debug "Appending HADOOP_NFS3_OPTS onto HADOOP_OPTS"
       HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_NFS3_OPTS}"
       CLASS=org.apache.hadoop.hdfs.nfs.nfs3.Nfs3
     fi
@@ -183,11 +197,13 @@ case ${COMMAND} in
   portmap)
     daemon="true"
     CLASS=org.apache.hadoop.portmap.Portmap
+    hadoop_debug "Appending HADOOP_PORTMAP_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_PORTMAP_OPTS}"
   ;;
   secondarynamenode)
     daemon="true"
     CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
+    hadoop_debug "Appending HADOOP_SECONDARYNAMENODE_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_SECONDARYNAMENODE_OPTS}"
   ;;
   snapshotDiff)
@@ -196,6 +212,7 @@ case ${COMMAND} in
   zkfc)
     daemon="true"
     CLASS='org.apache.hadoop.hdfs.tools.DFSZKFailoverController'
+    hadoop_debug "Appending HADOOP_ZKFC_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_ZKFC_OPTS}"
   ;;
   -*)
@@ -236,8 +253,6 @@ fi
 hadoop_add_param HADOOP_OPTS Xmx "${JAVA_HEAP_MAX}"
 hadoop_finalize
 
-export CLASSPATH
-
 if [[ -n "${daemon}" ]]; then
   if [[ -n "${secure_service}" ]]; then
     hadoop_secure_daemon_handler \
@@ -64,13 +64,15 @@ shift
 
 case ${COMMAND} in
   mradmin|jobtracker|tasktracker|groups)
-    echo "Sorry, the ${COMMAND} command is no longer supported."
-    echo "You may find similar functionality with the \"yarn\" shell command."
+    hadoop_error "Sorry, the ${COMMAND} command is no longer supported."
+    hadoop_error "You may find similar functionality with the \"yarn\" shell command."
     hadoop_exit_with_usage 1
   ;;
   archive)
     CLASS=org.apache.hadoop.tools.HadoopArchives
+    hadoop_debug "Injecting TOOL_PATH into CLASSPATH"
     hadoop_add_classpath "${TOOL_PATH}"
+    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
   classpath)
@@ -80,12 +82,15 @@ case ${COMMAND} in
   ;;
   distcp)
     CLASS=org.apache.hadoop.tools.DistCp
+    hadoop_debug "Injecting TOOL_PATH into CLASSPATH"
     hadoop_add_classpath "${TOOL_PATH}"
+    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
   historyserver)
     daemon="true"
     CLASS=org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer
+    hadoop_debug "Appending HADOOP_JOB_HISTORYSERVER_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_JOB_HISTORYSERVER_OPTS}"
     if [ -n "${HADOOP_JOB_HISTORYSERVER_HEAPSIZE}" ]; then
       JAVA_HEAP_MAX="-Xmx${HADOOP_JOB_HISTORYSERVER_HEAPSIZE}m"
@@ -97,6 +102,7 @@ case ${COMMAND} in
   ;;
   pipes)
     CLASS=org.apache.hadoop.mapred.pipes.Submitter
+    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
   queue)
@@ -104,10 +110,12 @@ case ${COMMAND} in
   ;;
   sampler)
     CLASS=org.apache.hadoop.mapred.lib.InputSampler
+    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
   version)
     CLASS=org.apache.hadoop.util.VersionInfo
+    hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
   -*|*)
@@ -130,8 +138,6 @@ fi
 hadoop_add_param HADOOP_OPTS Xmx "${JAVA_HEAP_MAX}"
 hadoop_finalize
 
-export CLASSPATH
-
 if [[ -n "${daemon}" ]]; then
   if [[ -n "${secure_service}" ]]; then
     hadoop_secure_daemon_handler "${HADOOP_DAEMON_MODE}" "${COMMAND}"\
@@ -72,6 +72,7 @@ shift
 case "${COMMAND}" in
   application|applicationattempt|container)
     CLASS=org.apache.hadoop.yarn.client.cli.ApplicationCLI
+    hadoop_debug "Append YARN_CLIENT_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
     set -- "${COMMAND}" "$@"
   ;;
@@ -82,10 +83,12 @@ case "${COMMAND}" in
   ;;
   daemonlog)
     CLASS=org.apache.hadoop.log.LogLevel
+    hadoop_debug "Append YARN_CLIENT_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
   ;;
   jar)
     CLASS=org.apache.hadoop.util.RunJar
+    hadoop_debug "Append YARN_CLIENT_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
   ;;
   historyserver)
@@ -97,15 +100,18 @@ case "${COMMAND}" in
   ;;
   logs)
     CLASS=org.apache.hadoop.yarn.logaggregation.LogDumper
+    hadoop_debug "Append YARN_CLIENT_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
   ;;
   node)
     CLASS=org.apache.hadoop.yarn.client.cli.NodeCLI
+    hadoop_debug "Append YARN_CLIENT_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
   ;;
   nodemanager)
     daemon="true"
     CLASS='org.apache.hadoop.yarn.server.nodemanager.NodeManager'
+    hadoop_debug "Append YARN_NODEMANAGER_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_NODEMANAGER_OPTS}"
     if [[ -n "${YARN_NODEMANAGER_HEAPSIZE}" ]]; then
       JAVA_HEAP_MAX="-Xmx${YARN_NODEMANAGER_HEAPSIZE}m"
@@ -114,6 +120,7 @@ case "${COMMAND}" in
   proxyserver)
     daemon="true"
     CLASS='org.apache.hadoop.yarn.server.webproxy.WebAppProxyServer'
+    hadoop_debug "Append YARN_PROXYSERVER_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_PROXYSERVER_OPTS}"
     if [[ -n "${YARN_PROXYSERVER_HEAPSIZE}" ]]; then
       JAVA_HEAP_MAX="-Xmx${YARN_PROXYSERVER_HEAPSIZE}m"
@@ -123,17 +130,20 @@ case "${COMMAND}" in
     daemon="true"
     CLASS='org.apache.hadoop.yarn.server.resourcemanager.ResourceManager'
     YARN_OPTS="${YARN_OPTS} ${YARN_RESOURCEMANAGER_OPTS}"
+    hadoop_debug "Append YARN_RESOURCEMANAGER_OPTS onto YARN_OPTS"
     if [[ -n "${YARN_RESOURCEMANAGER_HEAPSIZE}" ]]; then
       JAVA_HEAP_MAX="-Xmx${YARN_RESOURCEMANAGER_HEAPSIZE}m"
     fi
   ;;
   rmadmin)
     CLASS='org.apache.hadoop.yarn.client.cli.RMAdminCLI'
+    hadoop_debug "Append YARN_CLIENT_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
   ;;
   timelineserver)
     daemon="true"
     CLASS='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
+    hadoop_debug "Append YARN_TIMELINESERVER_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_TIMELINESERVER_OPTS}"
     if [[ -n "${YARN_TIMELINESERVER_HEAPSIZE}" ]]; then
      JAVA_HEAP_MAX="-Xmx${YARN_TIMELINESERVER_HEAPSIZE}m"
@@ -141,6 +151,7 @@ case "${COMMAND}" in
   ;;
   version)
     CLASS=org.apache.hadoop.util.VersionInfo
+    hadoop_debug "Append YARN_CLIENT_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
   ;;
   -*)
@@ -153,6 +164,8 @@ esac
 
 # set HADOOP_OPTS to YARN_OPTS so that we can use
 # finalize, etc, without doing anything funky
+hadoop_debug "Resetting HADOOP_OPTS=YARN_OPTS"
+# shellcheck disable=SC2034
 HADOOP_OPTS="${YARN_OPTS}"
 
 daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${COMMAND}-${HOSTNAME}.out"
@@ -180,8 +193,6 @@ hadoop_add_param HADOOP_OPTS yarn.root.logger "-Dyarn.root.logger=${YARN_ROOT_LO
 
 hadoop_finalize
 
-export CLASSPATH
-
 if [[ -n "${daemon}" ]]; then
   if [[ -n "${secure_service}" ]]; then
     hadoop_secure_daemon_handler "${HADOOP_DAEMON_MODE}" "${COMMAND}" \
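Taken together, the payoff of the patch: every CLASSPATH and OPTS mutation across the hadoop, hdfs, mapred, and yarn launchers is now traceable from one switch. A closing sketch (output shape is illustrative; assumes these entry points all route through the shared hadoop-config.sh argument loop, as the hunks above indicate):

    # Keep only the trace lines from the yarn launcher.
    yarn --debug version 2>&1 1>/dev/null | grep '^DEBUG:'

    # Or keep the trace in a file while preserving normal output.
    hdfs --debug dfsadmin -report 2>hdfs-trace.txt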