HADOOP-11208. Replace "daemon" with better name in script subcommands (aw)
commit 72c141ba96
parent bc4ee5e06f
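Summary: every daemon-capable subcommand in the launcher scripts used to set a flag named daemon="true", which a later check used to decide whether to hand control to the daemon handlers. Because that name is easy to confuse with the runtime daemon mode (HADOOP_DAEMON_MODE), the flag is renamed to supportdaemonization. The hunks below appear to cover the common CHANGES.txt plus the hdfs, mapred, and yarn launcher scripts (judging from the class names they set). A minimal sketch of the pattern after the rename, using only names visible in the hunks (hadoop_secure_daemon_handler is provided by the shared shell function library; its trailing arguments beyond those shown here are elided):

case ${COMMAND} in
  namenode)
    supportdaemonization="true"    # was: daemon="true"
    CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
    ;;
esac

if [[ -n "${supportdaemonization}" ]]; then
  if [[ -n "${secure_service}" ]]; then
    # privileged services are handed to the secure daemon handler;
    # remaining handler arguments are elided in this sketch
    hadoop_secure_daemon_handler "${HADOOP_DAEMON_MODE}" "${COMMAND}" \
      "${CLASS}" "${daemon_pidfile}" "${daemon_outfile}"
  fi
fi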
@@ -141,6 +141,8 @@ Trunk (Unreleased)
     HADOOP-11150. hadoop command should show the reason on failure by
     invalid COMMAND or CLASSNAME (Masatake Iwasaki via aw)
 
+    HADOOP-11208. Replace "daemon" with better name in script subcommands (aw)
+
   BUG FIXES
 
     HADOOP-9451. Fault single-layer config if node group topology is enabled.
@@ -85,7 +85,7 @@ shift
 
 case ${COMMAND} in
   balancer)
-    daemon="true"
+    supportdaemonization="true"
     CLASS=org.apache.hadoop.hdfs.server.balancer.Balancer
     hadoop_debug "Appending HADOOP_BALANCER_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_BALANCER_OPTS}"
@@ -102,7 +102,7 @@ case ${COMMAND} in
     CLASS=org.apache.hadoop.hdfs.tools.CryptoAdmin
     ;;
   datanode)
-    daemon="true"
+    supportdaemonization="true"
     # Determine if we're starting a secure datanode, and
     # if so, redefine appropriate variables
     if [[ -n "${HADOOP_SECURE_DN_USER}" ]]; then
@@ -158,7 +158,7 @@ case ${COMMAND} in
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
     ;;
   journalnode)
-    daemon="true"
+    supportdaemonization="true"
     CLASS='org.apache.hadoop.hdfs.qjournal.server.JournalNode'
     hadoop_debug "Appending HADOOP_JOURNALNODE_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_JOURNALNODE_OPTS}"
@@ -170,19 +170,19 @@ case ${COMMAND} in
     CLASS=org.apache.hadoop.hdfs.tools.snapshot.LsSnapshottableDir
     ;;
   mover)
-    daemon="true"
+    supportdaemonization="true"
     CLASS=org.apache.hadoop.hdfs.server.mover.Mover
     hadoop_debug "Appending HADOOP_MOVER_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_MOVER_OPTS}"
     ;;
   namenode)
-    daemon="true"
+    supportdaemonization="true"
     CLASS='org.apache.hadoop.hdfs.server.namenode.NameNode'
     hadoop_debug "Appending HADOOP_NAMENODE_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_NAMENODE_OPTS}"
     ;;
   nfs3)
-    daemon="true"
+    supportdaemonization="true"
     if [[ -n "${HADOOP_PRIVILEGED_NFS_USER}" ]]; then
       secure_service="true"
       secure_user="${HADOOP_PRIVILEGED_NFS_USER}"
@@ -211,13 +211,13 @@ case ${COMMAND} in
     CLASS=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer
     ;;
   portmap)
-    daemon="true"
+    supportdaemonization="true"
     CLASS=org.apache.hadoop.portmap.Portmap
     hadoop_debug "Appending HADOOP_PORTMAP_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_PORTMAP_OPTS}"
     ;;
   secondarynamenode)
-    daemon="true"
+    supportdaemonization="true"
     CLASS='org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode'
     hadoop_debug "Appending HADOOP_SECONDARYNAMENODE_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_SECONDARYNAMENODE_OPTS}"
@@ -232,7 +232,7 @@ case ${COMMAND} in
     CLASS=org.apache.hadoop.util.VersionInfo
     ;;
   zkfc)
-    daemon="true"
+    supportdaemonization="true"
     CLASS='org.apache.hadoop.hdfs.tools.DFSZKFailoverController'
     hadoop_debug "Appending HADOOP_ZKFC_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_ZKFC_OPTS}"
@@ -274,7 +274,7 @@ fi
 hadoop_add_param HADOOP_OPTS Xmx "${JAVA_HEAP_MAX}"
 hadoop_finalize
 
-if [[ -n "${daemon}" ]]; then
+if [[ -n "${supportdaemonization}" ]]; then
   if [[ -n "${secure_service}" ]]; then
     hadoop_secure_daemon_handler \
       "${HADOOP_DAEMON_MODE}" "${COMMAND}" "${CLASS}"\
@@ -90,7 +90,7 @@ case ${COMMAND} in
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
     ;;
   historyserver)
-    daemon="true"
+    supportdaemonization="true"
     CLASS=org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer
     hadoop_debug "Appending HADOOP_JOB_HISTORYSERVER_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_JOB_HISTORYSERVER_OPTS}"
@@ -150,7 +150,7 @@ fi
 hadoop_add_param HADOOP_OPTS Xmx "${JAVA_HEAP_MAX}"
 hadoop_finalize
 
-if [[ -n "${daemon}" ]]; then
+if [[ -n "${supportdaemonization}" ]]; then
   if [[ -n "${secure_service}" ]]; then
     hadoop_secure_daemon_handler "${HADOOP_DAEMON_MODE}" "${COMMAND}"\
       "${CLASS}" "${daemon_pidfile}" "${daemon_outfile}" \
@@ -95,7 +95,7 @@ case "${COMMAND}" in
     YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
     ;;
   historyserver)
-    daemon="true"
+    supportdaemonization="true"
     echo "DEPRECATED: Use of this command to start the timeline server is deprecated." 1>&2
     echo "Instead use the timelineserver command for it." 1>&2
     echo "Starting the History Server anyway..." 1>&2
@@ -112,7 +112,7 @@ case "${COMMAND}" in
     YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
     ;;
   nodemanager)
-    daemon="true"
+    supportdaemonization="true"
     CLASS='org.apache.hadoop.yarn.server.nodemanager.NodeManager'
     hadoop_debug "Append YARN_NODEMANAGER_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_NODEMANAGER_OPTS}"
@@ -121,7 +121,7 @@ case "${COMMAND}" in
     fi
     ;;
   proxyserver)
-    daemon="true"
+    supportdaemonization="true"
     CLASS='org.apache.hadoop.yarn.server.webproxy.WebAppProxyServer'
     hadoop_debug "Append YARN_PROXYSERVER_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_PROXYSERVER_OPTS}"
@@ -135,7 +135,7 @@ case "${COMMAND}" in
     YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
     ;;
   resourcemanager)
-    daemon="true"
+    supportdaemonization="true"
     CLASS='org.apache.hadoop.yarn.server.resourcemanager.ResourceManager'
     YARN_OPTS="${YARN_OPTS} ${YARN_RESOURCEMANAGER_OPTS}"
     hadoop_debug "Append YARN_RESOURCEMANAGER_OPTS onto YARN_OPTS"
@@ -149,7 +149,7 @@ case "${COMMAND}" in
     YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
     ;;
   timelineserver)
-    daemon="true"
+    supportdaemonization="true"
     CLASS='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
     hadoop_debug "Append YARN_TIMELINESERVER_OPTS onto YARN_OPTS"
     YARN_OPTS="${YARN_OPTS} ${YARN_TIMELINESERVER_OPTS}"
@@ -158,7 +158,7 @@ case "${COMMAND}" in
     fi
     ;;
   sharedcachemanager)
-    daemon="true"
+    supportdaemonization="true"
     CLASS='org.apache.hadoop.yarn.server.sharedcachemanager.SharedCacheManager'
     YARN_OPTS="$YARN_OPTS $YARN_SHAREDCACHEMANAGER_OPTS"
     ;;
@@ -206,7 +206,7 @@ hadoop_add_param HADOOP_OPTS yarn.root.logger "-Dyarn.root.logger=${YARN_ROOT_LOGGER}"
 
 hadoop_finalize
 
-if [[ -n "${daemon}" ]]; then
+if [[ -n "${supportdaemonization}" ]]; then
   if [[ -n "${secure_service}" ]]; then
     hadoop_secure_daemon_handler "${HADOOP_DAEMON_MODE}" "${COMMAND}" \
       "${CLASS}" "${daemon_pidfile}" "${daemon_outfile}" \
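Design note: the rename is internal to the launcher scripts and changes no behavior. supportdaemonization only records that a subcommand is able to run as a daemon; whether it actually does so is still governed by HADOOP_DAEMON_MODE, which the caller sets (in trunk, typically via the scripts' --daemon option, an assumption based on the trunk shell rewrite rather than on these hunks). For example:

# hypothetical invocation, assuming the trunk launcher's --daemon option:
# namenode sets supportdaemonization="true"; --daemon selects the runtime mode
hdfs --daemon start namenode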