#!/usr/bin/env bash
|
|
|
|
|
|
|
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
|
|
|
# contributor license agreements. See the NOTICE file distributed with
|
|
|
|
# this work for additional information regarding copyright ownership.
|
|
|
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
|
|
|
# (the "License"); you may not use this file except in compliance with
|
|
|
|
# the License. You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
|
|
|
|
# Runs a Hadoop command as a daemon.
#
# Environment Variables
#
#   HADOOP_CONF_DIR      Alternate conf dir. Default is ${HADOOP_PREFIX}/conf.
#   HADOOP_LOG_DIR       Where log files are stored. PWD by default.
#   HADOOP_MASTER        host:path where hadoop code should be rsync'd from
#   HADOOP_PID_DIR       The pid files are stored. /tmp by default.
#   HADOOP_IDENT_STRING  A string representing this instance of hadoop. $USER by default
#   HADOOP_NICENESS      The scheduling priority for daemons. Defaults to 0.
##
|
|
|
usage="Usage: hadoop-daemon.sh [--config <conf-dir>] [--hosts hostlistfile] [--script script] (start|stop) <hadoop-command> <args...>"

# if no args specified, show usage
if [ $# -le 1 ]; then
  # Quote the expansion so the usage text is printed verbatim
  # (no word-splitting or accidental glob expansion).
  echo "$usage"
  exit 1
fi
|
|
|
|
|
2010-04-29 23:23:53 +00:00
|
|
|
# Resolve the absolute directory containing this script; BASH_SOURCE is
# preferred over $0 so this also works when the script is sourced.
bin=$(dirname "${BASH_SOURCE-$0}")
bin=$(cd "$bin"; pwd)
|
|
|
|
|
2011-11-18 00:48:54 +00:00
|
|
|
# Locate and source the shared hadoop-config.sh. The libexec directory may be
# overridden with HADOOP_LIBEXEC_DIR; otherwise it is derived from $bin.
DEFAULT_LIBEXEC_DIR="$bin"/../libexec
HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
# Quote the path so a libexec dir containing spaces still sources correctly.
. "$HADOOP_LIBEXEC_DIR/hadoop-config.sh"
|
2009-05-19 04:30:38 +00:00
|
|
|
|
|
|
|
# get arguments
|
|
|
|
|
|
|
|
# get arguments

# The launcher used to run the daemon defaults to bin/hadoop; a leading
# "--script <path>" argument overrides it.
hadoopScript="$HADOOP_PREFIX"/bin/hadoop
case "$1" in
  --script)
    shift
    hadoopScript=$1
    shift
    ;;
esac

# First remaining argument selects start/stop; second names the daemon.
startStop=$1
shift
command=$1
shift
|
|
|
|
|
|
|
|
# Rotate a daemon's console log: log -> log.1, log.1 -> log.2, ...
#   $1 - path of the log file to rotate
#   $2 - number of generations to keep (optional, default 5); the oldest
#        generation is overwritten.
# No-op when $1 does not exist.
hadoop_rotate_log ()
{
  log=$1
  num=5
  if [ -n "$2" ]; then
    num=$2
  fi
  if [ -f "$log" ]; then # rotate logs
    # Shift existing generations up, oldest first, then slot in the new one.
    while [ "$num" -gt 1 ]; do
      # Arithmetic expansion instead of forking `expr` on every iteration.
      prev=$((num - 1))
      [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
      num=$prev
    done
    mv "$log" "$log.$num"
  fi
}
|
|
|
|
|
|
|
|
# Pull in per-deployment environment overrides when the conf dir provides them.
[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ] && . "${HADOOP_CONF_DIR}/hadoop-env.sh"
|
|
|
|
|
2010-08-02 21:10:21 +00:00
|
|
|
# Determine if we're starting a secure datanode, and if so, redefine appropriate variables
# (root + HADOOP_SECURE_DN_USER set): use the secure-datanode pid/log dirs and identity.
if [[ "$command" == "datanode" && "$EUID" -eq 0 && -n "$HADOOP_SECURE_DN_USER" ]]; then
  export HADOOP_PID_DIR="$HADOOP_SECURE_DN_PID_DIR"
  export HADOOP_LOG_DIR="$HADOOP_SECURE_DN_LOG_DIR"
  export HADOOP_IDENT_STRING="$HADOOP_SECURE_DN_USER"
fi
|
|
|
|
|
|
|
|
# Identify this hadoop instance; default to the invoking user's name.
if [ -z "$HADOOP_IDENT_STRING" ]; then
  export HADOOP_IDENT_STRING="$USER"
fi
|
|
|
|
|
|
|
|
|
2009-05-19 04:30:38 +00:00
|
|
|
# get log directory
if [ "$HADOOP_LOG_DIR" = "" ]; then
  export HADOOP_LOG_DIR="$HADOOP_PREFIX/logs"
fi

# Create the log directory if we cannot write to it, and hand ownership to
# the daemon identity so the daemon can write its own logs. Quoted arguments
# keep this working when the path or identity contains spaces.
if [ ! -w "$HADOOP_LOG_DIR" ] ; then
  mkdir -p "$HADOOP_LOG_DIR"
  chown "$HADOOP_IDENT_STRING" "$HADOOP_LOG_DIR"
fi
|
2009-05-19 04:30:38 +00:00
|
|
|
|
|
|
|
# Directory holding pid files; falls back to /tmp when unset or empty.
HADOOP_PID_DIR=${HADOOP_PID_DIR:-/tmp}
|
|
|
|
|
|
|
|
# Per-daemon file names and logging defaults.
export HADOOP_LOGFILE=hadoop-$HADOOP_IDENT_STRING-$command-$HOSTNAME.log
export HADOOP_ROOT_LOGGER=${HADOOP_ROOT_LOGGER:-"INFO,RFA"}
export HADOOP_SECURITY_LOGGER=${HADOOP_SECURITY_LOGGER:-"INFO,RFAS"}

# Console-output file and pid file for this daemon instance.
log=$HADOOP_LOG_DIR/hadoop-$HADOOP_IDENT_STRING-$command-$HOSTNAME.out
pid=$HADOOP_PID_DIR/hadoop-$HADOOP_IDENT_STRING-$command.pid

# Seconds to wait after a polite kill before escalating during stop.
HADOOP_STOP_TIMEOUT=${HADOOP_STOP_TIMEOUT:-5}

# Set default scheduling priority for the daemon.
if [ -z "$HADOOP_NICENESS" ]; then
  export HADOOP_NICENESS=0
fi
|
|
|
|
|
|
|
|
case $startStop in

  (start)

    # Make sure the pid directory exists before the pid file is written.
    [ -w "$HADOOP_PID_DIR" ] || mkdir -p "$HADOOP_PID_DIR"

    # Refuse to start if a live process already owns the pid file.
    # (kill -0 probes for existence without sending a signal.)
    if [ -f "$pid" ]; then
      if kill -0 "$(cat "$pid")" > /dev/null 2>&1; then
        echo "$command running as process $(cat "$pid"). Stop it first."
        exit 1
      fi
    fi

    # Optionally refresh the installation from HADOOP_MASTER before starting.
    if [ "$HADOOP_MASTER" != "" ]; then
      echo "rsync from $HADOOP_MASTER"
      rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' "$HADOOP_MASTER/" "$HADOOP_PREFIX"
    fi

    hadoop_rotate_log "$log"
    echo "starting $command, logging to $log"
    cd "$HADOOP_PREFIX"

    case $command in
      namenode|secondarynamenode|datanode|dfs|dfsadmin|fsck|balancer)
        # HDFS commands are launched through the hdfs script, preferring an
        # explicitly configured HADOOP_HDFS_HOME over HADOOP_PREFIX.
        if [ -z "$HADOOP_HDFS_HOME" ]; then
          hdfsScript="$HADOOP_PREFIX"/bin/hdfs
        else
          hdfsScript="$HADOOP_HDFS_HOME"/bin/hdfs
        fi
        nohup nice -n "$HADOOP_NICENESS" "$hdfsScript" --config "$HADOOP_CONF_DIR" "$command" "$@" > "$log" 2>&1 < /dev/null &
        ;;
      (*)
        nohup nice -n "$HADOOP_NICENESS" "$hadoopScript" --config "$HADOOP_CONF_DIR" "$command" "$@" > "$log" 2>&1 < /dev/null &
        ;;
    esac

    # Record the daemon's pid, surface its first output, then verify it
    # survived startup; exit non-zero if it died immediately.
    echo $! > "$pid"
    sleep 1; head "$log"
    sleep 3;
    if ! ps -p $! > /dev/null ; then
      exit 1
    fi
    ;;

  (stop)

    if [ -f "$pid" ]; then
      TARGET_PID=$(cat "$pid")
      if kill -0 "$TARGET_PID" > /dev/null 2>&1; then
        echo "stopping $command"
        # Ask for a graceful shutdown first; escalate to SIGKILL only if the
        # process is still alive after HADOOP_STOP_TIMEOUT seconds.
        kill "$TARGET_PID"
        sleep "$HADOOP_STOP_TIMEOUT"
        if kill -0 "$TARGET_PID" > /dev/null 2>&1; then
          echo "$command did not stop gracefully after $HADOOP_STOP_TIMEOUT seconds: killing with kill -9"
          kill -9 "$TARGET_PID"
        fi
      else
        echo "no $command to stop"
      fi
    else
      echo "no $command to stop"
    fi
    ;;

  (*)
    echo "$usage"
    exit 1
    ;;

esac
|
|
|
|
|
|
|
|
|