HDFS-2323. start-dfs.sh script fails for tarball install

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1170233 13f79535-47bb-0310-9956-ffa450edef68
Thomas White 2011-09-13 16:53:57 +00:00
parent 6c3b59505b
commit e4dc2e1f56
6 changed files with 15 additions and 10 deletions

@@ -1034,7 +1034,9 @@ Release 0.23.0 - Unreleased
     (todd)
 
     HDFS-2289. Ensure jsvc is bundled with the HDFS distribution artifact.
-    (Alejandro Abdelnur via acmurthy)
+    (Alejandro Abdelnur via acmurthy)
+
+    HDFS-2323. start-dfs.sh script fails for tarball install (tomwhite)
 
   BREAKDOWN OF HDFS-1073 SUBTASKS

@@ -51,7 +51,7 @@ NAMENODES=$($HADOOP_PREFIX/bin/hdfs getconf -namenodes)
 echo "Starting namenodes on [$NAMENODES]"
 
-"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+"$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
   --config "$HADOOP_CONF_DIR" \
   --hostnames "$NAMENODES" \
   --script "$bin/hdfs" start namenode $nameStartOpt
@@ -64,7 +64,7 @@ if [ -n "$HADOOP_SECURE_DN_USER" ]; then
     "Attempting to start secure cluster, skipping datanodes. " \
     "Run start-secure-dns.sh as root to complete startup."
 else
-  "$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+  "$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
     --config "$HADOOP_CONF_DIR" \
     --script "$bin/hdfs" start datanode $dataStartOpt
 fi
@@ -84,7 +84,7 @@ if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
 else
   echo "Starting secondary namenodes [$SECONDARY_NAMENODES]"
 
-  "$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+  "$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
     --config "$HADOOP_CONF_DIR" \
     --hostnames "$SECONDARY_NAMENODES" \
     --script "$bin/hdfs" start secondarynamenode

@@ -25,7 +25,7 @@ bin=`cd "$bin"; pwd`
 . "$bin"/../libexec/hdfs-config.sh
 
 if [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
-  "$HADOOP_PREFIX"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start datanode $dataStartOpt
+  "$HADOOP_PREFIX"/sbin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start datanode $dataStartOpt
 else
   echo $usage
 fi

@@ -27,7 +27,7 @@ NAMENODES=$($HADOOP_PREFIX/bin/hdfs getconf -namenodes)
 echo "Stopping namenodes on [$NAMENODES]"
 
-"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+"$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
   --config "$HADOOP_CONF_DIR" \
   --hostnames "$NAMENODES" \
   --script "$bin/hdfs" stop namenode
@@ -40,7 +40,7 @@ if [ -n "$HADOOP_SECURE_DN_USER" ]; then
     "Attempting to stop secure cluster, skipping datanodes. " \
     "Run stop-secure-dns.sh as root to complete shutdown."
 else
-  "$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+  "$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
     --config "$HADOOP_CONF_DIR" \
     --script "$bin/hdfs" stop datanode
 fi
@@ -60,7 +60,7 @@ if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
 else
   echo "Stopping secondary namenodes [$SECONDARY_NAMENODES]"
 
-  "$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
+  "$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
     --config "$HADOOP_CONF_DIR" \
     --hostnames "$SECONDARY_NAMENODES" \
     --script "$bin/hdfs" stop secondarynamenode

@@ -25,7 +25,7 @@ bin=`cd "$bin"; pwd`
 . "$bin"/../libexec/hdfs-config.sh
 
 if [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
-  "$HADOOP_PREFIX"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs stop datanode
+  "$HADOOP_PREFIX"/sbin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs stop datanode
 else
   echo $usage
 fi
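
All four scripts above get the same one-word fix: hadoop-daemons.sh is now invoked from $HADOOP_PREFIX/sbin rather than $HADOOP_PREFIX/bin, matching the tarball layout in which the daemon launcher sits under sbin/ while user commands such as bin/hdfs stay where they were. A minimal sanity check along those lines is sketched below; it is not part of this commit, and the messages it prints are illustrative only.

#!/usr/bin/env bash
# Illustrative check, not part of this commit: confirm the daemon launcher
# lives under sbin/ and the hdfs command under bin/ in an extracted tarball.
: "${HADOOP_PREFIX:?set HADOOP_PREFIX to the extracted tarball directory}"

for f in sbin/hadoop-daemons.sh bin/hdfs; do
  if [ -x "$HADOOP_PREFIX/$f" ]; then
    echo "found $HADOOP_PREFIX/$f"
  else
    echo "missing or not executable: $HADOOP_PREFIX/$f" >&2
  fi
done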

@@ -227,7 +227,10 @@ void printOut(String message) {
   void printList(List<InetSocketAddress> list) {
     StringBuilder buffer = new StringBuilder();
     for (InetSocketAddress address : list) {
-      buffer.append(address.getHostName()).append(" ");
+      if (buffer.length() > 0) {
+        buffer.append(" ");
+      }
+      buffer.append(address.getHostName());
     }
     printOut(buffer.toString());
   }
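
The Java hunk above changes printList so the separator is written between elements rather than after every element, which keeps the printed namenode list from ending in a trailing space. Below is a self-contained sketch of the same join pattern; the class, method, and host names are illustrative and not taken from the Hadoop source.

import java.util.Arrays;
import java.util.List;

// Illustrative only: the separator-between-elements pattern used above,
// shown outside the HDFS tool that this commit modifies.
public class JoinExample {
  static String joinWithSpaces(List<String> hosts) {
    StringBuilder buffer = new StringBuilder();
    for (String host : hosts) {
      if (buffer.length() > 0) {
        buffer.append(" ");   // separator only between elements, never trailing
      }
      buffer.append(host);
    }
    return buffer.toString();
  }

  public static void main(String[] args) {
    // Prints "nn1.example.com nn2.example.com" with no trailing space.
    System.out.println(joinWithSpaces(Arrays.asList("nn1.example.com", "nn2.example.com")));
  }
}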