HADOOP-7563. Setup HADOOP_HDFS_HOME, HADOOP_MAPRED_HOME and classpath correction. Contributed by Eric Yang.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1161329 13f79535-47bb-0310-9956-ffa450edef68
parent ebf61db566
commit bb0005cfec
@@ -113,7 +113,6 @@
       <useProjectArtifact>false</useProjectArtifact>
       <excludes>
         <exclude>org.apache.ant:*:jar</exclude>
-        <exclude>org.apache.hadoop:hadoop-*:jar</exclude>
         <exclude>jdiff:jdiff:jar</exclude>
       </excludes>
     </dependencySet>
@@ -522,6 +522,9 @@ Trunk (unreleased changes)
     HADOOP-7567. 'mvn eclipse:eclipse' fails for hadoop-alfredo (auth).
     (Alejandro Abdelnur via tomwhite)

+    HADOOP-7563. Setup HADOOP_HDFS_HOME, HADOOP_MAPRED_HOME and classpath
+    correction. (Eric Yang via acmurthy)
+
 Release 0.22.0 - Unreleased

   INCOMPATIBLE CHANGES
@@ -21,11 +21,7 @@ bin=`which $0`
 bin=`dirname ${bin}`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
-  . "$bin"/../libexec/hadoop-config.sh
-else
-  . "$bin"/hadoop-config.sh
-fi
+. "$bin"/../libexec/hadoop-config.sh

 function print_usage(){
   echo "Usage: hadoop [--config confdir] COMMAND"
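Note: this hunk (and the matching ones in the other launcher scripts below) drops the fallback to a config script sitting next to the launcher and sources the libexec copy unconditionally. A minimal sketch of the layout this assumes — paths illustrative, not taken from the patch:

    # assumed install layout:
    #   $PREFIX/bin/hadoop                  <- launcher being patched
    #   $PREFIX/libexec/hadoop-config.sh    <- now the only location probed
    bin=`dirname "$0"`
    bin=`cd "$bin"; pwd`
    . "$bin"/../libexec/hadoop-config.sh    # no else-branch: a missing libexec now fails loudly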
@@ -139,64 +139,19 @@ fi
 # CLASSPATH initially contains $HADOOP_CONF_DIR
 CLASSPATH="${HADOOP_CONF_DIR}"

-# for developers, add Hadoop classes to CLASSPATH
-if [ -d "$HADOOP_PREFIX/build/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/classes
-fi
-if [ -d "$HADOOP_PREFIX/build/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build
-fi
-if [ -d "$HADOOP_PREFIX/build/test/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/test/classes
-fi
-if [ -d "$HADOOP_PREFIX/build/test/core/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/test/core/classes
-fi
-
 # so that filenames w/ spaces are handled correctly in loops below
 IFS=

 # for releases, add core hadoop jar & webapps to CLASSPATH
-if [ -d "$HADOOP_PREFIX/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX
+if [ -d "$HADOOP_PREFIX/share/hadoop/common/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/common/webapps
 fi

 if [ -d "$HADOOP_PREFIX/share/hadoop/common/lib" ]; then
-  for f in $HADOOP_PREFIX/share/hadoop/common/lib/*.jar; do
-    CLASSPATH=${CLASSPATH}:$f;
-  done
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/common/lib'/*'
 fi

-for f in $HADOOP_PREFIX/share/hadoop/common/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-# for developers, add libs to CLASSPATH
-for f in $HADOOP_PREFIX/lib/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-if [ -d "$HADOOP_PREFIX/build/ivy/lib/Hadoop-Common/common" ]; then
-  for f in $HADOOP_PREFIX/build/ivy/lib/Hadoop-Common/common/*.jar; do
-    CLASSPATH=${CLASSPATH}:$f;
-  done
-fi
-
-if [ -d "$HADOOP_PREFIX/build/ivy/lib/hadoop-hdfs/hdfs" ]; then
-  for f in $HADOOP_PREFIX/build/ivy/lib/hadoop-hdfs/hdfs/*.jar; do
-    CLASSPATH=${CLASSPATH}:$f;
-  done
-fi
-
-if [ -d "$HADOOP_PREFIX/build/ivy/lib/Hadoop/mapred" ]; then
-  for f in $HADOOP_PREFIX/build/ivy/lib/Hadoop/mapred/*.jar; do
-    CLASSPATH=${CLASSPATH}:$f;
-  done
-fi
-
-for f in $HADOOP_PREFIX/lib/jsp-2.1/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
+CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/common'/*'

 # add user-specified CLASSPATH last
 if [ "$HADOOP_CLASSPATH" != "" ]; then
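Note: the per-jar `for` loops are replaced with JVM wildcard classpath entries. The trailing `'/*'` is single-quoted so the shell passes it through literally; the JVM (Java 6 and later) expands `dir/*` to every jar in `dir` at startup. A small sketch of the equivalence, with an illustrative main class:

    CLASSPATH="$HADOOP_CONF_DIR:$HADOOP_PREFIX/share/hadoop/common/lib"'/*'
    # roughly equivalent to the removed loop:
    #   for f in $HADOOP_PREFIX/share/hadoop/common/lib/*.jar; do
    #     CLASSPATH=${CLASSPATH}:$f
    #   done
    java -classpath "$CLASSPATH" org.example.Main   # org.example.Main is a placeholder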
@@ -274,37 +229,20 @@ HADOOP_OPTS="$HADOOP_OPTS -Djava.net.preferIPv4Stack=true"
 # put hdfs in classpath if present
 if [ "$HADOOP_HDFS_HOME" = "" ]; then
   if [ -d "${HADOOP_PREFIX}/share/hadoop/hdfs" ]; then
-    HADOOP_HDFS_HOME=$HADOOP_PREFIX/share/hadoop/hdfs
-    #echo Found HDFS installed at $HADOOP_HDFS_HOME
+    HADOOP_HDFS_HOME=$HADOOP_PREFIX
   fi
 fi

-if [ -d "${HADOOP_HDFS_HOME}" ]; then
-  if [ -d "$HADOOP_HDFS_HOME/webapps" ]; then
-    CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME
-  fi
-
-  if [ ! -d "${HADOOP_CONF_DIR}" ] && [ -d "${HADOOP_HDFS_HOME}/conf" ]; then
-    CLASSPATH=${CLASSPATH}:${HADOOP_HDFS_HOME}/conf
-  fi
-
-  for f in $HADOOP_HDFS_HOME/hadoop-hdfs-*.jar; do
-    CLASSPATH=${CLASSPATH}:$f;
-  done
-
-  # add libs to CLASSPATH
-  if [ -d "${HADOOP_HDFS_HOME}/lib" ]; then
-    for f in $HADOOP_HDFS_HOME/lib/*.jar; do
-      CLASSPATH=${CLASSPATH}:$f;
-    done
-  fi
-
-  if [ -d "$HADOOP_HDFS_HOME/build/classes" ]; then
-    CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/classes
-  fi
+if [ -d "$HADOOP_HDFS_HOME/share/hadoop/hdfs/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs
 fi

+if [ -d "$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib'/*'
+fi
+
+CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/share/hadoop/hdfs'/*'

 # cygwin path translation
 if $cygwin; then
   HADOOP_HDFS_HOME=`cygpath -w "$HADOOP_HDFS_HOME"`
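Note: HADOOP_HDFS_HOME now defaults to $HADOOP_PREFIX itself, with the jars picked up from share/hadoop/hdfs underneath it; an explicit export still takes precedence. Sketch of the override, location hypothetical:

    # split deployments can point at a separate HDFS install before calling bin/hadoop:
    export HADOOP_HDFS_HOME=/opt/hadoop-hdfs
    # left unset, the script falls back to HADOOP_HDFS_HOME=$HADOOP_PREFIX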
@@ -313,44 +251,16 @@ fi
 # set mapred home if mapred is present
 if [ "$HADOOP_MAPRED_HOME" = "" ]; then
   if [ -d "${HADOOP_PREFIX}/share/hadoop/mapreduce" ]; then
-    HADOOP_MAPRED_HOME=$HADOOP_PREFIX/share/hadoop/mapreduce
+    HADOOP_MAPRED_HOME=$HADOOP_PREFIX
   fi
 fi

-if [ -d "${HADOOP_MAPRED_HOME}" ]; then
+if [ -d "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/webapps
+fi

-  if [ -d "$HADOOP_MAPRED_HOME/webapps" ]; then
-    CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME
-  fi
-
-  if [ ! -d "${HADOOP_CONF_DIR}" ] && [ -d "${HADOOP_MAPRED_HOME}/conf" ]; then
-    CLASSPATH=${CLASSPATH}:${HADOOP_MAPRED_HOME}/conf
-  fi
-
-  for f in $HADOOP_MAPRED_HOME/hadoop-mapreduce-*.jar; do
-    CLASSPATH=${CLASSPATH}:$f
-  done
-
-  if [ -d "${HADOOP_MAPRED_HOME}/lib" ]; then
-    for f in $HADOOP_MAPRED_HOME/lib/*.jar; do
-      CLASSPATH=${CLASSPATH}:$f
-    done
-  fi
-
-  if [ -d "$HADOOP_MAPRED_HOME/build/classes" ]; then
-    CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/classes
-  fi
-
-  if [ -d "$HADOOP_MAPRED_HOME/build/tools" ]; then
-    CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/tools
-  fi
-
-  for f in $HADOOP_MAPRED_HOME/hadoop-mapreduce-tools-*.jar; do
-    TOOL_PATH=${TOOL_PATH}:$f;
-  done
-  for f in $HADOOP_MAPRED_HOME/build/hadoop-mapreduce-tools-*.jar; do
-    TOOL_PATH=${TOOL_PATH}:$f;
-  done
+if [ -d "$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib'/*'
 fi

 # cygwin path translation
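Note: the MapReduce block mirrors the HDFS one above: HADOOP_MAPRED_HOME defaults to $HADOOP_PREFIX, only the share/hadoop/mapreduce webapps check and lib wildcard survive, and the TOOL_PATH loops over hadoop-mapreduce-tools-*.jar are gone from this script. The override works the same way, path hypothetical:

    export HADOOP_MAPRED_HOME=/opt/hadoop-mapreduce   # takes precedence over the $HADOOP_PREFIX default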
@@ -39,11 +39,7 @@ fi
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
-  . "$bin"/../libexec/hadoop-config.sh
-else
-  . "$bin"/hadoop-config.sh
-fi
+. "$bin"/../libexec/hadoop-config.sh

 # get arguments

@@ -29,10 +29,6 @@ fi
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
-  . "$bin"/../libexec/hadoop-config.sh
-else
-  . "$bin"/hadoop-config.sh
-fi
+. "$bin"/../libexec/hadoop-config.sh

 exec "$bin/slaves.sh" --config $HADOOP_CONF_DIR cd "$HADOOP_PREFIX" \; "$bin/hadoop-daemon.sh" --config $HADOOP_CONF_DIR "$@"
@@ -50,40 +50,6 @@ fi
 JAVA=$JAVA_HOME/bin/java
 JAVA_HEAP_MAX=-Xmx1000m

-# CLASSPATH initially contains $HADOOP_CONF_DIR
-CLASSPATH="${HADOOP_CONF_DIR}"
-
-# for developers, add Hadoop classes to CLASSPATH
-if [ -d "$HADOOP_PREFIX/build/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/classes
-fi
-if [ -d "$HADOOP_PREFIX/build/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build
-fi
-if [ -d "$HADOOP_PREFIX/build/test/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/test/classes
-fi
-
-# so that filenames w/ spaces are handled correctly in loops below
-IFS=
-
-# for releases, add core hadoop jar & webapps to CLASSPATH
-if [ -d "$HADOOP_PREFIX/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX
-fi
-for f in $HADOOP_PREFIX/hadoop-*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-# add libs to CLASSPATH
-for f in $HADOOP_PREFIX/lib/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-for f in $HADOOP_PREFIX/lib/jetty-ext/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
 # restore ordinary behaviour
 unset IFS

@@ -38,11 +38,7 @@ fi
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
-  . "$bin"/../libexec/hadoop-config.sh
-else
-  . "$bin"/hadoop-config.sh
-fi
+. "$bin"/../libexec/hadoop-config.sh

 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"
@@ -23,11 +23,7 @@ echo "This script is Deprecated. Instead use start-dfs.sh and start-mapred.sh"
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
-  . "$bin"/../libexec/hadoop-config.sh
-else
-  . "$bin"/hadoop-config.sh
-fi
+. "$bin"/../libexec/hadoop-config.sh

 # start hdfs daemons if hdfs is present
 if [ -f "${HADOOP_HDFS_HOME}"/bin/start-dfs.sh ]; then
@@ -23,11 +23,7 @@ echo "This script is Deprecated. Instead use stop-dfs.sh and stop-mapred.sh"
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
-  . "$bin"/../libexec/hadoop-config.sh
-else
-  . "$bin"/hadoop-config.sh
-fi
+. "$bin"/../libexec/hadoop-config.sh

 # stop hdfs daemons if hdfs is present
 if [ -f "${HADOOP_HDFS_HOME}"/bin/stop-dfs.sh ]; then
@@ -36,11 +36,7 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hdfs-config.sh" ]; then
-  . "$bin/../libexec/hdfs-config.sh"
-else
-  . "$bin/hdfs-config.sh"
-fi
+. "$bin/../libexec/hdfs-config.sh"

 if [ "$1" = '' ] ; then
   "Error: please specify local exclude file as a first argument"
@@ -19,11 +19,7 @@ bin=`which $0`
 bin=`dirname ${bin}`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hdfs-config.sh" ]; then
-  . "$bin"/../libexec/hdfs-config.sh
-else
-  . "$bin/hdfs-config.sh"
-fi
+. "$bin"/../libexec/hdfs-config.sh

 function print_usage(){
   echo "Usage: hdfs [--config confdir] COMMAND"
@@ -109,45 +105,6 @@ else
   CLASS="$COMMAND"
 fi

-# for developers, add hdfs classes to CLASSPATH
-if [ -d "$HADOOP_HDFS_HOME/build/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/classes
-fi
-if [ -d "$HADOOP_HDFS_HOME/build/web/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/web
-fi
-if [ -d "$HADOOP_HDFS_HOME/build/test/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/test/classes
-fi
-if [ -d "$HADOOP_HDFS_HOME/build/tools" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/tools
-fi
-
-if [ -d "$HADOOP_HDFS_HOME/build/ivy/lib/hadoop-hdfs/common" ]; then
-  for f in $HADOOP_HDFS_HOME/build/ivy/lib/hadoop-hdfs/common/*.jar; do
-    CLASSPATH=${CLASSPATH}:$f;
-  done
-fi
-
-if [ -d "$HADOOP_HDFS_HOME/build/ivy/lib/hadoop-hdfs/hdfs" ]; then
-  for f in $HADOOP_HDFS_HOME/build/ivy/lib/hadoop-hdfs/hdfs/*.jar; do
-    CLASSPATH=${CLASSPATH}:$f;
-  done
-fi
-
-# for releases, add core hdfs jar & webapps to CLASSPATH
-if [ -d "$HADOOP_PREFIX/share/hadoop/hdfs/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/hdfs
-fi
-for f in $HADOOP_PREFIX/share/hadoop-hdfs/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-# add libs to CLASSPATH
-for f in $HADOOP_PREFIX/lib/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
 if $cygwin; then
   CLASSPATH=`cygpath -p -w "$CLASSPATH"`
 fi
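Note: with the build/ and ivy/ blocks removed, bin/hdfs no longer assembles its own classpath and relies on what hadoop-config.sh builds. One way to inspect the effective result (assuming a standard install; `hadoop classpath` is the stock subcommand for this):

    hadoop classpath    # prints the resolved CLASSPATH, wildcard entries included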
@@ -161,7 +118,7 @@ if [ "$starting_secure_dn" = "true" ]; then
     HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
   fi

-  exec "$HADOOP_PREFIX/bin/jsvc" \
+  exec "$HADOOP_HDFS_HOME/bin/jsvc" \
            -Dproc_$COMMAND -outfile "$HADOOP_LOG_DIR/jsvc.out" \
            -errfile "$HADOOP_LOG_DIR/jsvc.err" \
            -pidfile "$HADOOP_SECURE_DN_PID" \
@@ -26,12 +26,10 @@ export HADOOP_PREFIX="${HADOOP_PREFIX:-$bin/..}"

 if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
   . $bin/../libexec/hadoop-config.sh
-elif [ -e "${HADOOP_COMMON_HOME}/bin/hadoop-config.sh" ]; then
-  . "$HADOOP_COMMON_HOME"/bin/hadoop-config.sh
-elif [ -e "${HADOOP_HOME}/bin/hadoop-config.sh" ]; then
-  . "$HADOOP_HOME"/bin/hadoop-config.sh
-elif [ -e "${HADOOP_HDFS_HOME}/bin/hadoop-config.sh" ]; then
-  . "$HADOOP_HDFS_HOME"/bin/hadoop-config.sh
+elif [ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" ]; then
+  . "$HADOOP_COMMON_HOME"/libexec/hadoop-config.sh
+elif [ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]; then
+  . "$HADOOP_HOME"/libexec/hadoop-config.sh
 else
   echo "Hadoop common not found."
   exit
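Note: the probe order in hdfs-config.sh now checks libexec rather than bin, and the HADOOP_HDFS_HOME fallback is dropped entirely. An equivalent loop form of the new if/elif chain, as a sketch only:

    found=
    for d in "$bin/../libexec" "${HADOOP_COMMON_HOME}/libexec" "${HADOOP_HOME}/libexec"; do
      if [ -e "$d/hadoop-config.sh" ]; then
        . "$d/hadoop-config.sh"; found=1; break    # first hit wins
      fi
    done
    [ -z "$found" ] && { echo "Hadoop common not found."; exit; }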
@@ -23,11 +23,7 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hdfs-config.sh" ]; then
-  . "$bin/../libexec/hdfs-config.sh"
-else
-  . "$bin/hdfs-config.sh"
-fi
+. "$bin/../libexec/hdfs-config.sh"

 namenodes=$("$HADOOP_PREFIX/bin/hdfs" getconf -nnRpcAddresses)
 if [ "$?" != '0' ] ; then errorFlag='1' ;
@@ -18,11 +18,7 @@
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hdfs-config.sh" ]; then
-  . "$bin"/../libexec/hdfs-config.sh
-else
-  . "$bin/hdfs-config.sh"
-fi
+. "$bin"/../libexec/hdfs-config.sh

 # Start balancer daemon.

@@ -25,11 +25,7 @@ usage="Usage: start-dfs.sh [-upgrade|-rollback]"
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hdfs-config.sh" ]; then
-  . "$bin"/../libexec/hdfs-config.sh
-else
-  . "$bin/hdfs-config.sh"
-fi
+. "$bin"/../libexec/hdfs-config.sh

 # get arguments
 if [ $# -ge 1 ]; then
@@ -22,12 +22,7 @@ usage="Usage (run as root in order to start secure datanodes): start-secure-dns.
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hdfs-config.sh" ]; then
-  . "$bin"/../libexec/hdfs-config.sh
-else
-  . "$bin/hdfs-config.sh"
-fi
-
+. "$bin"/../libexec/hdfs-config.sh

 if [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
   "$HADOOP_PREFIX"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start datanode $dataStartOpt
@@ -18,12 +18,7 @@
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hdfs-config.sh" ]; then
-  . "$bin"/../libexec/hdfs-config.sh
-else
-  . "$bin/hdfs-config.sh"
-fi
-
+. "$bin"/../libexec/hdfs-config.sh

 # Stop balancer daemon.
 # Run this on the machine where the balancer is running
@@ -18,11 +18,7 @@
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hdfs-config.sh" ]; then
-  . "$bin"/../libexec/hdfs-config.sh
-else
-  . "$bin/hdfs-config.sh"
-fi
+. "$bin"/../libexec/hdfs-config.sh

 #---------------------------------------------------------
 # namenodes
@@ -22,11 +22,7 @@ usage="Usage (run as root in order to stop secure datanodes): stop-secure-dns.sh
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`

-if [ -e "$bin/../libexec/hdfs-config.sh" ]; then
-  . "$bin"/../libexec/hdfs-config.sh
-else
-  . "$bin/hdfs-config.sh"
-fi
+. "$bin"/../libexec/hdfs-config.sh

 if [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
   "$HADOOP_PREFIX"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs stop datanode
@@ -82,6 +82,7 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
       <version>${hadoop-common.version}</version>
+      <scope>provided</scope>
       <exclusions>
         <exclusion>
           <groupId>commons-el</groupId>
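Note: `<scope>provided</scope>` keeps hadoop-common on the compile classpath but out of packaged artifacts, which pairs with the org.apache.hadoop:hadoop-*:jar exclusion in the first hunk: at runtime the jars come from the installed share/ tree via the wildcard classpath. To see what a build would still bundle (plain Maven, nothing project-specific assumed):

    mvn dependency:tree -Dscope=runtime    # provided dependencies drop out of runtime scope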