HADOOP-11346. Rewrite sls/rumen to use new shell framework (John Smith via aw)
parent 43d5caef5e
commit f990e9d229
hadoop-common-project/hadoop-common/CHANGES.txt
@@ -148,6 +148,9 @@ Trunk (Unreleased)
 
     HADOOP-11460. Deprecate shell vars (John Smith via aw)
 
+    HADOOP-11346. Rewrite sls/rumen to use new shell framework (John Smith
+    via aw)
+
   BUG FIXES
 
     HADOOP-11473. test-patch says "-1 overall" even when all checks are +1
hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh
@@ -13,18 +13,18 @@
 # limitations under the License. See accompanying LICENSE file.
 #
 
-###############################################################################
-printUsage() {
+function hadoop_usage()
+{
   echo "Usage: rumen2sls.sh <OPTIONS>"
   echo " --rumen-file=<RUMEN_FILE>"
   echo " --output-dir=<SLS_OUTPUT_DIR>"
   echo " [--output-prefix=<PREFIX>] (default is sls)"
   echo
 }
-###############################################################################
-parseArgs() {
-  for i in $*
-  do
+
+function parse_args()
+{
+  for i in "$@"; do
     case $i in
     --rumen-file=*)
       rumenfile=${i#*=}
@@ -36,71 +36,71 @@ parseArgs() {
       outputprefix=${i#*=}
     ;;
     *)
-      echo "Invalid option"
-      echo
-      printUsage
-      exit 1
+      hadoop_error "ERROR: Invalid option ${i}"
+      hadoop_exit_with_usage 1
     ;;
     esac
   done
-  if [[ "${rumenfile}" == "" || "${outputdir}" == "" ]] ; then
-    echo "Both --rumen-file ${rumenfile} and --output-dir \
-      ${outputfdir} must be specified"
-    echo
-    printUsage
-    exit 1
+
+  if [[ -z "${rumenfile}" ]] ; then
+    hadoop_error "ERROR: --rumen-file must be specified."
+    hadoop_exit_with_usage 1
+  fi
+
+  if [[ -z "${outputdir}" ]] ; then
+    hadoop_error "ERROR: --output-dir must be specified."
+    hadoop_exit_with_usage 1
   fi
 }
-###############################################################################
-calculateBasedir() {
-  # resolve links - $0 may be a softlink
-  PRG="${1}"
-
-  while [ -h "${PRG}" ]; do
-    ls=`ls -ld "${PRG}"`
-    link=`expr "$ls" : '.*-> \(.*\)$'`
-    if expr "$link" : '/.*' > /dev/null; then
-      PRG="$link"
-    else
-      PRG=`dirname "${PRG}"`/"$link"
-    fi
-  done
-
-  BASEDIR=`dirname ${PRG}`
-  BASEDIR=`cd ${BASEDIR}/..;pwd`
-}
-###############################################################################
-calculateClasspath() {
-  HADOOP_BASE=`which hadoop`
-  HADOOP_BASE=`dirname $HADOOP_BASE`
-  DEFAULT_LIBEXEC_DIR=${HADOOP_BASE}/../libexec
-  HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
-  . $HADOOP_LIBEXEC_DIR/hadoop-config.sh
-  export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${TOOL_PATH}"
-}
-###############################################################################
-runSLSGenerator() {
-  if [[ "${outputprefix}" == "" ]] ; then
+
+function calculate_classpath()
+{
+  hadoop_debug "Injecting TOOL_PATH into CLASSPATH"
+  hadoop_add_classpath "${TOOL_PATH}"
+}
+
+function run_sls_generator()
+{
+  if [[ -z "${outputprefix}" ]] ; then
     outputprefix="sls"
   fi
 
-  slsJobs=${outputdir}/${outputprefix}-jobs.json
-  slsNodes=${outputdir}/${outputprefix}-nodes.json
+  hadoop_add_param args -input "-input ${rumenfile}"
+  hadoop_add_param args -outputJobs "-outputJobs ${outputdir}/${outputprefix}-jobs.json"
+  hadoop_add_param args -outputNodes "-outputNodes ${outputdir}/${outputprefix}-nodes.json"
 
-  args="-input ${rumenfile} -outputJobs ${slsJobs}";
-  args="${args} -outputNodes ${slsNodes}";
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 
-  hadoop org.apache.hadoop.yarn.sls.RumenToSLSConverter ${args}
+  hadoop_finalize
+  # shellcheck disable=SC2086
+  hadoop_java_exec rumen2sls org.apache.hadoop.yarn.sls.RumenToSLSConverter ${args}
 }
-###############################################################################
 
-calculateBasedir $0
-calculateClasspath
-parseArgs "$@"
-runSLSGenerator
+# let's locate libexec...
+if [[ -n "${HADOOP_PREFIX}" ]]; then
+  DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+else
+  this="${BASH_SOURCE-$0}"
+  bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+  DEFAULT_LIBEXEC_DIR="${bin}/../../../../../libexec"
+fi
 
-echo
-echo "SLS simulation files available at: ${outputdir}"
-echo
+HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}"
+# shellcheck disable=SC2034
+HADOOP_NEW_CONFIG=true
+if [[ -f "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
+  . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
+else
+  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hadoop-config.sh." 2>&1
+  exit 1
+fi
 
-exit 0
+if [ $# = 0 ]; then
+  hadoop_exit_with_usage 1
+fi
+
+parse_args "${@}"
+calculate_classpath
+run_sls_generator
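For readers unfamiliar with the new shell framework, the rewritten rumen2sls.sh above stops concatenating `args` by hand and instead accumulates options through `hadoop_add_param`, then dispatches via `hadoop_finalize` and `hadoop_java_exec`. The sketch below only illustrates that accumulation pattern: the `hadoop_add_param` here is a simplified stand-in assumed to approximate the de-duplicating append in hadoop-functions.sh, and the trace and output paths are hypothetical.

```bash
#!/usr/bin/env bash
# Simplified stand-in for hadoop-functions.sh's hadoop_add_param (sketch only):
# append $3 to the variable named by $1 unless the key $2 is already present.
function hadoop_add_param()
{
  if [[ ! ${!1} =~ $2 ]]; then
    eval "$1=\"${!1} $3\""
  fi
}

args=""
# Hypothetical paths, mirroring run_sls_generator above.
hadoop_add_param args -input       "-input /tmp/rumen-trace.json"
hadoop_add_param args -outputJobs  "-outputJobs /tmp/sls-out/sls-jobs.json"
hadoop_add_param args -outputNodes "-outputNodes /tmp/sls-out/sls-nodes.json"
# Repeating a key is a no-op, so options never get duplicated.
hadoop_add_param args -input       "-input /tmp/rumen-trace.json"

echo "${args}"   # the accumulated option string; the real script passes it to hadoop_java_exec
```

In the actual script that string is expanded unquoted into `hadoop_java_exec rumen2sls org.apache.hadoop.yarn.sls.RumenToSLSConverter ${args}`, which is why the SC2086 shellcheck suppression sits directly above that call.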
hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh
@@ -13,20 +13,19 @@
 # limitations under the License. See accompanying LICENSE file.
 #
 
-###############################################################################
-printUsage() {
-  echo "Usage: slsrun.sh <OPTIONS>"
-  echo " --input-rumen|--input-sls=<FILE1,FILE2,...>"
+function hadoop_usage()
+{
+  echo "Usage: slsrun.sh <OPTIONS> "
+  echo " --input-rumen=<FILE1,FILE2,...> | --input-sls=<FILE1,FILE2,...>"
   echo " --output-dir=<SLS_SIMULATION_OUTPUT_DIRECTORY>"
   echo " [--nodes=<SLS_NODES_FILE>]"
   echo " [--track-jobs=<JOBID1,JOBID2,...>]"
   echo " [--print-simulation]"
-  echo
 }
-###############################################################################
-parseArgs() {
-  for i in $*
-  do
+
+function parse_args()
+{
+  for i in "$@"; do
     case $i in
     --input-rumen=*)
       inputrumen=${i#*=}
@@ -47,66 +46,87 @@ parseArgs() {
       printsimulation="true"
     ;;
     *)
-      echo "Invalid option"
-      echo
-      printUsage
-      exit 1
+      hadoop_error "ERROR: Invalid option ${i}"
+      hadoop_exit_with_usage 1
    ;;
     esac
   done
 
-  if [[ "${inputrumen}" == "" && "${inputsls}" == "" ]] ; then
-    echo "Either --input-rumen or --input-sls must be specified"
-    echo
-    printUsage
-    exit 1
+  if [[ -z "${inputrumen}" && -z "${inputsls}" ]] ; then
+    hadoop_error "ERROR: Either --input-rumen or --input-sls must be specified."
+    hadoop_exit_with_usage 1
   fi
 
-  if [[ "${outputdir}" == "" ]] ; then
-    echo "The output directory --output-dir must be specified"
-    echo
-    printUsage
-    exit 1
+  if [[ -n "${inputrumen}" && -n "${inputsls}" ]] ; then
+    hadoop_error "ERROR: Only specify one of --input-rumen or --input-sls."
+    hadoop_exit_with_usage 1
+  fi
+
+  if [[ -z "${outputdir}" ]] ; then
+    hadoop_error "ERROR: The output directory --output-dir must be specified."
+    hadoop_exit_with_usage 1
   fi
 }
 
-###############################################################################
-calculateClasspath() {
-  HADOOP_BASE=`which hadoop`
-  HADOOP_BASE=`dirname $HADOOP_BASE`
-  DEFAULT_LIBEXEC_DIR=${HADOOP_BASE}/../libexec
-  HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
-  . $HADOOP_LIBEXEC_DIR/hadoop-config.sh
-  export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${TOOL_PATH}:html"
+function calculate_classpath() {
+  hadoop_debug "Injecting TOOL_PATH into CLASSPATH"
+  hadoop_add_classpath "${TOOL_PATH}"
+  hadoop_debug "Injecting ${HADOOP_PREFIX}/share/hadoop/tools/sls/html into CLASSPATH"
+  hadoop_add_classpath "${HADOOP_PREFIX}/share/hadoop/tools/sls/html"
 }
-###############################################################################
-runSimulation() {
+
+function run_simulation() {
   if [[ "${inputsls}" == "" ]] ; then
-    args="-inputrumen ${inputrumen}"
+    hadoop_add_param args -inputrumen "-inputrumen ${inputrumen}"
   else
-    args="-inputsls ${inputsls}"
+    hadoop_add_param args -inputsls "-inputsls ${inputsls}"
   fi
 
-  args="${args} -output ${outputdir}"
+  hadoop_add_param args -output "-output ${outputdir}"
 
-  if [[ "${nodes}" != "" ]] ; then
-    args="${args} -nodes ${nodes}"
+  if [[ -n "${nodes}" ]] ; then
+    hadoop_add_param args -nodes "-nodes ${nodes}"
   fi
 
-  if [[ "${trackjobs}" != "" ]] ; then
-    args="${args} -trackjobs ${trackjobs}"
+  if [[ -n "${trackjobs}" ]] ; then
+    hadoop_add_param args -trackjobs "-trackjobs ${trackjobs}"
   fi
 
   if [[ "${printsimulation}" == "true" ]] ; then
-    args="${args} -printsimulation"
+    hadoop_add_param args -printsimulation "-printsimulation"
   fi
 
-  hadoop org.apache.hadoop.yarn.sls.SLSRunner ${args}
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+
+  hadoop_finalize
+  # shellcheck disable=SC2086
+  hadoop_java_exec sls org.apache.hadoop.yarn.sls.SLSRunner ${args}
 }
-###############################################################################
 
-calculateClasspath
-parseArgs "$@"
-runSimulation
+# let's locate libexec...
+if [[ -n "${HADOOP_PREFIX}" ]]; then
+  DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+else
+  this="${BASH_SOURCE-$0}"
+  bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+  DEFAULT_LIBEXEC_DIR="${bin}/../../../../../libexec"
+fi
 
-exit 0
+HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}"
+# shellcheck disable=SC2034
+HADOOP_NEW_CONFIG=true
+if [[ -f "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
+  . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
+else
+  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hadoop-config.sh." 2>&1
+  exit 1
+fi
+
+if [[ $# = 0 ]]; then
+  hadoop_exit_with_usage 1
+fi
+
+parse_args "${@}"
+calculate_classpath
+run_simulation
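As a usage illustration only (the script location and every path below are placeholders; the option names are taken from the hadoop_usage text above), running the rewritten slsrun.sh might look like:

```bash
# Placeholder invocation; give exactly one of --input-rumen / --input-sls,
# otherwise parse_args exits through hadoop_exit_with_usage.
bash slsrun.sh \
  --input-rumen=/tmp/job-trace.json \
  --output-dir=/tmp/sls-run-output \
  --nodes=/tmp/sls-nodes.json \
  --print-simulation
```

Unlike the old version, missing or conflicting inputs now fail through hadoop_error and hadoop_exit_with_usage rather than the script printing its own usage text and calling exit directly.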