HADOOP-11346. Rewrite sls/rumen to use new shell framework (John Smith via aw)
parent 43d5caef5e
commit f990e9d229
@@ -148,6 +148,9 @@ Trunk (Unreleased)
 
     HADOOP-11460. Deprecate shell vars (John Smith via aw)
 
+    HADOOP-11346. Rewrite sls/rumen to use new shell framework (John Smith
+    via aw)
+
   BUG FIXES
 
     HADOOP-11473. test-patch says "-1 overall" even when all checks are +1

rumen2sls.sh

@@ -13,18 +13,18 @@
 # limitations under the License. See accompanying LICENSE file.
 #
 
-###############################################################################
-printUsage() {
+function hadoop_usage()
+{
   echo "Usage: rumen2sls.sh <OPTIONS>"
   echo " --rumen-file=<RUMEN_FILE>"
   echo " --output-dir=<SLS_OUTPUT_DIR>"
   echo " [--output-prefix=<PREFIX>] (default is sls)"
   echo
 }
-###############################################################################
-parseArgs() {
-  for i in $*
-  do
+
+function parse_args()
+{
+  for i in "$@"; do
     case $i in
     --rumen-file=*)
       rumenfile=${i#*=}
@@ -36,71 +36,71 @@ parseArgs() {
       outputprefix=${i#*=}
     ;;
     *)
-      echo "Invalid option"
-      echo
-      printUsage
-      exit 1
+      hadoop_error "ERROR: Invalid option ${i}"
+      hadoop_exit_with_usage 1
     ;;
     esac
   done
-  if [[ "${rumenfile}" == "" || "${outputdir}" == "" ]] ; then
-    echo "Both --rumen-file ${rumenfile} and --output-dir \
-         ${outputfdir} must be specified"
-    echo
-    printUsage
-    exit 1
+
+  if [[ -z "${rumenfile}" ]] ; then
+    hadoop_error "ERROR: --rumen-file must be specified."
+    hadoop_exit_with_usage 1
+  fi
+
+  if [[ -z "${outputdir}" ]] ; then
+    hadoop_error "ERROR: --output-dir must be specified."
+    hadoop_exit_with_usage 1
   fi
 }
-###############################################################################
-calculateBasedir() {
-  # resolve links - $0 may be a softlink
-  PRG="${1}"
-
-  while [ -h "${PRG}" ]; do
-    ls=`ls -ld "${PRG}"`
-    link=`expr "$ls" : '.*-> \(.*\)$'`
-    if expr "$link" : '/.*' > /dev/null; then
-      PRG="$link"
-    else
-      PRG=`dirname "${PRG}"`/"$link"
-    fi
-  done
+function calculate_classpath()
+{
+  hadoop_debug "Injecting TOOL_PATH into CLASSPATH"
+  hadoop_add_classpath "${TOOL_PATH}"
+}
 
-  BASEDIR=`dirname ${PRG}`
-  BASEDIR=`cd ${BASEDIR}/..;pwd`
-}
-###############################################################################
-calculateClasspath() {
-  HADOOP_BASE=`which hadoop`
-  HADOOP_BASE=`dirname $HADOOP_BASE`
-  DEFAULT_LIBEXEC_DIR=${HADOOP_BASE}/../libexec
-  HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
-  . $HADOOP_LIBEXEC_DIR/hadoop-config.sh
-  export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${TOOL_PATH}"
-}
-###############################################################################
-runSLSGenerator() {
-  if [[ "${outputprefix}" == "" ]] ; then
+function run_sls_generator()
+{
+  if [[ -z "${outputprefix}" ]] ; then
     outputprefix="sls"
   fi
 
-  slsJobs=${outputdir}/${outputprefix}-jobs.json
-  slsNodes=${outputdir}/${outputprefix}-nodes.json
+  hadoop_add_param args -input "-input ${rumenfile}"
+  hadoop_add_param args -outputJobs "-outputJobs ${outputdir}/${outputprefix}-jobs.json"
+  hadoop_add_param args -outputNodes "-outputNodes ${outputdir}/${outputprefix}-nodes.json"
 
-  args="-input ${rumenfile} -outputJobs ${slsJobs}";
-  args="${args} -outputNodes ${slsNodes}";
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 
-  hadoop org.apache.hadoop.yarn.sls.RumenToSLSConverter ${args}
+  hadoop_finalize
+  # shellcheck disable=SC2086
+  hadoop_java_exec rumen2sls org.apache.hadoop.yarn.sls.RumenToSLSConverter ${args}
 }
-###############################################################################
 
-calculateBasedir $0
-calculateClasspath
-parseArgs "$@"
-runSLSGenerator
+# let's locate libexec...
+if [[ -n "${HADOOP_PREFIX}" ]]; then
+  DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+else
+  this="${BASH_SOURCE-$0}"
+  bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+  DEFAULT_LIBEXEC_DIR="${bin}/../../../../../libexec"
+fi
 
-echo
-echo "SLS simulation files available at: ${outputdir}"
-echo
+HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}"
+# shellcheck disable=SC2034
+HADOOP_NEW_CONFIG=true
+if [[ -f "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
+  . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
+else
+  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hadoop-config.sh." 2>&1
+  exit 1
+fi
+
+if [ $# = 0 ]; then
+  hadoop_exit_with_usage 1
+fi
+
+parse_args "${@}"
+calculate_classpath
+run_sls_generator
+
+exit 0
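
For reference, a typical invocation of the rewritten rumen2sls.sh looks roughly like the following. Every path is a placeholder, and the script is assumed to be on PATH or run from its bin directory:

  # Convert a Rumen job trace into SLS input files (illustrative paths only).
  rumen2sls.sh \
    --rumen-file=/tmp/job-trace.json \
    --output-dir=/tmp/sls-input \
    --output-prefix=sls

  # On success the converter writes <output-dir>/sls-jobs.json and
  # <output-dir>/sls-nodes.json, which slsrun.sh can then consume.
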

slsrun.sh

@@ -13,20 +13,19 @@
 # limitations under the License. See accompanying LICENSE file.
 #
 
-###############################################################################
-printUsage() {
+function hadoop_usage()
+{
   echo "Usage: slsrun.sh <OPTIONS> "
-  echo " --input-rumen|--input-sls=<FILE1,FILE2,...>"
+  echo " --input-rumen=<FILE1,FILE2,...> | --input-sls=<FILE1,FILE2,...>"
   echo " --output-dir=<SLS_SIMULATION_OUTPUT_DIRECTORY>"
   echo " [--nodes=<SLS_NODES_FILE>]"
   echo " [--track-jobs=<JOBID1,JOBID2,...>]"
   echo " [--print-simulation]"
-  echo
 }
-###############################################################################
-parseArgs() {
-  for i in $*
-  do
+
+function parse_args()
+{
+  for i in "$@"; do
     case $i in
     --input-rumen=*)
       inputrumen=${i#*=}
@@ -47,66 +46,87 @@ parseArgs() {
       printsimulation="true"
     ;;
     *)
-      echo "Invalid option"
-      echo
-      printUsage
-      exit 1
+      hadoop_error "ERROR: Invalid option ${i}"
+      hadoop_exit_with_usage 1
     ;;
     esac
   done
 
-  if [[ "${inputrumen}" == "" && "${inputsls}" == "" ]] ; then
-    echo "Either --input-rumen or --input-sls must be specified"
-    echo
-    printUsage
-    exit 1
+  if [[ -z "${inputrumen}" && -z "${inputsls}" ]] ; then
+    hadoop_error "ERROR: Either --input-rumen or --input-sls must be specified."
+    hadoop_exit_with_usage 1
   fi
 
-  if [[ "${outputdir}" == "" ]] ; then
-    echo "The output directory --output-dir must be specified"
-    echo
-    printUsage
-    exit 1
+  if [[ -n "${inputrumen}" && -n "${inputsls}" ]] ; then
+    hadoop_error "ERROR: Only specify one of --input-rumen or --input-sls."
+    hadoop_exit_with_usage 1
   fi
+
+  if [[ -z "${outputdir}" ]] ; then
+    hadoop_error "ERROR: The output directory --output-dir must be specified."
+    hadoop_exit_with_usage 1
+  fi
 }
 
-###############################################################################
-calculateClasspath() {
-  HADOOP_BASE=`which hadoop`
-  HADOOP_BASE=`dirname $HADOOP_BASE`
-  DEFAULT_LIBEXEC_DIR=${HADOOP_BASE}/../libexec
-  HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
-  . $HADOOP_LIBEXEC_DIR/hadoop-config.sh
-  export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${TOOL_PATH}:html"
+function calculate_classpath() {
+  hadoop_debug "Injecting TOOL_PATH into CLASSPATH"
+  hadoop_add_classpath "${TOOL_PATH}"
+  hadoop_debug "Injecting ${HADOOP_PREFIX}/share/hadoop/tools/sls/html into CLASSPATH"
+  hadoop_add_classpath "${HADOOP_PREFIX}/share/hadoop/tools/sls/html"
 }
-###############################################################################
-runSimulation() {
+
+function run_simulation() {
   if [[ "${inputsls}" == "" ]] ; then
-    args="-inputrumen ${inputrumen}"
+    hadoop_add_param args -inputrumen "-inputrumen ${inputrumen}"
   else
-    args="-inputsls ${inputsls}"
+    hadoop_add_param args -inputsls "-inputsls ${inputsls}"
   fi
 
-  args="${args} -output ${outputdir}"
+  hadoop_add_param args -output "-output ${outputdir}"
 
-  if [[ "${nodes}" != "" ]] ; then
-    args="${args} -nodes ${nodes}"
+  if [[ -n "${nodes}" ]] ; then
+    hadoop_add_param args -nodes "-nodes ${nodes}"
   fi
 
-  if [[ "${trackjobs}" != "" ]] ; then
-    args="${args} -trackjobs ${trackjobs}"
+  if [[ -n "${trackjobs}" ]] ; then
+    hadoop_add_param args -trackjobs "-trackjobs ${trackjobs}"
   fi
 
   if [[ "${printsimulation}" == "true" ]] ; then
-    args="${args} -printsimulation"
+    hadoop_add_param args -printsimulation "-printsimulation"
   fi
 
-  hadoop org.apache.hadoop.yarn.sls.SLSRunner ${args}
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+
+  hadoop_finalize
+  # shellcheck disable=SC2086
+  hadoop_java_exec sls org.apache.hadoop.yarn.sls.SLSRunner ${args}
 }
-###############################################################################
 
-calculateClasspath
-parseArgs "$@"
-runSimulation
+# let's locate libexec...
+if [[ -n "${HADOOP_PREFIX}" ]]; then
+  DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+else
+  this="${BASH_SOURCE-$0}"
+  bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+  DEFAULT_LIBEXEC_DIR="${bin}/../../../../../libexec"
+fi
 
-exit 0
+HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}"
+# shellcheck disable=SC2034
+HADOOP_NEW_CONFIG=true
+if [[ -f "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
+  . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
+else
+  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/hadoop-config.sh." 2>&1
+  exit 1
+fi
+
+if [[ $# = 0 ]]; then
+  hadoop_exit_with_usage 1
+fi
+
+parse_args "${@}"
+calculate_classpath
+run_simulation
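
A similarly hedged example of driving the simulator with the files produced by rumen2sls.sh; again, every path is a placeholder:

  # Run the SLS simulator on previously generated SLS input (illustrative paths only).
  slsrun.sh \
    --input-sls=/tmp/sls-input/sls-jobs.json \
    --nodes=/tmp/sls-input/sls-nodes.json \
    --output-dir=/tmp/sls-output \
    --print-simulation

  # --input-rumen=<FILE1,FILE2,...> may be given instead of --input-sls, but the
  # rewritten parse_args rejects supplying both at once.
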
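Both scripts now accumulate the Java arguments through hadoop_add_param instead of ad-hoc string concatenation. As a rough sketch of that append-if-absent pattern (a hypothetical stand-in written for illustration, not the actual helper from hadoop-functions.sh):

  # Hypothetical approximation of the pattern hadoop_add_param provides:
  # append ${value} to the variable named ${varname} unless ${key} already appears.
  function add_param_sketch()
  {
    local varname=$1   # name of the variable accumulating arguments
    local key=$2       # token used to detect duplicates
    local value=$3     # text to append
    if [[ ! "${!varname}" =~ ${key} ]]; then
      eval "${varname}=\"\${${varname}} ${value}\""
    fi
  }

  args=""
  add_param_sketch args -input "-input /tmp/job-trace.json"
  add_param_sketch args -input "-input /tmp/job-trace.json"   # duplicate key, so this is a no-op
  echo "${args}"   # prints the single -input argument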