HADOOP-11485. Pluggable shell integration (aw)
commit 5c79439568 (parent 17165d3df9)
@ -48,6 +48,11 @@
      </includes>
      <fileMode>0755</fileMode>
    </fileSet>
    <fileSet>
      <directory>${basedir}/src/main/shellprofile.d</directory>
      <outputDirectory>/libexec/shellprofile.d</outputDirectory>
      <fileMode>0755</fileMode>
    </fileSet>
    <fileSet>
      <directory>${basedir}/src/main/bin</directory>
      <outputDirectory>/sbin</outputDirectory>
@ -41,6 +41,11 @@
      </includes>
      <fileMode>0755</fileMode>
    </fileSet>
    <fileSet>
      <directory>shellprofile.d</directory>
      <outputDirectory>libexec/shellprofile.d</outputDirectory>
      <fileMode>0755</fileMode>
    </fileSet>
    <fileSet>
      <directory>bin</directory>
      <outputDirectory>sbin</outputDirectory>
@ -46,6 +46,11 @@
      </includes>
      <fileMode>0755</fileMode>
    </fileSet>
    <fileSet>
      <directory>hadoop-yarn/shellprofile.d</directory>
      <outputDirectory>libexec/shellprofile.d</outputDirectory>
      <fileMode>0755</fileMode>
    </fileSet>
    <fileSet>
      <directory>hadoop-yarn/bin</directory>
      <outputDirectory>sbin</outputDirectory>
@ -22,6 +22,8 @@ Trunk (Unreleased)
    HADOOP-9044. add FindClass main class to provide classpath checking
    of installations (Steve Loughran via aw)

    HADOOP-11485. Pluggable shell integration (aw)

  IMPROVEMENTS

    HADOOP-8017. Configure hadoop-main pom to get rid of M2E plugin execution
@ -1,4 +1,4 @@
#
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
@ -76,7 +76,7 @@ fi
#

# Let's go!  Base definitions so we can move forward
hadoop_bootstrap_init
hadoop_bootstrap

# let's find our conf.
#
@ -158,8 +158,12 @@ while [[ -z "${_hadoop_common_done}" ]]; do
    esac
done

#
# Setup the base-line environment
#
hadoop_find_confdir
hadoop_exec_hadoopenv
hadoop_import_shellprofiles
hadoop_exec_userfuncs

#
@ -183,22 +187,20 @@ if declare -F hadoop_subproject_init >/dev/null ; then
  hadoop_subproject_init
fi

hadoop_shellprofiles_init

# get the native libs in there pretty quick
hadoop_add_javalibpath "${HADOOP_PREFIX}/build/native"
hadoop_add_javalibpath "${HADOOP_PREFIX}/${HADOOP_COMMON_LIB_NATIVE_DIR}"

hadoop_shellprofiles_nativelib

# get the basic java class path for these subprojects
# in as quickly as possible since other stuff
# will definitely depend upon it.
#
# at some point, this will get replaced with something pluggable
# so that these functions can sit in their projects rather than
# common
#
for i in common hdfs yarn mapred
do
  hadoop_add_to_classpath_$i
done

hadoop_add_common_to_classpath
hadoop_shellprofiles_classpath

#
# backwards compatibility. new stuff should
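
This hunk is where the pluggable hooks take over from the old hard-coded subproject loop: the profiles were already sourced by hadoop_import_shellprofiles earlier in hadoop-config.sh, and their capability functions are invoked at the points shown here. As a rough comment-only sketch of the resulting call order (function names as introduced by this commit, nothing else assumed):

# Shell profile lifecycle as wired into hadoop-config.sh by this change:
#   hadoop_import_shellprofiles     source every file in libexec/shellprofile.d
#                                   and ${HADOOP_CONF_DIR}/shellprofile.d
#   hadoop_shellprofiles_init       call each profile's _<name>_hadoop_init
#   hadoop_shellprofiles_nativelib  call each profile's _<name>_hadoop_nativelib
#   hadoop_shellprofiles_classpath  call each profile's _<name>_hadoop_classpath
#   hadoop_shellprofiles_finalize   call each profile's _<name>_hadoop_finalize
#                                   (later, from hadoop_finalize)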
@ -50,13 +50,13 @@ function hadoop_deprecate_envvar
  fi
}

function hadoop_bootstrap_init
function hadoop_bootstrap
{
  # NOTE: This function is not user replaceable.

  # the root of the Hadoop installation
  # See HADOOP-6255 for the expected directory structure layout

  # By now, HADOOP_LIBEXEC_DIR should have been defined upstream
  # We can piggyback off of that to figure out where the default
  # HADOOP_PREFIX should be.  This allows us to run without
@ -70,7 +70,7 @@ function hadoop_bootstrap_init
  HADOOP_DEFAULT_PREFIX=$(cd -P -- "${HADOOP_LIBEXEC_DIR}/.." >/dev/null && pwd -P)
  HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_DEFAULT_PREFIX}
  export HADOOP_PREFIX

  #
  # short-cuts. vendors may redefine these as well, preferably
  # in hadoop-layouts.sh
@ -88,7 +88,7 @@ function hadoop_bootstrap_init
  TOOL_PATH=${TOOL_PATH:-${HADOOP_PREFIX}/share/hadoop/tools/lib/*}

  export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}

  # defaults
  export HADOOP_OPTS=${HADOOP_OPTS:-"-Djava.net.preferIPv4Stack=true"}
  hadoop_debug "Initial HADOOP_OPTS=${HADOOP_OPTS}"
@ -146,7 +146,7 @@ function hadoop_exec_userfuncs

function hadoop_exec_hadooprc
{
  # Read the user's settings.  This provides for users to override
  # and/or append hadoop-env.sh. It is not meant as a complete system override.

  if [[ -f "${HOME}/.hadooprc" ]]; then
@ -155,6 +155,87 @@ function hadoop_exec_hadooprc
  fi
}

function hadoop_import_shellprofiles
{
  local i
  local files1
  local files2

  if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
    files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*)
  else
    hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
  fi

  if [[ -d "${HADOOP_CONF_DIR}/shellprofile.d" ]]; then
    files2=(${HADOOP_CONF_DIR}/shellprofile.d/*)
  fi

  for i in "${files1[@]}" "${files2[@]}"
  do
    if [[ -n "${i}" ]]; then
      hadoop_debug "Profiles: importing ${i}"
      . "${i}"
    fi
  done
}

function hadoop_shellprofiles_init
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_init >/dev/null ; then
      hadoop_debug "Profiles: ${i} init"
      # shellcheck disable=SC2086
      _${i}_hadoop_init
    fi
  done
}

function hadoop_shellprofiles_classpath
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_classpath >/dev/null ; then
      hadoop_debug "Profiles: ${i} classpath"
      # shellcheck disable=SC2086
      _${i}_hadoop_classpath
    fi
  done
}

function hadoop_shellprofiles_nativelib
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_nativelib >/dev/null ; then
      hadoop_debug "Profiles: ${i} nativelib"
      # shellcheck disable=SC2086
      _${i}_hadoop_nativelib
    fi
  done
}

function hadoop_shellprofiles_finalize
{
  local i

  for i in ${HADOOP_SHELL_PROFILES}
  do
    if declare -F _${i}_hadoop_finalize >/dev/null ; then
      hadoop_debug "Profiles: ${i} finalize"
      # shellcheck disable=SC2086
      _${i}_hadoop_finalize
    fi
  done
}

function hadoop_basic_init
{
  # Some of these are also set in hadoop-env.sh.
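
Each dispatcher above uses declare -F to check whether a sourced profile actually defined the corresponding hook before calling it, so a profile only has to implement the capabilities it needs. A minimal, self-contained illustration of that idiom (the `demo` profile name is invented for this sketch and is not part of the commit):

#!/usr/bin/env bash
# Toy illustration of the declare -F dispatch pattern used by hadoop_shellprofiles_*.
HADOOP_SHELL_PROFILES="demo"

# A profile defines only the hooks it cares about; here, just the init hook.
function _demo_hadoop_init
{
  echo "demo: init hook ran"
}

for i in ${HADOOP_SHELL_PROFILES}
do
  # call the init hook only if the profile defined it
  if declare -F "_${i}_hadoop_init" >/dev/null; then
    "_${i}_hadoop_init"
  fi
  # _demo_hadoop_classpath was never defined, so this is silently skipped
  if declare -F "_${i}_hadoop_classpath" >/dev/null; then
    "_${i}_hadoop_classpath"
  fi
done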
@ -163,37 +244,36 @@ function hadoop_basic_init
  #
  # but it is important to note that if you update these
  # you also need to update hadoop-env.sh as well!!!

  # CLASSPATH initially contains $HADOOP_CONF_DIR
  CLASSPATH="${HADOOP_CONF_DIR}"
  hadoop_debug "Initial CLASSPATH=${HADOOP_CONF_DIR}"

  CLASSPATH=""
  hadoop_debug "Initialize CLASSPATH"

  if [[ -z "${HADOOP_COMMON_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HADOOP_COMMON_DIR}" ]]; then
    export HADOOP_COMMON_HOME="${HADOOP_PREFIX}"
  fi

  # default policy file for service-level authorization
  HADOOP_POLICYFILE=${HADOOP_POLICYFILE:-"hadoop-policy.xml"}

  # define HADOOP_HDFS_HOME
  if [[ -z "${HADOOP_HDFS_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${HDFS_DIR}" ]]; then
    export HADOOP_HDFS_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_YARN_HOME
  if [[ -z "${HADOOP_YARN_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${YARN_DIR}" ]]; then
    export HADOOP_YARN_HOME="${HADOOP_PREFIX}"
  fi

  # define HADOOP_MAPRED_HOME
  if [[ -z "${HADOOP_MAPRED_HOME}" ]] &&
     [[ -d "${HADOOP_PREFIX}/${MAPRED_DIR}" ]]; then
    export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
  fi

  HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
  HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
  HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
@ -240,7 +320,7 @@ function hadoop_rotate_log
  #
  local log=$1;
  local num=${2:-5};

  if [[ -f "${log}" ]]; then # rotate logs
    while [[ ${num} -gt 1 ]]; do
      #shellcheck disable=SC2086
@ -260,7 +340,7 @@ function hadoop_actual_ssh
  # should get hostname followed by rest of command line
  local slave=$1
  shift

  # shellcheck disable=SC2086
  ssh ${HADOOP_SSH_OPTS} ${slave} $"${@// /\\ }" 2>&1 | sed "s/^/$slave: /"
}
@ -269,7 +349,7 @@ function hadoop_connect_to_hosts
{
  # shellcheck disable=SC2124
  local params="$@"

  #
  # ssh (or whatever) to a host
  #
@ -278,13 +358,13 @@ function hadoop_connect_to_hosts
    hadoop_error "ERROR: Both HADOOP_SLAVES and HADOOP_SLAVE_NAME were defined. Aborting."
    exit 1
  fi

  if [[ -n "${HADOOP_SLAVE_NAMES}" ]] ; then
    SLAVE_NAMES=${HADOOP_SLAVE_NAMES}
  else
    SLAVE_FILE=${HADOOP_SLAVES:-${HADOOP_CONF_DIR}/slaves}
  fi

  # if pdsh is available, let's use it.  otherwise default
  # to a loop around ssh.  (ugh)
  if [[ -e '/usr/bin/pdsh' ]]; then
@ -304,7 +384,7 @@ function hadoop_connect_to_hosts
    if [[ -z "${SLAVE_NAMES}" ]]; then
      SLAVE_NAMES=$(sed 's/#.*$//;/^$/d' "${SLAVE_FILE}")
    fi

    # quoting here gets tricky. it's easier to push it into a function
    # so that we don't have to deal with it. However...
    # xargs can't use a function so instead we'll export it out
@ -314,8 +394,8 @@ function hadoop_connect_to_hosts
    export HADOOP_SSH_OPTS

    # xargs is used with option -I to replace the placeholder in arguments
    # list with each hostname read from stdin/pipe. But it considers one
    # line as one argument while reading from stdin/pipe. So place each
    # hostname on a different line while passing via pipe.
    SLAVE_NAMES=$(echo "$SLAVE_NAMES" | tr ' ' '\n' )
    echo "${SLAVE_NAMES}" | \
@ -360,6 +440,12 @@ function hadoop_add_param
  fi
}

function hadoop_add_profile
{
  # shellcheck disable=SC2086
  hadoop_add_param HADOOP_SHELL_PROFILES $1 $1
}

function hadoop_add_classpath
{
  # two params:
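
hadoop_add_profile above simply reuses hadoop_add_param to maintain HADOOP_SHELL_PROFILES, so a profile can register itself unconditionally each time it is sourced; assuming hadoop_add_param's existing duplicate check, repeating the call is harmless. A small hedged sketch, using the hdfs profile name this commit registers:

# Registering the same profile twice should leave a single entry,
# assuming hadoop_add_param de-duplicates as it does for other params.
hadoop_add_profile hdfs
hadoop_add_profile hdfs
hadoop_debug "HADOOP_SHELL_PROFILES=${HADOOP_SHELL_PROFILES}"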
@ -368,7 +454,7 @@ function hadoop_add_classpath
  #    classpath this object should go. default is after
  # return 0 = success (added or duplicate)
  # return 1 = failure (doesn't exist, whatever)

  # However, with classpath (& JLP), we can do dedupe
  # along with some sanity checking (e.g., missing directories)
  # since we have a better idea of what is legal
@ -381,7 +467,7 @@ function hadoop_add_classpath
      hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
      return 1
    fi

    # no wildcard in the middle, so check existence
    # (doesn't matter *what* it is)
  elif [[ ! $1 =~ ^.*\*.*$ ]] && [[ ! -e "$1" ]]; then
@ -413,7 +499,7 @@ function hadoop_add_colonpath
  #    classpath this object should go
  # return 0 = success
  # return 1 = failure (duplicate)

  # this is CLASSPATH, JLP, etc but with dedupe but no
  # other checking
  if [[ -d "${2}" ]] && [[ ":${!1}:" != *":$2:"* ]]; then
@ -446,101 +532,30 @@ function hadoop_add_ldlibpath
{
  # specialized function for a common use case
  hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"

  # note that we export this
  export LD_LIBRARY_PATH
}

function hadoop_add_to_classpath_common
function hadoop_add_common_to_classpath
{
  #
  # get all of the common jars+config in the path
  #

  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
  fi

  if [[ -d "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
}

function hadoop_add_to_classpath_hdfs
{
  #
  # get all of the hdfs jars+config in the path
  #
  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_HDFS_HOME}/hadoop-hdfs/target/classes"
  fi

  # put hdfs in classpath if present
  if [[ -d "${HADOOP_HDFS_HOME}/${HDFS_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_DIR}"'/*'
}

function hadoop_add_to_classpath_yarn
{
  local i
  #
  # get all of the yarn jars+config in the path
  #
  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    for i in yarn-api yarn-common yarn-mapreduce yarn-master-worker \
             yarn-server/yarn-server-nodemanager \
             yarn-server/yarn-server-common \
             yarn-server/yarn-server-resourcemanager; do
      hadoop_add_classpath "${HADOOP_YARN_HOME}/$i/target/classes"
    done

    hadoop_add_classpath "${HADOOP_YARN_HOME}/build/test/classes"
    hadoop_add_classpath "${HADOOP_YARN_HOME}/build/tools"
  fi

  if [[ -d "${HADOOP_YARN_HOME}/${YARN_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}"'/*'
}

function hadoop_add_to_classpath_mapred
{
  #
  # get all of the mapreduce jars+config in the path
  #
  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-shuffle/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-common/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-hs/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-hs-plugins/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-app/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-jobclient/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-core/target/classes"
  fi

  if [[ -d "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}"'/*'
}

function hadoop_add_to_classpath_userpath
{
  # Add the user-specified HADOOP_CLASSPATH to the
@ -563,7 +578,7 @@ function hadoop_add_to_classpath_userpath
    let c+=1
  done
  let j=c-1

  if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
    if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
      for ((i=j; i>=0; i--)); do
@ -602,8 +617,8 @@ function hadoop_os_tricks
      # NOTE! HADOOP_ALLOW_IPV6 is a developer hook.  We leave it
      # undocumented in hadoop-env.sh because we don't want users to
      # shoot themselves in the foot while devs make IPv6 work.
      if [[ -n "${bindv6only}" ]] &&
         [[ "${bindv6only}" -eq "1" ]] &&
         [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
        hadoop_error "ERROR: \"net.ipv6.bindv6only\" is set to 1 "
        hadoop_error "ERROR: Hadoop networking could be broken. Aborting."
@ -630,14 +645,14 @@ function hadoop_java_setup
    hadoop_error "ERROR: JAVA_HOME is not set and could not be found."
    exit 1
  fi

  if [[ ! -d "${JAVA_HOME}" ]]; then
    hadoop_error "ERROR: JAVA_HOME ${JAVA_HOME} does not exist."
    exit 1
  fi

  JAVA="${JAVA_HOME}/bin/java"

  if [[ ! -x "$JAVA" ]]; then
    hadoop_error "ERROR: $JAVA is not executable."
    exit 1
@ -716,7 +731,7 @@ function hadoop_finalize_hadoop_opts
function hadoop_finalize_classpath
{
  hadoop_add_classpath "${HADOOP_CONF_DIR}" before

  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more
  hadoop_add_to_classpath_userpath
@ -744,8 +759,9 @@ function hadoop_finalize_catalina_opts

function hadoop_finalize
{
  # user classpath gets added at the last minute. this allows
  # override of CONF dirs and more

  hadoop_shellprofiles_finalize

  hadoop_finalize_classpath
  hadoop_finalize_libpaths
  hadoop_finalize_hadoop_heap
@ -782,7 +798,7 @@ function hadoop_verify_secure_prereq
  # if you are on an OS like Illumos that has functional roles
  # and you are using pfexec, you'll probably want to change
  # this.

  # ${EUID} comes from the shell itself!
  if [[ "${EUID}" -ne 0 ]] && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
    hadoop_error "ERROR: You must be a privileged user in order to run a secure service."
@ -795,7 +811,7 @@ function hadoop_verify_secure_prereq
function hadoop_setup_secure_service
{
  # need a more complicated setup? replace me!

  HADOOP_PID_DIR=${HADOOP_SECURE_PID_DIR}
  HADOOP_LOG_DIR=${HADOOP_SECURE_LOG_DIR}
}
@ -844,7 +860,7 @@ function hadoop_verify_logdir
  rm "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
}

function hadoop_status_daemon()
{
  #
  # LSB 4.1.0 compatible status command (1)
@ -861,12 +877,12 @@ function hadoop_status_daemon()
  # them the benefit of a doubt and returning 1 even if
  # our pid is not in /var/run .
  #

  local pidfile=$1
  shift

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "${pidfile}")
    if ps -p "${pid}" > /dev/null 2>&1; then
@ -885,7 +901,7 @@ function hadoop_java_exec
  local command=$1
  local class=$2
  shift 2

  hadoop_debug "Final CLASSPATH: ${CLASSPATH}"
  hadoop_debug "Final HADOOP_OPTS: ${HADOOP_OPTS}"
@ -932,14 +948,14 @@ function hadoop_start_daemon_wrapper
  local counter

  hadoop_rotate_log "${outfile}"

  hadoop_start_daemon "${daemonname}" \
    "$class" \
    "${pidfile}" \
    "$@" >> "${outfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${pidfile} && ${counter} -le 5 ]]; do
@ -953,23 +969,23 @@ function hadoop_start_daemon_wrapper
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${pidfile}."
  fi

  # shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot set priority of ${daemonname} process $!"
  fi

  # shellcheck disable=SC2086
  disown %+ >/dev/null 2>&1
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot disconnect ${daemonname} process $!"
  fi
  sleep 1

  # capture the ulimit output
  ulimit -a >> "${outfile}" 2>&1

  # shellcheck disable=SC2086
  if ! ps -p $! >/dev/null 2>&1; then
    return 1
@ -983,22 +999,22 @@ function hadoop_start_secure_daemon
  #
  local daemonname=$1
  local class=$2

  # pid file to create for our daemon
  local daemonpidfile=$3

  # where to send stdout. jsvc has bad habits so this *may* be &1
  # which means you send it to stdout!
  local daemonoutfile=$4

  # where to send stderr. same thing, except &2 = stderr
  local daemonerrfile=$5
  local privpidfile=$6
  shift 6

  hadoop_rotate_log "${daemonoutfile}"
  hadoop_rotate_log "${daemonerrfile}"

  jsvc="${JSVC_HOME}/jsvc"
  if [[ ! -f "${jsvc}" ]]; then
    hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
@ -1007,7 +1023,7 @@ function hadoop_start_secure_daemon
    hadoop_error "and set JSVC_HOME to the directory containing the jsvc binary."
    exit 1
  fi

  # note that shellcheck will throw a
  # bogus for-our-use-case 2086 here.
  # it doesn't properly support multi-line situations
@ -1020,7 +1036,7 @@ function hadoop_start_secure_daemon
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${privpidfile}."
  fi

  exec "${jsvc}" \
    "-Dproc_${daemonname}" \
    -outfile "${daemonoutfile}" \
@ -1039,29 +1055,29 @@ function hadoop_start_secure_daemon_wrapper
  # of the dirty work to launch a daemon in the background!
  local daemonname=$1
  local class=$2

  # same rules as hadoop_start_secure_daemon except we
  # have some additional parameters

  local daemonpidfile=$3

  local daemonoutfile=$4

  # the pid file of the subprocess that spawned our
  # secure launcher
  local jsvcpidfile=$5

  # the output of the subprocess that spawned our secure
  # launcher
  local jsvcoutfile=$6

  local daemonerrfile=$7
  shift 7

  local counter

  hadoop_rotate_log "${jsvcoutfile}"

  hadoop_start_secure_daemon \
    "${daemonname}" \
    "${class}" \
@ -1071,7 +1087,7 @@ function hadoop_start_secure_daemon_wrapper
    "${jsvcpidfile}" "$@" >> "${jsvcoutfile}" 2>&1 < /dev/null &

  # we need to avoid a race condition here
  # so let's wait for the fork to finish
  # before overriding with the daemonized pid
  (( counter=0 ))
  while [[ ! -f ${daemonpidfile} && ${counter} -le 5 ]]; do
@ -1085,7 +1101,7 @@ function hadoop_start_secure_daemon_wrapper
  if [[ $? -gt 0 ]]; then
    hadoop_error "ERROR: Cannot write ${daemonname} pid ${daemonpidfile}."
  fi

  sleep 1
  #shellcheck disable=SC2086
  renice "${HADOOP_NICENESS}" $! >/dev/null 2>&1
@ -1118,12 +1134,12 @@ function hadoop_stop_daemon
  local cmd=$1
  local pidfile=$2
  shift 2

  local pid

  if [[ -f "${pidfile}" ]]; then
    pid=$(cat "$pidfile")

    kill "${pid}" >/dev/null 2>&1
    sleep "${HADOOP_STOP_TIMEOUT}"
    if kill -0 "${pid}" > /dev/null 2>&1; then
@ -1145,7 +1161,7 @@ function hadoop_stop_secure_daemon
  local privpidfile=$3
  shift 3
  local ret

  hadoop_stop_daemon "${command}" "${daemonpidfile}"
  ret=$?
  rm -f "${daemonpidfile}" "${privpidfile}" 2>/dev/null
@ -1160,18 +1176,18 @@ function hadoop_daemon_handler
  local daemon_pidfile=$4
  local daemon_outfile=$5
  shift 5

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_daemon "${daemonname}" "${daemon_pidfile}"
      exit $?
    ;;

    ##COMPAT  -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
@ -1207,19 +1223,19 @@ function hadoop_secure_daemon_handler
  local priv_outfile=$7
  local priv_errfile=$8
  shift 8

  case ${daemonmode} in
    status)
      hadoop_status_daemon "${daemon_pidfile}"
      exit $?
    ;;

    stop)
      hadoop_stop_secure_daemon "${daemonname}" \
        "${daemon_pidfile}" "${priv_pidfile}"
      exit $?
    ;;

    ##COMPAT  -- older hadoops would also start daemons by default
    start|default)
      hadoop_verify_piddir
@ -1232,7 +1248,7 @@ function hadoop_secure_daemon_handler
        # stale pid file, so just remove it and continue on
        rm -f "${daemon_pidfile}" >/dev/null 2>&1
      fi

      ##COMPAT - differentiate between --daemon start and nothing
      # "nothing" shouldn't detach
      if [[ "${daemonmode}" = "default" ]]; then
@ -0,0 +1,106 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#
# This is an example shell profile.  It does not do anything other than
# show an example of what the general structure and API of the pluggable
# shell profile code looks like.
#
#

#
# First, register the profile:
#
# hadoop_add_profile example
#
#
# This profile name determines what the name of the functions will
# be.  The general pattern is _(profilename)_hadoop_(capability).  There
# are currently four capabilities:
#   * init
#   * classpath
#   * nativelib
#   * finalize
#
# None of these functions are required.  Examples of all four follow...

#
# The _hadoop_init function is called near the very beginning of the
# execution cycle.  System and site-level shell env vars have been set,
# command line processing finished, etc.  Note that the user's .hadooprc
# has not yet been processed.  This is to allow them to override anything
# that may be set here or potentially a dependency!
#
# function _example_hadoop_init
# {
#   # This example expects a home.  So set a default if not set.
#   EXAMPLE_HOME="${EXAMPLE_HOME:-/usr/example}"
# }
#

#
# The _hadoop_classpath function is called when the shell code is
# establishing the classpath.  This function should use the
# shell hadoop_add_classpath function rather than directly
# manipulating the CLASSPATH variable.  This ensures that the
# CLASSPATH does not have duplicates and provides basic
# sanity checks
#
# function _example_hadoop_classpath
# {
#   # jars that should be near the front
#   hadoop_add_classpath "${EXAMPLE_HOME}/share/pre-jars/*" before
#
#   # jars that should be near the back
#   hadoop_add_classpath "${EXAMPLE_HOME}/share/post-jars/*" after
# }

#
# The _hadoop_nativelib function is called when the shell code is
# building the locations for linkable shared libraries.  Depending
# upon needs, there are shell function calls that are useful
# to use here:
#
# hadoop_add_javalibpath will push the path onto the command line
# and into the java.library.path system property.  In the majority
# of cases, this should be sufficient, especially if the shared
# library has been linked correctly with $ORIGIN.
#
# hadoop_add_ldlibpath will push the path into the LD_LIBRARY_PATH
# env var.  This should be unnecessary for most code.
#
# function _example_hadoop_nativelib
# {
#   # our library is standalone, so just need the basic path
#   # added.  Using after so we are later in the link list
#   hadoop_add_javalibpath "${EXAMPLE_HOME}/lib" after
# }

#
# The _hadoop_finalize function is called to finish up whatever
# extra work needs to be done prior to exec'ing java or some other
# binary.  This is where command line properties should get added
# and any last minute work.  This is called prior to Hadoop common
# which means that one can override any parameters that Hadoop
# would normally put here... so be careful!
#
# Useful functions here include hadoop_add_param and for
# Windows compatibility, hadoop_translate_cygwin_path.
#
# function _example_hadoop_finalize
# {
#   # we need a property for our feature
#   hadoop_add_param HADOOP_OPTS Dexample.feature "-Dexample.feature=awesome"
# }
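
Putting the commented samples above together, an enabled profile is just one sourced file that registers itself and defines whichever hooks it needs. A minimal uncommented sketch, using the same hypothetical `example` name and paths from the comments above (illustrative only, not shipped by this commit):

# Hypothetical shellprofile.d/example, assembled from the commented samples above.
hadoop_add_profile example

function _example_hadoop_init
{
  # pick a default home if the admin did not set one
  EXAMPLE_HOME="${EXAMPLE_HOME:-/usr/example}"
}

function _example_hadoop_classpath
{
  # jars that should be near the front of the classpath
  hadoop_add_classpath "${EXAMPLE_HOME}/share/pre-jars/*" before
}

function _example_hadoop_finalize
{
  # push a JVM property onto the final command line
  hadoop_add_param HADOOP_OPTS Dexample.feature "-Dexample.feature=awesome"
}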
hadoop-hdfs-project/hadoop-hdfs/src/main/shellprofile.d/hdfs (new file, 36 lines)
@ -0,0 +1,36 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

hadoop_add_profile hdfs

function _hdfs_hadoop_classpath
{
  #
  # get all of the hdfs jars+config in the path
  #
  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_HDFS_HOME}/hadoop-hdfs/target/classes"
  fi

  # put hdfs in classpath if present
  if [[ -d "${HADOOP_HDFS_HOME}/${HDFS_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_HDFS_HOME}/${HDFS_DIR}"'/*'
}
hadoop-mapreduce-project/shellprofile.d/mapreduce (new file, 41 lines)
@ -0,0 +1,41 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

hadoop_add_profile mapred

function _mapred_hadoop_classpath
{
  #
  # get all of the mapreduce jars+config in the path
  #
  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-shuffle/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-common/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-hs/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-hs-plugins/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-app/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-jobclient/target/classes"
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-core/target/classes"
  fi

  if [[ -d "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_MAPRED_HOME}/${MAPRED_DIR}"'/*'
}
@ -195,17 +195,6 @@ if [[ "${HADOOP_DAEMON_MODE}" != "default" ]]; then
  HADOOP_LOGFILE="hadoop-${HADOOP_IDENT_STRING}-${COMMAND}-${HOSTNAME}.log"
fi

# Add YARN custom options to command line in case someone actually
# used these.
YARN_LOG_DIR=$HADOOP_LOG_DIR
hadoop_translate_cygwin_path YARN_LOG_DIR
hadoop_add_param HADOOP_OPTS yarn.log.dir "-Dyarn.log.dir=${YARN_LOG_DIR}"
hadoop_add_param HADOOP_OPTS yarn.log.file "-Dyarn.log.file=${HADOOP_LOGFILE}"
YARN_HOME_DIR=$HADOOP_YARN_HOME
hadoop_translate_cygwin_path YARN_HOME_DIR
hadoop_add_param HADOOP_OPTS yarn.home.dir "-Dyarn.home.dir=${YARN_HOME_DIR}"
hadoop_add_param HADOOP_OPTS yarn.root.logger "-Dyarn.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"

hadoop_finalize

if [[ -n "${supportdaemonization}" ]]; then
hadoop-yarn-project/hadoop-yarn/shellprofile.d/yarn (new file, 62 lines)
@ -0,0 +1,62 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

hadoop_add_profile yarn

function _yarn_hadoop_classpath
{
  local i
  #
  # get all of the yarn jars+config in the path
  #
  # developers
  if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
    for i in yarn-api yarn-common yarn-mapreduce yarn-master-worker \
             yarn-server/yarn-server-nodemanager \
             yarn-server/yarn-server-common \
             yarn-server/yarn-server-resourcemanager; do
      hadoop_add_classpath "${HADOOP_YARN_HOME}/$i/target/classes"
    done

    hadoop_add_classpath "${HADOOP_YARN_HOME}/build/test/classes"
    hadoop_add_classpath "${HADOOP_YARN_HOME}/build/tools"
  fi

  if [[ -d "${HADOOP_YARN_HOME}/${YARN_DIR}/webapps" ]]; then
    hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}"
  fi

  hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}"'/*'
  hadoop_add_classpath "${HADOOP_YARN_HOME}/${YARN_DIR}"'/*'
}

function _yarn_hadoop_finalize
{
  # Add YARN custom options to command line in case someone actually
  # used these.
  #
  # Note that we are replacing ' ' with '\ ' so that when we exec
  # stuff it works
  #
  local yld=$HADOOP_LOG_DIR
  hadoop_translate_cygwin_path yld
  hadoop_add_param HADOOP_OPTS yarn.log.dir "-Dyarn.log.dir=${yld}"
  hadoop_add_param HADOOP_OPTS yarn.log.file "-Dyarn.log.file=${HADOOP_LOGFILE}"
  local yhd=$HADOOP_YARN_HOME
  hadoop_translate_cygwin_path yhd
  hadoop_add_param HADOOP_OPTS yarn.home.dir "-Dyarn.home.dir=${yhd}"
  hadoop_add_param HADOOP_OPTS yarn.root.logger "-Dyarn.root.logger=${YARN_ROOT_LOGGER:-INFO,console}"
}
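
Given the import logic in hadoop_import_shellprofiles, profiles are read from both ${HADOOP_LIBEXEC_DIR}/shellprofile.d and ${HADOOP_CONF_DIR}/shellprofile.d, so a site or vendor add-on can hook into the classpath and JVM options without patching the bundled scripts. A hedged sketch of such a drop-in (the vendortool name and path are invented for illustration):

# Hypothetical ${HADOOP_CONF_DIR}/shellprofile.d/vendortool
hadoop_add_profile vendortool

function _vendortool_hadoop_classpath
{
  # append the vendor jars after the stock Hadoop jars
  hadoop_add_classpath "/opt/vendortool/lib/*" after
}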