HADOOP-11861. test-patch.sh rewrite addendum patch. Contributed by Allen Wittenauer.

cnauroth committed 2015-04-22 22:23:31 -07:00
parent a100be685c
commit 18eb5e7934
3 changed files with 165 additions and 76 deletions

dev-support/test-patch.d/shellcheck.sh

@@ -18,7 +18,7 @@ add_plugin shellcheck
 SHELLCHECK_TIMER=0
-SHELLCHECK=${SHELLCHECK:-$(which shellcheck)}
+SHELLCHECK=${SHELLCHECK:-$(which shellcheck 2>/dev/null)}
 SHELLCHECK_SPECIFICFILES=""
@@ -64,8 +64,9 @@ function shellcheck_preapply
   big_console_header "shellcheck plugin: prepatch"

-  if [[ -z "${SHELLCHECK}" ]]; then
+  if [[ ! -x "${SHELLCHECK}" ]]; then
     hadoop_error "shellcheck is not available."
+    return 0
   fi

   start_clock
@@ -97,8 +98,10 @@ function shellcheck_postapply
   big_console_header "shellcheck plugin: postpatch"

-  if [[ -z "${SHELLCHECK}" ]]; then
+  if [[ ! -x "${SHELLCHECK}" ]]; then
     hadoop_error "shellcheck is not available."
+    add_jira_table 0 shellcheck "Shellcheck was not available."
+    return 0
   fi

   start_clock
@@ -126,7 +129,8 @@ function shellcheck_postapply
   # shellcheck disable=SC2016
   diffPostpatch=$(wc -l "${PATCH_DIR}/diffpatchshellcheck.txt" | ${AWK} '{print $1}')

-  if [[ ${diffPostpatch} -gt 0 ]] ; then
+  if [[ ${diffPostpatch} -gt 0
+     && ${numPostpatch} -gt ${numPrepatch} ]] ; then
     add_jira_table -1 shellcheck "The applied patch generated "\
       "${diffPostpatch} new shellcheck (v${SHELLCHECK_VERSION}) issues (total was ${numPrepatch}, now ${numPostpatch})."
     add_jira_footer shellcheck "@@BASE@@/diffpatchshellcheck.txt"
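
The guard change from -z to ! -x above is deliberate: redirecting which's stderr means a missing shellcheck now yields a quiet empty string, and the -x test rejects both that empty string and a path that exists without execute permission. A minimal sketch, not part of the commit, of the difference:

  SHELLCHECK=""
  [[ -z "${SHELLCHECK}" ]] && echo "-z: not available"     # fires
  [[ ! -x "${SHELLCHECK}" ]] && echo "-x: not available"   # also fires

  SHELLCHECK=/tmp/fake-shellcheck                          # made-up path for illustration
  touch "${SHELLCHECK}"                                    # exists, but not executable
  [[ -z "${SHELLCHECK}" ]] || echo "-z: looks available"   # wrong answer
  [[ ! -x "${SHELLCHECK}" ]] && echo "-x: not available"   # still correct
  rm -f "${SHELLCHECK}"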

dev-support/test-patch.sh

@@ -52,6 +52,7 @@ function setup_defaults
   ISSUE=""
   ISSUE_RE='^(HADOOP|YARN|MAPREDUCE|HDFS)-[0-9]+$'
   TIMER=$(date +"%s")
+  PATCHURL=""

   OSTYPE=$(uname -s)
@@ -389,6 +390,69 @@ function find_java_home
   return 0
 }

+## @description Write the contents of a file to jenkins
+## @params      filename
+## @stability   stable
+## @audience    public
+## @returns     ${JIRACLI} exit code
+function write_to_jira
+{
+  local -r commentfile=${1}
+  shift
+
+  local retval
+
+  if [[ ${OFFLINE} == false
+     && ${JENKINS} == true ]]; then
+    export USER=hudson
+    # shellcheck disable=SC2086
+    ${JIRACLI} --comment "$(cat ${commentfile})" \
+               -s https://issues.apache.org/jira \
+               -a addcomment -u hadoopqa \
+               -p "${JIRA_PASSWD}" \
+               --issue "${ISSUE}"
+    retval=$?
+    ${JIRACLI} -s https://issues.apache.org/jira \
+               -a logout -u hadoopqa \
+               -p "${JIRA_PASSWD}"
+  fi
+  return ${retval}
+}
+
+## @description Verify that the patch directory is still in working order
+## @description since bad actors on some systems wipe it out. If not,
+## @description recreate it and then exit
+## @audience    private
+## @stability   evolving
+## @replaceable yes
+## @returns     may exit on failure
+function verify_patchdir_still_exists
+{
+  local -r commentfile=/tmp/testpatch.$$.${RANDOM}
+  local extra=""
+
+  if [[ ! -d ${PATCH_DIR} ]]; then
+    rm "${commentfile}" 2>/dev/null
+    echo "(!) The patch artifact directory on has been removed! " > "${commentfile}"
+    echo "This is a fatal error for test-patch.sh. Aborting. " >> "${commentfile}"
+    echo
+    cat ${commentfile}
+    echo
+    if [[ ${JENKINS} == true ]]; then
+      if [[ -n ${NODE_NAME} ]]; then
+        extra=" (node ${NODE_NAME})"
+      fi
+      echo "Jenkins${extra} information at ${BUILD_URL} may provide some hints. " >> "${commentfile}"
+
+      write_to_jira ${commentfile}
+    fi
+
+    rm "${commentfile}"
+    cleanup_and_exit ${RESULT}
+  fi
+}
+
 ## @description Print the command to be executing to the screen. Then
 ## @description run the command, sending stdout and stderr to the given filename
 ## @description This will also ensure that any directories in ${BASEDIR} have
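
The new write_to_jira helper centralizes the JIRA CLI invocations that were previously inlined in check_reexec and output_to_jira (both are trimmed further down). Callers stage a comment in a file and hand over the filename; a sketch of the calling convention, with a made-up message:

  commentfile=/tmp/testpatch.$$.${RANDOM}
  echo "(!) Something noteworthy happened." > "${commentfile}"
  write_to_jira "${commentfile}"   # silently a no-op unless JENKINS=true and OFFLINE=false
  rm -f "${commentfile}"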
@@ -404,6 +468,9 @@ function echo_and_redirect
 {
   logfile=$1
   shift
+
+  verify_patchdir_still_exists
+
   find "${BASEDIR}" -type d -exec chmod +x {} \;
   echo "${*} > ${logfile} 2>&1"
   "${@}" > "${logfile}" 2>&1
@@ -426,6 +493,7 @@ function hadoop_usage
   echo
   echo "Options:"
   echo "--basedir=<dir>        The directory to apply the patch to (default current directory)"
+  echo "--branch=<dir>         Forcibly set the branch"
   echo "--build-native=<bool>  If true, then build native components (default 'true')"
   echo "--debug                If set, then output some extra stuff to stderr"
   echo "--dirty-workspace      Allow the local git workspace to have uncommitted changes"
@@ -475,6 +543,9 @@ function parse_args
       --basedir=*)
         BASEDIR=${i#*=}
       ;;
+      --branch=*)
+        PATCH_BRANCH=${i#*=}
+      ;;
       --build-native=*)
         BUILD_NATIVE=${i#*=}
       ;;
@@ -722,6 +793,16 @@ function git_checkout
       cleanup_and_exit 1
     fi

+    # we need to explicitly fetch in case the
+    # git ref hasn't been brought in tree yet
+    if [[ ${OFFLINE} == false ]]; then
+      ${GIT} pull --rebase
+      if [[ $? != 0 ]]; then
+        hadoop_error "ERROR: git pull is failing"
+        cleanup_and_exit 1
+      fi
+    fi
   else
     cd "${BASEDIR}"
     if [[ ! -d .git ]]; then
@@ -888,10 +969,13 @@ function determine_branch
   allbranches=$(${GIT} branch -r | tr -d ' ' | ${SED} -e s,origin/,,g)

+  for j in "${PATCHURL}" "${PATCH_OR_ISSUE}"; do
+    hadoop_debug "Determine branch: starting with ${j}"
     # shellcheck disable=SC2016
-    patchnamechunk=$(echo "${PATCH_OR_ISSUE}" | ${AWK} -F/ '{print $NF}')
+    patchnamechunk=$(echo "${j}" | ${AWK} -F/ '{print $NF}')

     # ISSUE.branch.##.patch
+    hadoop_debug "Determine branch: ISSUE.branch.##.patch"
     PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f2 -d. )
     verify_valid_branch "${allbranches}" "${PATCH_BRANCH}"
     if [[ $? == 0 ]]; then
@@ -899,6 +983,7 @@ function determine_branch
     fi

     # ISSUE-branch-##.patch
+    hadoop_debug "Determine branch: ISSUE-branch-##.patch"
     PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f3- -d- | cut -f1,2 -d-)
     verify_valid_branch "${allbranches}" "${PATCH_BRANCH}"
     if [[ $? == 0 ]]; then
@@ -906,6 +991,7 @@ function determine_branch
     fi

     # ISSUE-##.patch.branch
+    hadoop_debug "Determine branch: ISSUE-##.patch.branch"
     # shellcheck disable=SC2016
     PATCH_BRANCH=$(echo "${patchnamechunk}" | ${AWK} -F. '{print $NF}')
     verify_valid_branch "${allbranches}" "${PATCH_BRANCH}"
@@ -914,12 +1000,14 @@ function determine_branch
     fi

     # ISSUE-branch.##.patch
+    hadoop_debug "Determine branch: ISSUE-branch.##.patch"
     # shellcheck disable=SC2016
     PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f3- -d- | ${AWK} -F. '{print $(NF-2)}' 2>/dev/null)
     verify_valid_branch "${allbranches}" "${PATCH_BRANCH}"
     if [[ $? == 0 ]]; then
       return
     fi
+  done

   PATCH_BRANCH=trunk
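
determine_branch now loops over both ${PATCHURL} and ${PATCH_OR_ISSUE}, tries each naming convention in turn, and leaves a hadoop_debug breadcrumb per attempt before falling back to trunk. A worked illustration, with a fabricated attachment URL, of what the pipelines extract:

  patchnamechunk=$(echo "https://example.org/attach/HADOOP-11861.branch-2.04.patch" | awk -F/ '{print $NF}')
  echo "${patchnamechunk}"                          # HADOOP-11861.branch-2.04.patch
  echo "${patchnamechunk}" | cut -f2 -d.            # branch-2  (the ISSUE.branch.##.patch rule wins)
  echo "${patchnamechunk}" | awk -F. '{print $NF}'  # patch     (ISSUE-##.patch.branch rule: no match)

Each candidate is then checked against the remote branch list by verify_valid_branch.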
@@ -1070,7 +1158,7 @@ function locate_patch
   else
     if [[ ${PATCH_OR_ISSUE} =~ ^http ]]; then
       echo "Patch is being downloaded at $(date) from"
-      patchURL="${PATCH_OR_ISSUE}"
+      PATCHURL="${PATCH_OR_ISSUE}"
     else
       ${WGET} -q -O "${PATCH_DIR}/jira" "http://issues.apache.org/jira/browse/${PATCH_OR_ISSUE}"
@@ -1089,17 +1177,17 @@ function locate_patch
     fi

     relativePatchURL=$(${GREP} -o '"/jira/secure/attachment/[0-9]*/[^"]*' "${PATCH_DIR}/jira" | ${GREP} -v -e 'htm[l]*$' | sort | tail -1 | ${GREP} -o '/jira/secure/attachment/[0-9]*/[^"]*')
-    patchURL="http://issues.apache.org${relativePatchURL}"
-    if [[ ! ${patchURL} =~ \.patch$ ]]; then
-      hadoop_error "ERROR: ${patchURL} is not a patch file."
+    PATCHURL="http://issues.apache.org${relativePatchURL}"
+    if [[ ! ${PATCHURL} =~ \.patch$ ]]; then
+      hadoop_error "ERROR: ${PATCHURL} is not a patch file."
       cleanup_and_exit 1
     fi
-    patchNum=$(echo "${patchURL}" | ${GREP} -o '[0-9]*/' | ${GREP} -o '[0-9]*')
+    patchNum=$(echo "${PATCHURL}" | ${GREP} -o '[0-9]*/' | ${GREP} -o '[0-9]*')
     echo "${ISSUE} patch is being downloaded at $(date) from"
   fi
-  echo "${patchURL}"
-  add_jira_footer "Patch URL" "${patchURL}"
-  ${WGET} -q -O "${PATCH_DIR}/patch" "${patchURL}"
+  echo "${PATCHURL}"
+  add_jira_footer "Patch URL" "${PATCHURL}"
+  ${WGET} -q -O "${PATCH_DIR}/patch" "${PATCHURL}"
   if [[ $? != 0 ]];then
     hadoop_error "ERROR: ${PATCH_OR_ISSUE} could not be downloaded."
     cleanup_and_exit 1
@@ -1196,23 +1284,11 @@ function check_reexec
     echo "(!) A patch to test-patch or smart-apply-patch has been detected. " > "${commentfile}"
     echo "Re-executing against the patched versions to perform further tests. " >> "${commentfile}"
-    echo "The console is at ${BUILD_URL}/console in case of problems." >> "${commentfile}"
+    echo "The console is at ${BUILD_URL}console in case of problems." >> "${commentfile}"

-    if [[ ${OFFLINE} == false ]]; then
-      export USER=hudson
-      # shellcheck disable=SC2086
-      ${JIRACLI} --comment "$(cat ${commentfile})" \
-                 -s https://issues.apache.org/jira \
-                 -a addcomment -u hadoopqa \
-                 -p "${JIRA_PASSWD}" \
-                 --issue "${ISSUE}"
-      ${JIRACLI} -s https://issues.apache.org/jira \
-                 -a logout -u hadoopqa \
-                 -p "${JIRA_PASSWD}"
-    fi
+    write_to_jira "${commentfile}"
     rm "${commentfile}"
-  fi
   fi

   cd "${CWD}"
   mkdir -p "${PATCH_DIR}/dev-support-test"
@@ -1223,12 +1299,11 @@ function check_reexec
   exec "${PATCH_DIR}/dev-support-test/test-patch.sh" \
     --reexec \
+    --branch ${PATCH_BRANCH} \
     --patch-dir="${PATCH_DIR}" \
     "${USER_PARAMS[@]}"
 }

 ## @description Check the current directory for @author tags
 ## @audience private
 ## @stability evolving
@@ -1243,6 +1318,11 @@ function check_author
   start_clock

+  if [[ ${CHANGED_FILES} =~ dev-support/test-patch ]]; then
+    add_jira_table 0 @author "Skipping @author checks as test-patch has been patched."
+    return 0
+  fi
+
   authorTags=$("${GREP}" -c -i '@author' "${PATCH_DIR}/patch")
   echo "There appear to be ${authorTags} @author tags in the patch."
   if [[ ${authorTags} != 0 ]] ; then
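
The new guard relies on bash's =~ performing unanchored regex matching, so any changed path containing the substring trips it. A small sketch, with a fabricated file list:

  CHANGED_FILES="dev-support/test-patch.sh hadoop-common/pom.xml"
  if [[ ${CHANGED_FILES} =~ dev-support/test-patch ]]; then
    echo "skipping @author check: test-patch itself is being patched"
  fi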
@@ -1433,7 +1513,7 @@ function check_javac
   start_clock

-  echo_and_redirect "${PATCH_DIR}/patchJavacWarnings.txt" "${MVN}" clean test -DskipTests -D${PROJECT_NAME}PatchProcess "${NATIVE_PROFILE}" -Ptest-patch
+  echo_and_redirect "${PATCH_DIR}/patchJavacWarnings.txt" "${MVN}" clean test -DskipTests -D${PROJECT_NAME}PatchProcess ${NATIVE_PROFILE} -Ptest-patch
   if [[ $? != 0 ]] ; then
     add_jira_table -1 javac "The patch appears to cause the build to fail."
     return 2
@@ -1761,7 +1841,7 @@ function check_unittests
       ordered_modules="${ordered_modules} ${hdfs_modules}"
       if [[ ${building_common} -eq 0 ]]; then
         echo "  Building hadoop-common with -Pnative in order to provide libhadoop.so to the hadoop-hdfs unit tests."
-        echo_and_redirect "${PATCH_DIR}/testrun_native.txt" "${MVN}" compile "${NATIVE_PROFILE}" "-D${PROJECT_NAME}PatchProcess"
+        echo_and_redirect "${PATCH_DIR}/testrun_native.txt" "${MVN}" compile ${NATIVE_PROFILE} "-D${PROJECT_NAME}PatchProcess"
         if [[ $? != 0 ]]; then
           add_jira_table -1 "native" "Failed to build the native portion " \
             "of hadoop-common prior to running the unit tests in ${ordered_modules}"
@@ -1781,7 +1861,7 @@ function check_unittests
     test_logfile=${PATCH_DIR}/testrun_${module_suffix}.txt
     echo "  Running tests in ${module_suffix}"
-    echo_and_redirect "${test_logfile}" "${MVN}" clean install -fae "${NATIVE_PROFILE}" "${REQUIRE_TEST_LIB_HADOOP}" -D${PROJECT_NAME}PatchProcess
+    echo_and_redirect "${test_logfile}" "${MVN}" clean install -fae ${NATIVE_PROFILE} ${REQUIRE_TEST_LIB_HADOOP} -D${PROJECT_NAME}PatchProcess
     test_build_result=$?

     add_jira_footer "${module_suffix} test log" "@@BASE@@/testrun_${module_suffix}.txt"
@@ -1983,7 +2063,7 @@ function output_to_jira
   big_console_header "Adding comment to JIRA"

-  add_jira_footer "Console output" "${BUILD_URL}/console"
+  add_jira_footer "Console output" "${BUILD_URL}console"

   if [[ ${result} == 0 ]]; then
     add_jira_header "(/) *{color:green}+1 overall{color}*"
@@ -2035,18 +2115,7 @@ function output_to_jira
   printf "\n\nThis message was automatically generated.\n\n" >> "${commentfile}"

-  if [[ ${OFFLINE} == false ]]; then
-    export USER=hudson
-    # shellcheck disable=SC2086
-    ${JIRACLI} --comment "$(cat "${commentfile}")" \
-               -s https://issues.apache.org/jira \
-               -a addcomment -u hadoopqa \
-               -p "${JIRA_PASSWD}" \
-               --issue "${ISSUE}"
-    ${JIRACLI} -s https://issues.apache.org/jira \
-               -a logout -u hadoopqa \
-               -p "${JIRA_PASSWD}"
-  fi
+  write_to_jira "${commentfile}"
 }

 ## @description Clean the filesystem as appropriate and then exit
@@ -2061,9 +2130,11 @@ function cleanup_and_exit
   if [[ ${JENKINS} == "true" ]] ; then
     if [[ -e "${PATCH_DIR}" ]] ; then
       hadoop_debug "mv ${PATCH_DIR} ${BASEDIR} "
+      if [[ -d "${PATCH_DIR}" ]]; then
         mv "${PATCH_DIR}" "${BASEDIR}"
+      fi
     fi
   fi

   big_console_header "Finished build."
   # shellcheck disable=SC2086
@@ -2081,6 +2152,8 @@ function postcheckout
   for routine in find_java_home verify_patch_file
   do
+    verify_patchdir_still_exists
+
     hadoop_debug "Running ${routine}"
     ${routine}
@@ -2093,6 +2166,8 @@ function postcheckout
   done

   for plugin in ${PLUGINS}; do
+    verify_patchdir_still_exists
+
     if declare -f ${plugin}_postcheckout >/dev/null 2>&1; then
       hadoop_debug "Running ${plugin}_postcheckout"
@@ -2122,6 +2197,7 @@ function preapply
   for routine in precheck_without_patch check_author \
                  check_modified_unittests
   do
+    verify_patchdir_still_exists
     hadoop_debug "Running ${routine}"
     ${routine}
@@ -2130,6 +2206,8 @@ function preapply
   done

   for plugin in ${PLUGINS}; do
+    verify_patchdir_still_exists
+
     if declare -f ${plugin}_preapply >/dev/null 2>&1; then
       hadoop_debug "Running ${plugin}_preapply"
@@ -2163,7 +2241,7 @@ function postapply
   for routine in check_javadoc check_apachelicense check_site
   do
-
+    verify_patchdir_still_exists
     hadoop_debug "Running ${routine}"
     $routine
@@ -2172,8 +2250,8 @@ function postapply
   done

   for plugin in ${PLUGINS}; do
-
+    verify_patchdir_still_exists
     if declare -f ${plugin}_postapply >/dev/null 2>&1; then
       hadoop_debug "Running ${plugin}_postapply"
       #shellcheck disable=SC2086
       ${plugin}_postapply
@@ -2193,12 +2271,14 @@ function postinstall
   for routine in check_mvn_eclipse check_findbugs
   do
+    verify_patchdir_still_exists
     hadoop_debug "Running ${routine}"
     ${routine}
     (( RESULT = RESULT + $? ))
   done

   for plugin in ${PLUGINS}; do
+    verify_patchdir_still_exists
     if declare -f ${plugin}_postinstall >/dev/null 2>&1; then
       hadoop_debug "Running ${plugin}_postinstall"
       #shellcheck disable=SC2086
@@ -2220,12 +2300,14 @@ function runtests
   ### Run tests for Jenkins or if explictly asked for by a developer
   if [[ ${JENKINS} == "true" || ${RUN_TESTS} == "true" ]] ; then
+    verify_patchdir_still_exists
     check_unittests
     (( RESULT = RESULT + $? ))
   fi

   for plugin in ${PLUGINS}; do
+    verify_patchdir_still_exists
     if declare -f ${plugin}_tests >/dev/null 2>&1; then
       hadoop_debug "Running ${plugin}_tests"
       #shellcheck disable=SC2086
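
Every phase loop above gains the same prologue, so a vanished ${PATCH_DIR} is caught just before each routine or plugin runs rather than partway through a build. Reduced to its shape, with placeholder step names:

  for routine in step_one step_two; do
    verify_patchdir_still_exists   # exits via cleanup_and_exit if ${PATCH_DIR} is gone
    hadoop_debug "Running ${routine}"
    ${routine}
    (( RESULT = RESULT + $? ))
  done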

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -542,6 +542,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-11848. Incorrect arguments to sizeof in DomainSocket.c (Malcolm
     Kavalsky via Colin P. McCabe)

+    HADOOP-11861. test-patch.sh rewrite addendum patch.
+    (Allen Wittenauer via cnauroth)
+
 Release 2.7.1 - UNRELEASED

   INCOMPATIBLE CHANGES