From 18eb5e79345295b2259b566c154375ad2a6216a1 Mon Sep 17 00:00:00 2001
From: cnauroth
Date: Wed, 22 Apr 2015 22:23:31 -0700
Subject: [PATCH] HADOOP-11861. test-patch.sh rewrite addendum patch.
 Contributed by Allen Wittenauer.

---
 dev-support/test-patch.d/shellcheck.sh |  12 +-
 dev-support/test-patch.sh              | 226 ++++++++++++------
 .../hadoop-common/CHANGES.txt          |   3 +
 3 files changed, 165 insertions(+), 76 deletions(-)

diff --git a/dev-support/test-patch.d/shellcheck.sh b/dev-support/test-patch.d/shellcheck.sh
index b60da36f3c..c1084a2e5b 100755
--- a/dev-support/test-patch.d/shellcheck.sh
+++ b/dev-support/test-patch.d/shellcheck.sh
@@ -18,7 +18,7 @@ add_plugin shellcheck
 
 SHELLCHECK_TIMER=0
 
-SHELLCHECK=${SHELLCHECK:-$(which shellcheck)}
+SHELLCHECK=${SHELLCHECK:-$(which shellcheck 2>/dev/null)}
 
 SHELLCHECK_SPECIFICFILES=""
 
@@ -64,8 +64,9 @@ function shellcheck_preapply
 
   big_console_header "shellcheck plugin: prepatch"
 
-  if [[ -z "${SHELLCHECK}" ]]; then
+  if [[ ! -x "${SHELLCHECK}" ]]; then
     hadoop_error "shellcheck is not available."
+    return 0
   fi
 
   start_clock
@@ -97,8 +98,10 @@ function shellcheck_postapply
 
   big_console_header "shellcheck plugin: postpatch"
 
-  if [[ -z "${SHELLCHECK}" ]]; then
+  if [[ ! -x "${SHELLCHECK}" ]]; then
     hadoop_error "shellcheck is not available."
+    add_jira_table 0 shellcheck "Shellcheck was not available."
+    return 0
   fi
 
   start_clock
@@ -126,7 +129,8 @@ function shellcheck_postapply
   # shellcheck disable=SC2016
   diffPostpatch=$(wc -l "${PATCH_DIR}/diffpatchshellcheck.txt" | ${AWK} '{print $1}')
 
-  if [[ ${diffPostpatch} -gt 0 ]] ; then
+  if [[ ${diffPostpatch} -gt 0
+     && ${numPostpatch} -gt ${numPrepatch} ]] ; then
     add_jira_table -1 shellcheck "The applied patch generated "\
       "${diffPostpatch} new shellcheck (v${SHELLCHECK_VERSION}) issues (total was ${numPrepatch}, now ${numPostpatch})."
     add_jira_footer shellcheck "@@BASE@@/diffpatchshellcheck.txt"
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index 6e8679e49c..2537e8572d 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -52,6 +52,7 @@ function setup_defaults
   ISSUE=""
   ISSUE_RE='^(HADOOP|YARN|MAPREDUCE|HDFS)-[0-9]+$'
   TIMER=$(date +"%s")
+  PATCHURL=""
 
   OSTYPE=$(uname -s)
 
@@ -389,6 +390,69 @@ function find_java_home
   return 0
 }
 
+## @description  Write the contents of a file to Jenkins
+## @param        filename
+## @stability    stable
+## @audience     public
+## @returns      ${JIRACLI} exit code
+function write_to_jira
+{
+  local -r commentfile=${1}
+  shift
+
+  local retval
+
+  if [[ ${OFFLINE} == false
+     && ${JENKINS} == true ]]; then
+    export USER=hudson
+    # shellcheck disable=SC2086
+    ${JIRACLI} --comment "$(cat ${commentfile})" \
+               -s https://issues.apache.org/jira \
+               -a addcomment -u hadoopqa \
+               -p "${JIRA_PASSWD}" \
+               --issue "${ISSUE}"
+    retval=$?
+    ${JIRACLI} -s https://issues.apache.org/jira \
+               -a logout -u hadoopqa \
+               -p "${JIRA_PASSWD}"
+  fi
+  return ${retval}
+}
+
+## @description  Verify that the patch directory is still in working order,
+## @description  since bad actors on some systems wipe it out. If it is
+## @description  gone, report the error and exit.
+## @audience     private
+## @stability    evolving
+## @replaceable  yes
+## @returns      may exit on failure
+function verify_patchdir_still_exists
+{
+  local -r commentfile=/tmp/testpatch.$$.${RANDOM}
+  local extra=""
+
+  if [[ ! -d ${PATCH_DIR} ]]; then
+    rm "${commentfile}" 2>/dev/null
+
+    echo "(!) The patch artifact directory has been removed! " > "${commentfile}"
+    echo "This is a fatal error for test-patch.sh. Aborting. " >> "${commentfile}"
" >> "${commentfile}" + echo + cat ${commentfile} + echo + if [[ ${JENKINS} == true ]]; then + if [[ -n ${NODE_NAME} ]]; then + extra=" (node ${NODE_NAME})" + fi + echo "Jenkins${extra} information at ${BUILD_URL} may provide some hints. " >> "${commentfile}" + + write_to_jira ${commentfile} + fi + + rm "${commentfile}" + cleanup_and_exit ${RESULT} + fi +} + ## @description Print the command to be executing to the screen. Then ## @description run the command, sending stdout and stderr to the given filename ## @description This will also ensure that any directories in ${BASEDIR} have @@ -404,6 +468,9 @@ function echo_and_redirect { logfile=$1 shift + + verify_patchdir_still_exists + find "${BASEDIR}" -type d -exec chmod +x {} \; echo "${*} > ${logfile} 2>&1" "${@}" > "${logfile}" 2>&1 @@ -426,6 +493,7 @@ function hadoop_usage echo echo "Options:" echo "--basedir= The directory to apply the patch to (default current directory)" + echo "--branch= Forcibly set the branch" echo "--build-native= If true, then build native components (default 'true')" echo "--debug If set, then output some extra stuff to stderr" echo "--dirty-workspace Allow the local git workspace to have uncommitted changes" @@ -475,6 +543,9 @@ function parse_args --basedir=*) BASEDIR=${i#*=} ;; + --branch=*) + PATCH_BRANCH=${i#*=} + ;; --build-native=*) BUILD_NATIVE=${i#*=} ;; @@ -722,6 +793,16 @@ function git_checkout cleanup_and_exit 1 fi + # we need to explicitly fetch in case the + # git ref hasn't been brought in tree yet + if [[ ${OFFLINE} == false ]]; then + ${GIT} pull --rebase + if [[ $? != 0 ]]; then + hadoop_error "ERROR: git pull is failing" + cleanup_and_exit 1 + fi + fi + else cd "${BASEDIR}" if [[ ! -d .git ]]; then @@ -888,38 +969,45 @@ function determine_branch allbranches=$(${GIT} branch -r | tr -d ' ' | ${SED} -e s,origin/,,g) - # shellcheck disable=SC2016 - patchnamechunk=$(echo "${PATCH_OR_ISSUE}" | ${AWK} -F/ '{print $NF}') + for j in "${PATCHURL}" "${PATCH_OR_ISSUE}"; do + hadoop_debug "Determine branch: starting with ${j}" + # shellcheck disable=SC2016 + patchnamechunk=$(echo "${j}" | ${AWK} -F/ '{print $NF}') - # ISSUE.branch.##.patch - PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f2 -d. ) - verify_valid_branch "${allbranches}" "${PATCH_BRANCH}" - if [[ $? == 0 ]]; then - return - fi + # ISSUE.branch.##.patch + hadoop_debug "Determine branch: ISSUE.branch.##.patch" + PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f2 -d. ) + verify_valid_branch "${allbranches}" "${PATCH_BRANCH}" + if [[ $? == 0 ]]; then + return + fi - # ISSUE-branch-##.patch - PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f3- -d- | cut -f1,2 -d-) - verify_valid_branch "${allbranches}" "${PATCH_BRANCH}" - if [[ $? == 0 ]]; then - return - fi + # ISSUE-branch-##.patch + hadoop_debug "Determine branch: ISSUE-branch-##.patch" + PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f3- -d- | cut -f1,2 -d-) + verify_valid_branch "${allbranches}" "${PATCH_BRANCH}" + if [[ $? == 0 ]]; then + return + fi - # ISSUE-##.patch.branch - # shellcheck disable=SC2016 - PATCH_BRANCH=$(echo "${patchnamechunk}" | ${AWK} -F. '{print $NF}') - verify_valid_branch "${allbranches}" "${PATCH_BRANCH}" - if [[ $? == 0 ]]; then - return - fi + # ISSUE-##.patch.branch + hadoop_debug "Determine branch: ISSUE-##.patch.branch" + # shellcheck disable=SC2016 + PATCH_BRANCH=$(echo "${patchnamechunk}" | ${AWK} -F. '{print $NF}') + verify_valid_branch "${allbranches}" "${PATCH_BRANCH}" + if [[ $? 
+      return
+    fi
 
-  # ISSUE-branch.##.patch
-  # shellcheck disable=SC2016
-  PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f3- -d- | ${AWK} -F. '{print $(NF-2)}' 2>/dev/null)
-  verify_valid_branch "${allbranches}" "${PATCH_BRANCH}"
-  if [[ $? == 0 ]]; then
-    return
-  fi
+    # ISSUE-branch.##.patch
+    hadoop_debug "Determine branch: ISSUE-branch.##.patch"
+    # shellcheck disable=SC2016
+    PATCH_BRANCH=$(echo "${patchnamechunk}" | cut -f3- -d- | ${AWK} -F. '{print $(NF-2)}' 2>/dev/null)
+    verify_valid_branch "${allbranches}" "${PATCH_BRANCH}"
+    if [[ $? == 0 ]]; then
+      return
+    fi
+  done
 
   PATCH_BRANCH=trunk
 
@@ -1070,7 +1158,7 @@ function locate_patch
   else
     if [[ ${PATCH_OR_ISSUE} =~ ^http ]]; then
       echo "Patch is being downloaded at $(date) from"
-      patchURL="${PATCH_OR_ISSUE}"
+      PATCHURL="${PATCH_OR_ISSUE}"
     else
       ${WGET} -q -O "${PATCH_DIR}/jira" "http://issues.apache.org/jira/browse/${PATCH_OR_ISSUE}"
 
@@ -1089,17 +1177,17 @@ function locate_patch
       fi
 
       relativePatchURL=$(${GREP} -o '"/jira/secure/attachment/[0-9]*/[^"]*' "${PATCH_DIR}/jira" | ${GREP} -v -e 'htm[l]*$' | sort | tail -1 | ${GREP} -o '/jira/secure/attachment/[0-9]*/[^"]*')
-      patchURL="http://issues.apache.org${relativePatchURL}"
-      if [[ ! ${patchURL} =~ \.patch$ ]]; then
-        hadoop_error "ERROR: ${patchURL} is not a patch file."
+      PATCHURL="http://issues.apache.org${relativePatchURL}"
+      if [[ ! ${PATCHURL} =~ \.patch$ ]]; then
+        hadoop_error "ERROR: ${PATCHURL} is not a patch file."
         cleanup_and_exit 1
       fi
-      patchNum=$(echo "${patchURL}" | ${GREP} -o '[0-9]*/' | ${GREP} -o '[0-9]*')
+      patchNum=$(echo "${PATCHURL}" | ${GREP} -o '[0-9]*/' | ${GREP} -o '[0-9]*')
       echo "${ISSUE} patch is being downloaded at $(date) from"
     fi
-    echo "${patchURL}"
-    add_jira_footer "Patch URL" "${patchURL}"
-    ${WGET} -q -O "${PATCH_DIR}/patch" "${patchURL}"
+    echo "${PATCHURL}"
+    add_jira_footer "Patch URL" "${PATCHURL}"
+    ${WGET} -q -O "${PATCH_DIR}/patch" "${PATCHURL}"
     if [[ $? != 0 ]];then
      hadoop_error "ERROR: ${PATCH_OR_ISSUE} could not be downloaded."
      cleanup_and_exit 1
@@ -1196,22 +1284,10 @@ function check_reexec
 
     echo "(!) A patch to test-patch or smart-apply-patch has been detected. " > "${commentfile}"
     echo "Re-executing against the patched versions to perform further tests. " >> "${commentfile}"
-    echo "The console is at ${BUILD_URL}/console in case of problems." >> "${commentfile}"
+    echo "The console is at ${BUILD_URL}console in case of problems." >> "${commentfile}"
 
-    if [[ ${OFFLINE} == false ]]; then
-      export USER=hudson
-      # shellcheck disable=SC2086
-      ${JIRACLI} --comment "$(cat ${commentfile})" \
-        -s https://issues.apache.org/jira \
-        -a addcomment -u hadoopqa \
-        -p "${JIRA_PASSWD}" \
-        --issue "${ISSUE}"
-      ${JIRACLI} -s https://issues.apache.org/jira \
-        -a logout -u hadoopqa \
-        -p "${JIRA_PASSWD}"
-
-      rm "${commentfile}"
-    fi
+    write_to_jira "${commentfile}"
+    rm "${commentfile}"
   fi
 
   cd "${CWD}"
@@ -1223,12 +1299,11 @@
 
   exec "${PATCH_DIR}/dev-support-test/test-patch.sh" \
     --reexec \
+    --branch=${PATCH_BRANCH} \
    --patch-dir="${PATCH_DIR}" \
    "${USER_PARAMS[@]}"
 }
 
-
-
 ## @description  Check the current directory for @author tags
 ## @audience     private
 ## @stability    evolving
@@ -1243,6 +1318,11 @@ function check_author
 
   start_clock
 
+  if [[ ${CHANGED_FILES} =~ dev-support/test-patch ]]; then
+    add_jira_table 0 @author "Skipping @author checks as test-patch has been patched."
+    return 0
+  fi
+
   authorTags=$("${GREP}" -c -i '@author' "${PATCH_DIR}/patch")
   echo "There appear to be ${authorTags} @author tags in the patch."
   if [[ ${authorTags} != 0 ]] ; then
@@ -1433,7 +1513,7 @@ function check_javac
 
   start_clock
 
-  echo_and_redirect "${PATCH_DIR}/patchJavacWarnings.txt" "${MVN}" clean test -DskipTests -D${PROJECT_NAME}PatchProcess "${NATIVE_PROFILE}" -Ptest-patch
+  echo_and_redirect "${PATCH_DIR}/patchJavacWarnings.txt" "${MVN}" clean test -DskipTests -D${PROJECT_NAME}PatchProcess ${NATIVE_PROFILE} -Ptest-patch
   if [[ $? != 0 ]] ; then
     add_jira_table -1 javac "The patch appears to cause the build to fail."
     return 2
@@ -1761,7 +1841,7 @@ function check_unittests
         ordered_modules="${ordered_modules} ${hdfs_modules}"
         if [[ ${building_common} -eq 0 ]]; then
           echo "  Building hadoop-common with -Pnative in order to provide libhadoop.so to the hadoop-hdfs unit tests."
-          echo_and_redirect "${PATCH_DIR}/testrun_native.txt" "${MVN}" compile "${NATIVE_PROFILE}" "-D${PROJECT_NAME}PatchProcess"
+          echo_and_redirect "${PATCH_DIR}/testrun_native.txt" "${MVN}" compile ${NATIVE_PROFILE} "-D${PROJECT_NAME}PatchProcess"
           if [[ $? != 0 ]]; then
             add_jira_table -1 "native" "Failed to build the native portion " \
               "of hadoop-common prior to running the unit tests in ${ordered_modules}"
@@ -1781,7 +1861,7 @@ function check_unittests
     test_logfile=${PATCH_DIR}/testrun_${module_suffix}.txt
     echo "  Running tests in ${module_suffix}"
 
-    echo_and_redirect "${test_logfile}" "${MVN}" clean install -fae "${NATIVE_PROFILE}" "${REQUIRE_TEST_LIB_HADOOP}" -D${PROJECT_NAME}PatchProcess
+    echo_and_redirect "${test_logfile}" "${MVN}" clean install -fae ${NATIVE_PROFILE} ${REQUIRE_TEST_LIB_HADOOP} -D${PROJECT_NAME}PatchProcess
     test_build_result=$?
 
     add_jira_footer "${module_suffix} test log" "@@BASE@@/testrun_${module_suffix}.txt"
@@ -1983,7 +2063,7 @@ function output_to_jira
 
   big_console_header "Adding comment to JIRA"
 
-  add_jira_footer "Console output" "${BUILD_URL}/console"
+  add_jira_footer "Console output" "${BUILD_URL}console"
 
   if [[ ${result} == 0 ]]; then
     add_jira_header "(/) *{color:green}+1 overall{color}*"
@@ -2035,18 +2115,7 @@ function output_to_jira
 
   printf "\n\nThis message was automatically generated.\n\n" >> "${commentfile}"
 
-  if [[ ${OFFLINE} == false ]]; then
-    export USER=hudson
-    # shellcheck disable=SC2086
-    ${JIRACLI} --comment "$(cat "${commentfile}")" \
-      -s https://issues.apache.org/jira \
-      -a addcomment -u hadoopqa \
-      -p "${JIRA_PASSWD}" \
-      --issue "${ISSUE}"
-    ${JIRACLI} -s https://issues.apache.org/jira \
-      -a logout -u hadoopqa \
-      -p "${JIRA_PASSWD}"
-  fi
+  write_to_jira "${commentfile}"
 }
 
 ## @description  Clean the filesystem as appropriate and then exit
@@ -2061,7 +2130,9 @@ function cleanup_and_exit
   if [[ ${JENKINS} == "true" ]] ; then
     if [[ -e "${PATCH_DIR}" ]] ; then
       hadoop_debug "mv ${PATCH_DIR} ${BASEDIR}"
-      mv "${PATCH_DIR}" "${BASEDIR}"
+      if [[ -d "${PATCH_DIR}" ]]; then
+        mv "${PATCH_DIR}" "${BASEDIR}"
+      fi
     fi
   fi
   big_console_header "Finished build."
@@ -2081,6 +2152,8 @@ function postcheckout
 
   for routine in find_java_home verify_patch_file
   do
+    verify_patchdir_still_exists
+
     hadoop_debug "Running ${routine}"
     ${routine}
 
@@ -2093,6 +2166,8 @@ function postcheckout
   done
 
   for plugin in ${PLUGINS}; do
+    verify_patchdir_still_exists
+
     if declare -f ${plugin}_postcheckout >/dev/null 2>&1; then
 
       hadoop_debug "Running ${plugin}_postcheckout"
@@ -2122,6 +2197,7 @@ function preapply
   for routine in precheck_without_patch check_author \
                  check_modified_unittests
   do
+    verify_patchdir_still_exists
     hadoop_debug "Running ${routine}"
     ${routine}
 
@@ -2130,6 +2206,8 @@ function preapply
   done
 
   for plugin in ${PLUGINS}; do
+    verify_patchdir_still_exists
+
     if declare -f ${plugin}_preapply >/dev/null 2>&1; then
 
       hadoop_debug "Running ${plugin}_preapply"
@@ -2163,7 +2241,7 @@ function postapply
 
   for routine in check_javadoc check_apachelicense check_site
   do
-
+    verify_patchdir_still_exists
     hadoop_debug "Running ${routine}"
     $routine
 
@@ -2172,8 +2250,8 @@ function postapply
   done
 
   for plugin in ${PLUGINS}; do
+    verify_patchdir_still_exists
     if declare -f ${plugin}_postapply >/dev/null 2>&1; then
-
       hadoop_debug "Running ${plugin}_postapply"
       #shellcheck disable=SC2086
       ${plugin}_postapply
@@ -2193,12 +2271,14 @@ function postinstall
 
   for routine in check_mvn_eclipse check_findbugs
   do
+    verify_patchdir_still_exists
     hadoop_debug "Running ${routine}"
     ${routine}
     (( RESULT = RESULT + $? ))
   done
 
   for plugin in ${PLUGINS}; do
+    verify_patchdir_still_exists
     if declare -f ${plugin}_postinstall >/dev/null 2>&1; then
       hadoop_debug "Running ${plugin}_postinstall"
       #shellcheck disable=SC2086
@@ -2220,12 +2300,14 @@ function runtests
 
   ### Run tests for Jenkins or if explictly asked for by a developer
   if [[ ${JENKINS} == "true" || ${RUN_TESTS} == "true" ]] ; then
+    verify_patchdir_still_exists
     check_unittests
 
     (( RESULT = RESULT + $? ))
   fi
 
   for plugin in ${PLUGINS}; do
+    verify_patchdir_still_exists
     if declare -f ${plugin}_tests >/dev/null 2>&1; then
       hadoop_debug "Running ${plugin}_tests"
       #shellcheck disable=SC2086
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 1410ce4252..8311b8be6e 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -542,6 +542,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-11848. Incorrect arguments to sizeof in DomainSocket.c (Malcolm
     Kavalsky via Colin P. McCabe)
 
+    HADOOP-11861. test-patch.sh rewrite addendum patch.
+    (Allen Wittenauer via cnauroth)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES