// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

pipeline {

  agent {
    label 'Hadoop'
  }

  options {
    buildDiscarder(logRotator(numToKeepStr: '5'))
    timeout (time: 24, unit: 'HOURS')
    timestamps()
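    // Check the Hadoop sources out into ${WORKSPACE}/src; the 'setup sources' stage
    // below copies that tree once per platform.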
    checkoutToSubdirectory('src')
  }

  environment {
    YETUS='yetus'
    // Branch or tag name. Yetus release tags are 'rel/X.Y.Z'
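    // (the value below pins a specific Yetus commit SHA rather than a release tag)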
    YETUS_VERSION='f9ba0170a5787a5f4662d3769804fef0226a182f'
  }
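
  // The job can also be triggered manually against a JIRA issue; the key supplied here
  // is presumably what Yetus/jenkins.sh uses to locate the patch under test.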
  parameters {
    string(name: 'JIRA_ISSUE_KEY',
           defaultValue: '',
           description: 'The JIRA issue that has a patch needing pre-commit testing. Example: HADOOP-1234')
  }

  stages {
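
    // Check out Apache Yetus (at the commit pinned in YETUS_VERSION above) into
    // ${WORKSPACE}/yetus; the precommit stages below rely on it, presumably via
    // dev-support/jenkins.sh.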
    stage ('install yetus') {
      steps {
        dir("${WORKSPACE}/${YETUS}") {
          checkout([
              $class: 'GitSCM',
              branches: [[name: "${env.YETUS_VERSION}"]],
              userRemoteConfigs: [[ url: 'https://github.com/apache/yetus.git']]]
          )
        }
      }
    }

    // Set up the codebase so that each platform's build happens in its own exclusive
    // copy of the codebase, primarily because Yetus rewrites the git branch information
    // and would otherwise affect the optional stages that run after the first one.
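    // (each per-platform stage below points its SOURCEDIR at its own copy)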
    stage ('setup sources') {
      steps {
        dir("${WORKSPACE}/centos-7") {
          sh '''#!/usr/bin/env bash

            cp -Rp ${WORKSPACE}/src ${WORKSPACE}/centos-7
          '''
        }

        dir("${WORKSPACE}/centos-8") {
          sh '''#!/usr/bin/env bash

            cp -Rp ${WORKSPACE}/src ${WORKSPACE}/centos-8
          '''
        }

        dir("${WORKSPACE}/debian-10") {
          sh '''#!/usr/bin/env bash

            cp -Rp ${WORKSPACE}/src ${WORKSPACE}/debian-10
          '''
        }

        dir("${WORKSPACE}/ubuntu-focal") {
          sh '''#!/usr/bin/env bash

            cp -Rp ${WORKSPACE}/src ${WORKSPACE}/ubuntu-focal
          '''
        }
      }
    }

    // This is an optional stage which runs only when there's a change in
    // C++ code, the C++ build, or the platform.
    // This stage serves as a means of cross-platform validation, which is
    // really needed to ensure that any C++- or platform-related change doesn't
    // break the Hadoop build on Centos 7.
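    // (IS_OPTIONAL = 1 below is what marks the stage as skippable; the actual
    // change-detection is assumed to live in dev-support/jenkins.sh)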
    stage ('precommit-run Centos 7') {
      environment {
        SOURCEDIR = "${WORKSPACE}/centos-7/src"
        PATCHDIR = "${WORKSPACE}/centos-7/out"
        DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_7"
        IS_OPTIONAL = 1
      }

      steps {
        withCredentials(
            [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
                              passwordVariable: 'GITHUB_TOKEN',
                              usernameVariable: 'GITHUB_USER'),
             usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
                              passwordVariable: 'JIRA_PASSWORD',
                              usernameVariable: 'JIRA_USER')]) {
          sh '''#!/usr/bin/env bash

            chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
            "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
          '''
        }
      }

      post {
        // Since this is an optional platform, we want to copy the artifacts
        // and archive them only if the build fails, to help with debugging.
        failure {
          sh '''#!/usr/bin/env bash

            cp -Rp "${WORKSPACE}/centos-7/out" "${WORKSPACE}"
          '''
          archiveArtifacts "out/**"
        }

        cleanup() {
          script {
            sh '''#!/usr/bin/env bash

              chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
              "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
            '''
          }
        }
      }
    }

    // This is an optional stage which runs only when there's a change in
    // C++ code, the C++ build, or the platform.
    // This stage serves as a means of cross-platform validation, which is
    // really needed to ensure that any C++- or platform-related change doesn't
    // break the Hadoop build on Centos 8.
    stage ('precommit-run Centos 8') {
      environment {
        SOURCEDIR = "${WORKSPACE}/centos-8/src"
        PATCHDIR = "${WORKSPACE}/centos-8/out"
        DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_centos_8"
        IS_OPTIONAL = 1
      }

      steps {
        withCredentials(
            [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
                              passwordVariable: 'GITHUB_TOKEN',
                              usernameVariable: 'GITHUB_USER'),
             usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
                              passwordVariable: 'JIRA_PASSWORD',
                              usernameVariable: 'JIRA_USER')]) {
          sh '''#!/usr/bin/env bash

            chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
            "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
          '''
        }
      }

      post {
        // Since this is an optional platform, we want to copy the artifacts
        // and archive them only if the build fails, to help with debugging.
        failure {
          sh '''#!/usr/bin/env bash

            cp -Rp "${WORKSPACE}/centos-8/out" "${WORKSPACE}"
          '''
          archiveArtifacts "out/**"
        }

        cleanup() {
          script {
            sh '''#!/usr/bin/env bash

              chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
              "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
            '''
          }
        }
      }
    }

    // This is an optional stage which runs only when there's a change in
    // C++ code, the C++ build, or the platform.
    // This stage serves as a means of cross-platform validation, which is
    // really needed to ensure that any C++- or platform-related change doesn't
    // break the Hadoop build on Debian 10.
    stage ('precommit-run Debian 10') {
      environment {
        SOURCEDIR = "${WORKSPACE}/debian-10/src"
        PATCHDIR = "${WORKSPACE}/debian-10/out"
        DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile_debian_10"
        IS_OPTIONAL = 1
      }

      steps {
        withCredentials(
            [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
                              passwordVariable: 'GITHUB_TOKEN',
                              usernameVariable: 'GITHUB_USER'),
             usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
                              passwordVariable: 'JIRA_PASSWORD',
                              usernameVariable: 'JIRA_USER')]) {
          sh '''#!/usr/bin/env bash

            chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
            "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
          '''
        }
      }

      post {
        // Since this is an optional platform, we want to copy the artifacts
        // and archive them only if the build fails, to help with debugging.
        failure {
          sh '''#!/usr/bin/env bash

            cp -Rp "${WORKSPACE}/debian-10/out" "${WORKSPACE}"
          '''
          archiveArtifacts "out/**"
        }

        cleanup() {
          script {
            sh '''#!/usr/bin/env bash

              chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
              "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
            '''
          }
        }
      }
    }

    // We want to use Ubuntu Focal as our main CI, so this stage
    // isn't optional (it runs for all PRs).
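    // (IS_OPTIONAL = 0 below, so the full precommit run happens for every PR on this platform)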
    stage ('precommit-run Ubuntu focal') {
      environment {
        SOURCEDIR = "${WORKSPACE}/ubuntu-focal/src"
        PATCHDIR = "${WORKSPACE}/ubuntu-focal/out"
        DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile"
        IS_OPTIONAL = 0
      }

      steps {
        withCredentials(
            [usernamePassword(credentialsId: 'apache-hadoop-at-github.com',
                              passwordVariable: 'GITHUB_TOKEN',
                              usernameVariable: 'GITHUB_USER'),
             usernamePassword(credentialsId: 'hadoopqa-at-asf-jira',
                              passwordVariable: 'JIRA_PASSWORD',
                              usernameVariable: 'JIRA_USER')]) {
          sh '''#!/usr/bin/env bash

            chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
            "${SOURCEDIR}/dev-support/jenkins.sh" run_ci
          '''
        }
      }

      post {
        always {
          script {
            // Publish status if it was missed (YETUS-1059)
            withCredentials(
                [usernamePassword(credentialsId: '683f5dcf-5552-4b28-9fb1-6a6b77cf53dd',
                                  passwordVariable: 'GITHUB_TOKEN',
                                  usernameVariable: 'GITHUB_USER')]) {
              sh '''#!/usr/bin/env bash

                # Copy the artifacts of the Ubuntu focal build to the workspace
                cp -Rp "${WORKSPACE}/ubuntu-focal/out" "${WORKSPACE}"

                # Send GitHub status
                chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
                "${SOURCEDIR}/dev-support/jenkins.sh" github_status_recovery
              '''
            }

            // YETUS output
            archiveArtifacts "out/**"

            // Publish the HTML report so that it can be looked at.
            // Has to be relative to WORKSPACE.
            publishHTML (target: [
                allowMissing: true,
                keepAll: true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE
                reportDir: "out",
                reportFiles: 'report.html',
                reportName: 'Yetus Report'
            ])

            // Publish JUnit results
            try {
              junit "${SOURCEDIR}/**/target/surefire-reports/*.xml"
            } catch(e) {
              echo 'junit processing: ' + e.toString()
            }
          }
        }

        cleanup() {
          script {
            sh '''#!/usr/bin/env bash

              chmod u+x "${SOURCEDIR}/dev-support/jenkins.sh"
              "${SOURCEDIR}/dev-support/jenkins.sh" cleanup_ci_proc
            '''
          }
        }
      }
    }
  }

  post {
    // Without this, Jenkins pipeline jobs fill up the build agents on PRs :(
    cleanup() {
      script {
        sh '''#!/usr/bin/env bash

          # See HADOOP-13951
          chmod -R u+rxw "${WORKSPACE}"
        '''
        deleteDir()
      }
    }
  }
}