/*
 * This pipeline is the "template" for the Asterisk Periodic Tests multi-branch
 * parent job. Jenkins will automatically scan the branches in the "asterisk"
 * or "Security-asterisk" projects in Gerrit and automatically create a branch-
 * specific job for each branch it finds this file in.
 *
 * This file starts as a declarative pipeline because with a declarative
 * pipeline, you can define the trigger in the pipeline file. This keeps
 * everything in one place. We transition to a scripted pipeline later on
 * because we need to dynamically determine which docker image we're going to
 * use, and you can't do that in a declarative pipeline.
 */
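
/*
 * The default timeout is 3 hours. The split()/toInteger() parsing below
 * implies TIMEOUT_DAILIES is a single "<number> <unit>" string; a
 * hypothetical override would look like:
 *
 *     TIMEOUT_DAILIES = "6 HOURS"
 *
 * where the unit must be a value the Jenkins timeout() step accepts,
 * such as 'MINUTES' or 'HOURS'.
 */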
def timeoutTime = 3
def timeoutUnits = 'HOURS'
if (env.TIMEOUT_DAILIES) {
    def _timeout = env.TIMEOUT_DAILIES.split()
    timeoutTime = _timeout[0].toInteger()
    timeoutUnits = _timeout[1]
}

pipeline {
    options {
        timestamps()
        timeout(time: timeoutTime, unit: timeoutUnits)
    }
    triggers {
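        /*
         * "H" lets Jenkins hash the job name into a stable minute/hour
         * slot, so each branch job runs once per day at some point in the
         * 00:00-04:59 window without all jobs firing at the same moment.
         */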
        cron 'H H(0-4) * * *'
    }

    agent {
        /* All of the stages need to be performed on a docker host */
        label "swdev-docker"
    }

    stages {
        stage ("->") {
            steps {
                /* Here's where we switch to scripted pipeline */
                script {
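                    /*
                     * "manager" comes from the Groovy Postbuild plugin; the
                     * summary entries below pin the docker host (and, later,
                     * the chosen image) to the build's status page for
                     * easier triage.
                     */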
                    manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Host: ${NODE_NAME}", false)

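                    /*
                     * Reclaim workspace ownership (previous containerized
                     * runs can leave root-owned files behind), dump the
                     * environment for debugging, and run the CI setup
                     * script.
                     */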
stage ("Checkout") {
|
|
sh "sudo chown -R jenkins:users ."
|
|
sh "printenv | sort"
|
|
sh "sudo tests/CI/setupJenkinsEnvironment.sh"
|
|
}
|
|
|
|
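                    /*
                     * DOCKER_IMAGES is a space-separated list of image
                     * names. Taking the build start time modulo the list
                     * length picks a pseudo-random image per run, so the
                     * available images all get exercised over time.
                     */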
                    def images = env.DOCKER_IMAGES.split(' ')
                    def r = currentBuild.startTimeInMillis % images.length
                    def ri = images[(int)r]
                    def randomImage = env.DOCKER_REGISTRY + "/" + ri
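                    /*
                     * Some tests require Asterisk to execute scripts stored
                     * in /tmp, so /tmp is mounted as tmpfs with the "exec"
                     * option; a plain mount would not allow executables to
                     * be run from that location.
                     */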
                    def dockerOptions = "--privileged --ulimit core=0 --ulimit nofile=10240 " +
                        " --tmpfs /tmp:exec,size=1G -v /srv/jenkins:/srv/jenkins:rw -v /srv/cache:/srv/cache:rw " +
                        " --entrypoint=''"
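                    /* Docker container names only allow [a-zA-Z0-9_.-], so
                     * sanitize the Jenkins BUILD_TAG before using it in the
                     * "--name" options below. */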
                    def bt = env.BUILD_TAG.replaceAll(/[^a-zA-Z0-9_.-]/, '-')
                    def outputdir = "tests/CI/output/Testsuite"

                    manager.createSummary("/plugin/workflow-job/images/48x48/pipelinejob.png").appendText("Docker Image: ${randomImage}", false)
                    def img = docker.image(randomImage)
                    img.pull()

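                    /*
                     * img.inside() runs the enclosed steps in a container
                     * started from the chosen image; the workspace is
                     * mounted automatically and the container is removed
                     * when the block exits.
                     */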
                    img.inside(dockerOptions + " --name ${bt}-build") {
                        stage ("Build") {
                            echo 'Building..'
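                            /* Point ccache at the shared cache volume so
                             * repeated daily builds reuse compiled objects. */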
                            env.CCACHE_DIR = "/srv/cache/ccache"
                            sh "./tests/CI/buildAsterisk.sh --branch-name=${BRANCH_NAME} --output-dir=${outputdir} --cache-dir=/srv/cache"

                            archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: false,
                                artifacts: "${outputdir}/*"
                        }
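                        /*
                         * The Docs stage installs the freshly built
                         * Asterisk, checks out the sibling "publish-docs"
                         * repository, and publishes the generated wiki
                         * documentation for branches matching
                         * WIKI_DOC_BRANCH_REGEX.
                         */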
stage ("Docs") {
|
|
|
|
sh "sudo ./tests/CI/installAsterisk.sh --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
|
|
|
|
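                            /*
                             * Derive the publish-docs URL from this repo's
                             * URL by swapping the last path component, e.g.
                             * .../asterisk -> .../publish-docs.
                             */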
                            def docUrl = env.GIT_URL.replaceAll(/\/[^\/]+$/, "/publish-docs")
                            checkout scm: [$class: 'GitSCM',
                                branches: [[name: "master"]],
                                extensions: [
                                    [$class: 'RelativeTargetDirectory', relativeTargetDir: "tests/CI/output/publish-docs"],
                                    [$class: 'CloneOption',
                                        noTags: true,
                                        depth: 10,
                                        honorRefspec: true,
                                        shallow: true
                                    ],
                                ],
                                userRemoteConfigs: [[url: docUrl]]
                            ]

                            sh "./tests/CI/publishAsteriskDocs.sh --user-group=jenkins:users --branch-name=${BRANCH_NAME} --wiki-doc-branch-regex=\"${WIKI_DOC_BRANCH_REGEX}\""
                        }
                    }

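                    /*
                     * Each entry in periodic-dailyTestGroups.json describes
                     * one parallel test group. A hypothetical entry, shaped
                     * by the fields read below (name, dir, testcmd,
                     * runTestsuiteOptions):
                     *
                     * {
                     *     "name": "ari",
                     *     "dir": "tests/CI/output/ari",
                     *     "runTestsuiteOptions": "--test-timeout=180",
                     *     "testcmd": "--test-regex=tests/rest_api"
                     * }
                     */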
                    def testGroups = readJSON file: "tests/CI/periodic-dailyTestGroups.json"
                    def parallelTasks = [ : ]

                    for (def testGroup in testGroups) {
                        /*
                         * Because each task is a Groovy closure, we need to
                         * keep local references to some variables.
                         */
                        def groupName = testGroup.name
                        def groupDir = testGroup.dir
                        def groupTestcmd = testGroup.testcmd
                        def groupRunTestsuiteOptions = testGroup.runTestsuiteOptions
                        def testsuiteUrl = env.GIT_URL.replaceAll(/\/[^\/]+$/, "/testsuite")

                        parallelTasks[groupName] = {
                            stage (groupName) {

                                img.inside("${dockerOptions} --name ${bt}-${groupName}") {
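                                    /*
                                     * Serialize the (un)install of Asterisk
                                     * behind a per-job, per-node lock,
                                     * presumably so concurrent groups on
                                     * the same docker host don't interfere
                                     * with each other's installs.
                                     */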
                                    lock("${JOB_NAME}.${NODE_NAME}.installer") {
                                        sh "sudo ./tests/CI/installAsterisk.sh --uninstall-all --branch-name=${BRANCH_NAME} --user-group=jenkins:users"
                                    }

                                    sh "sudo rm -rf ${groupDir} || : "

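                                    /*
                                     * Shallow-clone the matching branch of
                                     * the sibling testsuite repository into
                                     * this group's private directory.
                                     */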
                                    checkout scm: [$class: 'GitSCM',
                                        branches: [[name: "${BRANCH_NAME}"]],
                                        extensions: [
                                            [$class: 'RelativeTargetDirectory', relativeTargetDir: groupDir],
                                            [$class: 'CloneOption',
                                                noTags: true,
                                                depth: 10,
                                                honorRefspec: true,
                                                shallow: true
                                            ],
                                        ],
                                        userRemoteConfigs: [[url: testsuiteUrl]]
                                    ]

sh "sudo tests/CI/runTestsuite.sh ${groupRunTestsuiteOptions} --testsuite-dir='${groupDir}' --testsuite-command='${groupTestcmd}'"
|
|
|
|
archiveArtifacts allowEmptyArchive: true, defaultExcludes: false, fingerprint: true,
|
|
artifacts: "${groupDir}/asterisk-test-suite-report.xml, ${groupDir}/logs/**, ${groupDir}/core*.txt"
|
|
|
|
junit testResults: "${groupDir}/asterisk-test-suite-report.xml",
|
|
healthScaleFactor: 1.0,
|
|
keepLongStdio: true
|
|
|
|
echo "Group result d: ${currentBuild.currentResult}"
|
|
}
|
|
echo "Group result s: ${currentBuild.currentResult}"
|
|
}
|
|
}
|
|
}
|
|
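                    /*
                     * Run all of the test groups concurrently. The junit
                     * step above marks the build UNSTABLE on test failures,
                     * and since failFast isn't set, one group's failure
                     * doesn't cancel the others.
                     */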
                    parallel parallelTasks
                }
            }
        }
    }
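    /*
     * "cleanup" runs last regardless of the build result; the other
     * conditions fire based on the final status so the outcome is visible
     * in the log.
     */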
    post {
        cleanup {
            sh "sudo make distclean >/dev/null 2>&1 || : "
            sh "sudo rm -rf tests/CI/output >/dev/null 2>&1 || : "
        }
        success {
            echo "Reporting ${currentBuild.currentResult}: Passed"
        }
        failure {
            echo "Reporting ${currentBuild.currentResult}: Failed: Fatal Error"
        }
        unstable {
            echo "Reporting ${currentBuild.currentResult}: Failed: Tests Failed"
        }
    }
}