// https://stackoverflow.com/questions/40760716/jenkins-abort-running-build-if-new-one-is-started
// We should only abort older jobs in PR branches, so we have a nice CI history in "stable",
// "testing", and "unstable".
if (env.BRANCH_NAME != "stable" && env.BRANCH_NAME != "testing" && env.BRANCH_NAME != "unstable") {
  def buildNumber = env.BUILD_NUMBER as int
  if (buildNumber > 1) {
    milestone(buildNumber - 1)
  }
  milestone(buildNumber)
}

def runStages(nodeDir) {
  sh "mkdir -p ${nodeDir}"
  dir(nodeDir) {
    try {
      stage("Clone") {
        /* source code checkout */
        checkout scm
        /* we need to update the submodules before caching kicks in */
        sh "git submodule update --init --recursive"
      }
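
      // cache() is provided by the "jobcacher" plugin: it restores the listed paths before
      // the enclosed block runs and saves them again afterwards, so the bootstrapped Nim
      // compiler and the JSON test fixtures can be reused across builds (maxCacheSize is in MB).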
      cache(maxCacheSize: 250, caches: [
        [$class: "ArbitraryFileCache", excludes: "", includes: "**/*", path: "${WORKSPACE}/${nodeDir}/vendor/nimbus-build-system/vendor/Nim/bin"],
        [$class: "ArbitraryFileCache", excludes: "", includes: "**/*", path: "${WORKSPACE}/${nodeDir}/jsonTestsCache"]
      ]) {
        stage("Build") {
          sh """#!/bin/bash
          set -e
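          # QUICK_AND_DIRTY_COMPILER=1 is a nimbus-build-system option intended to shorten
          # the Nim compiler bootstrap; "deps" builds the vendored dependencies up front.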
          # to allow the following parallel stages
          make -j${env.NPROC} QUICK_AND_DIRTY_COMPILER=1 deps
          ./scripts/setup_scenarios.sh jsonTestsCache
          """
        }
      }

      stage("Tools") {
        sh """#!/bin/bash
        set -e
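        # Build the binaries twice: once at the default log level and once with
        # TRACE-level logging compiled in (LOG_LEVEL is applied at build time).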
        make -j${env.NPROC}
        make -j${env.NPROC} LOG_LEVEL=TRACE
        """
      }

      stage("Test suite") {
        sh "make -j${env.NPROC} DISABLE_TEST_FIXTURES_SCRIPT=1 test"
      }

      stage("REST test suite") {
        sh """#!/bin/bash
        set -e
        ./tests/simulation/restapi.sh --data-dir resttest0_data --base-port \$(( 9100 + EXECUTOR_NUMBER * 100 )) \
          --base-rest-port \$(( 7100 + EXECUTOR_NUMBER * 100 )) --base-metrics-port \
          \$(( 8108 + EXECUTOR_NUMBER * 100 )) --resttest-delay 30 --kill-old-processes
        """
      }

      stage("Testnet finalization") {
        // EXECUTOR_NUMBER will be 0 or 1, since we have 2 executors per Jenkins node
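        // Two local testnets are launched: one with the "minimal" preset and one with the
        // default preset; both must run to epoch 5 with --verify-finalization within their
        // respective timeouts.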
        sh """#!/bin/bash
        set -e
        ./scripts/launch_local_testnet.sh --preset minimal --nodes 4 --stop-at-epoch 5 --disable-htop --enable-logtrace \
          --data-dir local_testnet0_data --base-port \$(( 9000 + EXECUTOR_NUMBER * 100 )) --base-rest-port \
          \$(( 7000 + EXECUTOR_NUMBER * 100 )) --base-metrics-port \$(( 8008 + EXECUTOR_NUMBER * 100 )) --timeout 600 \
          --kill-old-processes \
          -- --verify-finalization --discv5:no
        ./scripts/launch_local_testnet.sh --nodes 4 --stop-at-epoch 5 --disable-htop --enable-logtrace \
          --data-dir local_testnet1_data --base-port \$(( 9000 + EXECUTOR_NUMBER * 100 )) --base-rest-port \
          \$(( 7000 + EXECUTOR_NUMBER * 100 )) --base-metrics-port \$(( 8008 + EXECUTOR_NUMBER * 100 )) --timeout 2400 \
          --kill-old-processes \
          -- --verify-finalization --discv5:no
        """
      }
    } catch(e) {
      // we need to rethrow the exception here
      throw e
    } finally {
      // archive testnet logs
      sh """#!/bin/bash
      for D in local_testnet0_data local_testnet1_data resttest0_data; do
        [[ -d "\$D" ]] && tar cjf "\${D}-\${NODE_NAME}.tar.bz2" "\${D}"/*.txt || true
      done
      """
      try {
        archiveArtifacts("*.tar.bz2")
      } catch(e) {
        println("Couldn't archive artefacts.")
        println(e.toString())
        // we don't need to re-raise it here; it might be a PR build being cancelled by a newer one
      }
      // clean the workspace
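      // cleanWs() comes from the Workspace Cleanup plugin; disableDeferredWipeout makes the
      // deletion happen synchronously instead of being handed off to the asynchronous
      // deferred-wipeout mechanism.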
      cleanWs(disableDeferredWipeout: true, deleteDirs: true)
    }
  } // dir(...)
}
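
// Run the same stage sequence on each platform in parallel. throttle() comes from the
// Throttle Concurrent Builds plugin and limits how many of these jobs run at once in the
// 'nimbus-eth2' category; NPROC is detected per node (nproc on Linux, sysctl on macOS).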
parallel(
  "Linux": {
    throttle(['nimbus-eth2']) {
      timeout(time: 4, unit: 'HOURS') {
        node("linux") {
          withEnv(["NPROC=${sh(returnStdout: true, script: 'nproc').trim()}"]) {
            runStages("linux")
          }
        }
      }
    }
  },
  "macOS (AMD64)": {
    throttle(['nimbus-eth2']) {
      timeout(time: 4, unit: 'HOURS') {
        node("macos && x86_64") {
          withEnv(["NPROC=${sh(returnStdout: true, script: 'sysctl -n hw.logicalcpu').trim()}"]) {
            runStages("macos_amd64")
          }
        }
      }
    }
  },
  "macOS (ARM64)": {
    throttle(['nimbus-eth2']) {
      timeout(time: 4, unit: 'HOURS') {
        node("macos && arm64") {
          withEnv(["NPROC=${sh(returnStdout: true, script: 'sysctl -n hw.logicalcpu').trim()}"]) {
            runStages("macos_arm64")
          }
        }
      }
    }
  },
)