diff --git a/.ci/Jenkinsfile_coverage b/.ci/Jenkinsfile_coverage
index fa1e141be93ea..6b8dc31bab34e 100644
--- a/.ci/Jenkinsfile_coverage
+++ b/.ci/Jenkinsfile_coverage
@@ -3,99 +3,91 @@
library 'kibana-pipeline-library'
kibanaLibrary.load() // load from the Jenkins instance
-stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a little bit
- timeout(time: 180, unit: 'MINUTES') {
- timestamps {
- ansiColor('xterm') {
- catchError {
+kibanaPipeline(timeoutMinutes: 180) {
+ catchErrors {
+ withEnv([
+ 'CODE_COVERAGE=1', // Needed for multiple ci scripts, such as remote.ts, test/scripts/*.sh, schema.js, etc.
+ ]) {
+ parallel([
+ 'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
+ 'x-pack-intake-agent': {
withEnv([
- 'CODE_COVERAGE=1', // Needed for multiple ci scripts, such as remote.ts, test/scripts/*.sh, schema.js, etc.
+ 'NODE_ENV=test' // Needed for jest tests only
]) {
- parallel([
- 'kibana-intake-agent': {
- kibanaPipeline.intakeWorker('kibana-intake', './test/scripts/jenkins_unit.sh')()
- },
- 'x-pack-intake-agent': {
- withEnv([
- 'NODE_ENV=test' // Needed for jest tests only
- ]) {
- kibanaPipeline.intakeWorker('x-pack-intake', './test/scripts/jenkins_xpack.sh')()
- }
- },
- 'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
- 'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
- 'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
- 'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
- 'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
- 'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
- 'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
- 'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
- 'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
- 'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
- 'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
- 'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
- 'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
- ]),
- 'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
- 'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
- 'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
- 'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
- 'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
- 'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
- 'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
- 'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
- 'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
- 'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
- 'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
- ]),
- ])
- kibanaPipeline.jobRunner('tests-l', false) {
- kibanaPipeline.downloadCoverageArtifacts()
- kibanaPipeline.bash(
- '''
- # bootstrap from x-pack folder
- source src/dev/ci_setup/setup_env.sh
- cd x-pack
- yarn kbn bootstrap --prefer-offline
- cd ..
- # extract archives
- mkdir -p /tmp/extracted_coverage
- echo extracting intakes
- tar -xzf /tmp/downloaded_coverage/coverage/kibana-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
- tar -xzf /tmp/downloaded_coverage/coverage/x-pack-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
- echo extracting kibana-oss-tests
- tar -xzf /tmp/downloaded_coverage/coverage/kibana-oss-tests/kibana-coverage.tar.gz -C /tmp/extracted_coverage
- echo extracting kibana-xpack-tests
- tar -xzf /tmp/downloaded_coverage/coverage/kibana-xpack-tests/kibana-coverage.tar.gz -C /tmp/extracted_coverage
- # replace path in json files to have valid html report
- pwd=$(pwd)
- du -sh /tmp/extracted_coverage/target/kibana-coverage/
- echo replacing path in json files
- for i in {1..9}; do
- sed -i "s|/dev/shm/workspace/kibana|$pwd|g" /tmp/extracted_coverage/target/kibana-coverage/functional/${i}*.json &
- done
- wait
- # merge oss & x-pack reports
- echo merging coverage reports
- yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/jest --report-dir target/kibana-coverage/jest-combined --reporter=html --reporter=json-summary
- yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/functional --report-dir target/kibana-coverage/functional-combined --reporter=html --reporter=json-summary
- echo copy mocha reports
- mkdir -p target/kibana-coverage/mocha-combined
- cp -r /tmp/extracted_coverage/target/kibana-coverage/mocha target/kibana-coverage/mocha-combined
- ''',
- "run `yarn kbn bootstrap && merge coverage`"
- )
- sh 'tar -czf kibana-jest-coverage.tar.gz target/kibana-coverage/jest-combined/*'
- kibanaPipeline.uploadCoverageArtifacts("coverage/jest-combined", 'kibana-jest-coverage.tar.gz')
- sh 'tar -czf kibana-functional-coverage.tar.gz target/kibana-coverage/functional-combined/*'
- kibanaPipeline.uploadCoverageArtifacts("coverage/functional-combined", 'kibana-functional-coverage.tar.gz')
- sh 'tar -czf kibana-mocha-coverage.tar.gz target/kibana-coverage/mocha-combined/*'
- kibanaPipeline.uploadCoverageArtifacts("coverage/mocha-combined", 'kibana-mocha-coverage.tar.gz')
- }
+ workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh')()
}
- }
- kibanaPipeline.sendMail()
+ },
+ 'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
+ 'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
+ 'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
+ 'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
+ 'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
+ 'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
+ 'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
+ 'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
+ 'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
+ 'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
+ 'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
+ 'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
+ 'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
+ ]),
+ 'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
+ 'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
+ 'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
+ 'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
+ 'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
+ 'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
+ 'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
+ 'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7),
+ 'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8),
+ 'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9),
+ 'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
+ ]),
+ ])
+ workers.base(name: 'coverage-worker', label: 'tests-l', ramDisk: false, bootstrapped: false) {
+ kibanaPipeline.downloadCoverageArtifacts()
+ kibanaPipeline.bash(
+ '''
+ # bootstrap from x-pack folder
+ source src/dev/ci_setup/setup_env.sh
+ cd x-pack
+ yarn kbn bootstrap --prefer-offline
+ cd ..
+ # extract archives
+ mkdir -p /tmp/extracted_coverage
+ echo extracting intakes
+ tar -xzf /tmp/downloaded_coverage/coverage/kibana-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
+ tar -xzf /tmp/downloaded_coverage/coverage/x-pack-intake/kibana-coverage.tar.gz -C /tmp/extracted_coverage
+ echo extracting kibana-oss-tests
+ tar -xzf /tmp/downloaded_coverage/coverage/kibana-oss-tests/kibana-coverage.tar.gz -C /tmp/extracted_coverage
+ echo extracting kibana-xpack-tests
+ tar -xzf /tmp/downloaded_coverage/coverage/kibana-xpack-tests/kibana-coverage.tar.gz -C /tmp/extracted_coverage
+ # replace path in json files to have valid html report
+ pwd=$(pwd)
+ du -sh /tmp/extracted_coverage/target/kibana-coverage/
+ echo replacing path in json files
+ for i in {1..9}; do
+ sed -i "s|/dev/shm/workspace/kibana|$pwd|g" /tmp/extracted_coverage/target/kibana-coverage/functional/${i}*.json &
+ done
+ wait
+ # merge oss & x-pack reports
+ echo merging coverage reports
+ yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/jest --report-dir target/kibana-coverage/jest-combined --reporter=html --reporter=json-summary
+ yarn nyc report --temp-dir /tmp/extracted_coverage/target/kibana-coverage/functional --report-dir target/kibana-coverage/functional-combined --reporter=html --reporter=json-summary
+ echo copy mocha reports
+ mkdir -p target/kibana-coverage/mocha-combined
+ cp -r /tmp/extracted_coverage/target/kibana-coverage/mocha target/kibana-coverage/mocha-combined
+ ''',
+ "run `yarn kbn bootstrap && merge coverage`"
+ )
+ sh 'tar -czf kibana-jest-coverage.tar.gz target/kibana-coverage/jest-combined/*'
+ kibanaPipeline.uploadCoverageArtifacts("coverage/jest-combined", 'kibana-jest-coverage.tar.gz')
+ sh 'tar -czf kibana-functional-coverage.tar.gz target/kibana-coverage/functional-combined/*'
+ kibanaPipeline.uploadCoverageArtifacts("coverage/functional-combined", 'kibana-functional-coverage.tar.gz')
+ sh 'tar -czf kibana-mocha-coverage.tar.gz target/kibana-coverage/mocha-combined/*'
+ kibanaPipeline.uploadCoverageArtifacts("coverage/mocha-combined", 'kibana-mocha-coverage.tar.gz')
}
}
}
+ kibanaPipeline.sendMail()
}
diff --git a/.ci/Jenkinsfile_flaky b/.ci/Jenkinsfile_flaky
index f702405aad69e..befb8d259b5b6 100644
--- a/.ci/Jenkinsfile_flaky
+++ b/.ci/Jenkinsfile_flaky
@@ -21,53 +21,47 @@ def workerFailures = []
currentBuild.displayName += trunc(" ${params.GITHUB_OWNER}:${params.branch_specifier}", 24)
currentBuild.description = "${params.CI_GROUP}<br />Agents: ${AGENT_COUNT}<br />Executions: ${params.NUMBER_EXECUTIONS}"
-stage("Kibana Pipeline") {
- timeout(time: 180, unit: 'MINUTES') {
- timestamps {
- ansiColor('xterm') {
- def agents = [:]
- for(def agentNumber = 1; agentNumber <= AGENT_COUNT; agentNumber++) {
- def agentNumberInside = agentNumber
- def agentExecutions = floor(EXECUTIONS/AGENT_COUNT) + (agentNumber <= EXECUTIONS%AGENT_COUNT ? 1 : 0)
- agents["agent-${agentNumber}"] = {
- catchError {
- print "Agent ${agentNumberInside} - ${agentExecutions} executions"
-
- kibanaPipeline.withWorkers('flaky-test-runner', {
- if (NEED_BUILD) {
- if (!IS_XPACK) {
- kibanaPipeline.buildOss()
- if (CI_GROUP == '1') {
- runbld("./test/scripts/jenkins_build_kbn_tp_sample_panel_action.sh", "Build kbn tp sample panel action for ciGroup1")
- }
- } else {
- kibanaPipeline.buildXpack()
- }
- }
- }, getWorkerMap(agentNumberInside, agentExecutions, worker, workerFailures))()
+kibanaPipeline(timeoutMinutes: 180) {
+ def agents = [:]
+ for(def agentNumber = 1; agentNumber <= AGENT_COUNT; agentNumber++) {
+ def agentNumberInside = agentNumber
+ def agentExecutions = floor(EXECUTIONS/AGENT_COUNT) + (agentNumber <= EXECUTIONS%AGENT_COUNT ? 1 : 0)
+ agents["agent-${agentNumber}"] = {
+ catchErrors {
+ print "Agent ${agentNumberInside} - ${agentExecutions} executions"
+
+ workers.functional('flaky-test-runner', {
+ if (NEED_BUILD) {
+ if (!IS_XPACK) {
+ kibanaPipeline.buildOss()
+ if (CI_GROUP == '1') {
+ runbld("./test/scripts/jenkins_build_kbn_tp_sample_panel_action.sh", "Build kbn tp sample panel action for ciGroup1")
+ }
+ } else {
+ kibanaPipeline.buildXpack()
}
}
- }
+ }, getWorkerMap(agentNumberInside, agentExecutions, worker, workerFailures))()
+ }
+ }
+ }
- parallel(agents)
+ parallel(agents)
- currentBuild.description += ", Failures: ${workerFailures.size()}"
+ currentBuild.description += ", Failures: ${workerFailures.size()}"
- if (workerFailures.size() > 0) {
- print "There were ${workerFailures.size()} test suite failures."
- print "The executions that failed were:"
- print workerFailures.join("\n")
- print "Please check 'Test Result' and 'Pipeline Steps' pages for more info"
- }
- }
- }
+ if (workerFailures.size() > 0) {
+ print "There were ${workerFailures.size()} test suite failures."
+ print "The executions that failed were:"
+ print workerFailures.join("\n")
+ print "Please check 'Test Result' and 'Pipeline Steps' pages for more info"
}
}
def getWorkerFromParams(isXpack, job, ciGroup) {
if (!isXpack) {
if (job == 'serverMocha') {
- return kibanaPipeline.getPostBuildWorker('serverMocha', {
+ return kibanaPipeline.functionalTestProcess('serverMocha', {
kibanaPipeline.bash(
"""
source src/dev/ci_setup/setup_env.sh
@@ -77,20 +71,20 @@ def getWorkerFromParams(isXpack, job, ciGroup) {
)
})
} else if (job == 'firefoxSmoke') {
- return kibanaPipeline.getPostBuildWorker('firefoxSmoke', { runbld('./test/scripts/jenkins_firefox_smoke.sh', 'Execute kibana-firefoxSmoke') })
+ return kibanaPipeline.functionalTestProcess('firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh')
} else if(job == 'visualRegression') {
- return kibanaPipeline.getPostBuildWorker('visualRegression', { runbld('./test/scripts/jenkins_visual_regression.sh', 'Execute kibana-visualRegression') })
+ return kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh')
} else {
- return kibanaPipeline.getOssCiGroupWorker(ciGroup)
+ return kibanaPipeline.ossCiGroupProcess(ciGroup)
}
}
if (job == 'firefoxSmoke') {
- return kibanaPipeline.getPostBuildWorker('xpack-firefoxSmoke', { runbld('./test/scripts/jenkins_xpack_firefox_smoke.sh', 'Execute xpack-firefoxSmoke') })
+ return kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh')
} else if(job == 'visualRegression') {
- return kibanaPipeline.getPostBuildWorker('xpack-visualRegression', { runbld('./test/scripts/jenkins_xpack_visual_regression.sh', 'Execute xpack-visualRegression') })
+ return kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')
} else {
- return kibanaPipeline.getXpackCiGroupWorker(ciGroup)
+ return kibanaPipeline.xpackCiGroupProcess(ciGroup)
}
}
@@ -105,10 +99,9 @@ def getWorkerMap(agentNumber, numberOfExecutions, worker, workerFailures, maxWor
for(def j = 0; j < workerExecutions; j++) {
print "Execute agent-${agentNumber} worker-${workerNumber}: ${j}"
withEnv([
- "JOB=agent-${agentNumber}-worker-${workerNumber}-${j}",
"REMOVE_KIBANA_INSTALL_DIR=1",
]) {
- catchError {
+ catchErrors {
try {
worker(workerNumber)
} catch (ex) {
diff --git a/.ci/es-snapshots/Jenkinsfile_build_es b/.ci/es-snapshots/Jenkinsfile_build_es
index ad0ad54275e12..a00bcb3bbc946 100644
--- a/.ci/es-snapshots/Jenkinsfile_build_es
+++ b/.ci/es-snapshots/Jenkinsfile_build_es
@@ -26,7 +26,7 @@ timeout(time: 120, unit: 'MINUTES') {
timestamps {
ansiColor('xterm') {
node('linux && immutable') {
- catchError {
+ catchErrors {
def VERSION
def SNAPSHOT_ID
def DESTINATION
diff --git a/.ci/es-snapshots/Jenkinsfile_verify_es b/.ci/es-snapshots/Jenkinsfile_verify_es
index 30d52a56547bd..ce472a404c053 100644
--- a/.ci/es-snapshots/Jenkinsfile_verify_es
+++ b/.ci/es-snapshots/Jenkinsfile_verify_es
@@ -19,50 +19,45 @@ currentBuild.description = "ES: ${SNAPSHOT_VERSION}<br />Kibana: ${params.branch
def SNAPSHOT_MANIFEST = "https://storage.googleapis.com/kibana-ci-es-snapshots-daily/${SNAPSHOT_VERSION}/archives/${SNAPSHOT_ID}/manifest.json"
-timeout(time: 120, unit: 'MINUTES') {
- timestamps {
- ansiColor('xterm') {
- catchError {
- withEnv(["ES_SNAPSHOT_MANIFEST=${SNAPSHOT_MANIFEST}"]) {
- parallel([
- // TODO we just need to run integration tests from intake?
- 'kibana-intake-agent': kibanaPipeline.intakeWorker('kibana-intake', './test/scripts/jenkins_unit.sh'),
- 'x-pack-intake-agent': kibanaPipeline.intakeWorker('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
- 'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
- 'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
- 'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
- 'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
- 'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
- 'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
- 'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
- 'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
- 'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
- 'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
- 'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
- 'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
- 'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
- ]),
- 'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
- 'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
- 'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
- 'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
- 'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
- 'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
- 'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
- 'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
- 'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
- 'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
- 'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
- ]),
- ])
- }
-
- promoteSnapshot(SNAPSHOT_VERSION, SNAPSHOT_ID)
- }
-
- kibanaPipeline.sendMail()
+kibanaPipeline(timeoutMinutes: 120) {
+ catchErrors {
+ withEnv(["ES_SNAPSHOT_MANIFEST=${SNAPSHOT_MANIFEST}"]) {
+ parallel([
+ 'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
+ 'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
+ 'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
+ 'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
+ 'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
+ 'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
+ 'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
+ 'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
+ 'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
+ 'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
+ 'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
+ 'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
+ 'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
+ 'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
+ 'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
+ ]),
+ 'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
+ 'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
+ 'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
+ 'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
+ 'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
+ 'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
+ 'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
+ 'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7),
+ 'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8),
+ 'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9),
+ 'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
+ ]),
+ ])
}
+
+ promoteSnapshot(SNAPSHOT_VERSION, SNAPSHOT_ID)
}
+
+ kibanaPipeline.sendMail()
}
def promoteSnapshot(snapshotVersion, snapshotId) {
diff --git a/Jenkinsfile b/Jenkinsfile
index 1b4350d5b91e9..85502369b07be 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -3,71 +3,49 @@
library 'kibana-pipeline-library'
kibanaLibrary.load()
-stage("Kibana Pipeline") { // This stage is just here to help the BlueOcean UI a little bit
- timeout(time: 135, unit: 'MINUTES') {
- timestamps {
- ansiColor('xterm') {
- githubPr.withDefaultPrComments {
- catchError {
- retryable.enable()
- parallel([
- 'kibana-intake-agent': kibanaPipeline.intakeWorker('kibana-intake', './test/scripts/jenkins_unit.sh'),
- 'x-pack-intake-agent': kibanaPipeline.intakeWorker('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
- 'kibana-oss-agent': kibanaPipeline.withWorkers('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
- // 'oss-firefoxSmoke': kibanaPipeline.getPostBuildWorker('firefoxSmoke', {
- // retryable('kibana-firefoxSmoke') {
- // runbld('./test/scripts/jenkins_firefox_smoke.sh', 'Execute kibana-firefoxSmoke')
- // }
- // }),
- 'oss-ciGroup1': kibanaPipeline.getOssCiGroupWorker(1),
- 'oss-ciGroup2': kibanaPipeline.getOssCiGroupWorker(2),
- 'oss-ciGroup3': kibanaPipeline.getOssCiGroupWorker(3),
- 'oss-ciGroup4': kibanaPipeline.getOssCiGroupWorker(4),
- 'oss-ciGroup5': kibanaPipeline.getOssCiGroupWorker(5),
- 'oss-ciGroup6': kibanaPipeline.getOssCiGroupWorker(6),
- 'oss-ciGroup7': kibanaPipeline.getOssCiGroupWorker(7),
- 'oss-ciGroup8': kibanaPipeline.getOssCiGroupWorker(8),
- 'oss-ciGroup9': kibanaPipeline.getOssCiGroupWorker(9),
- 'oss-ciGroup10': kibanaPipeline.getOssCiGroupWorker(10),
- 'oss-ciGroup11': kibanaPipeline.getOssCiGroupWorker(11),
- 'oss-ciGroup12': kibanaPipeline.getOssCiGroupWorker(12),
- 'oss-accessibility': kibanaPipeline.getPostBuildWorker('accessibility', {
- retryable('kibana-accessibility') {
- runbld('./test/scripts/jenkins_accessibility.sh', 'Execute kibana-accessibility')
- }
- }),
- // 'oss-visualRegression': kibanaPipeline.getPostBuildWorker('visualRegression', { runbld('./test/scripts/jenkins_visual_regression.sh', 'Execute kibana-visualRegression') }),
- ]),
- 'kibana-xpack-agent': kibanaPipeline.withWorkers('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
- // 'xpack-firefoxSmoke': kibanaPipeline.getPostBuildWorker('xpack-firefoxSmoke', {
- // retryable('xpack-firefoxSmoke') {
- // runbld('./test/scripts/jenkins_xpack_firefox_smoke.sh', 'Execute xpack-firefoxSmoke')
- // }
- // }),
- 'xpack-ciGroup1': kibanaPipeline.getXpackCiGroupWorker(1),
- 'xpack-ciGroup2': kibanaPipeline.getXpackCiGroupWorker(2),
- 'xpack-ciGroup3': kibanaPipeline.getXpackCiGroupWorker(3),
- 'xpack-ciGroup4': kibanaPipeline.getXpackCiGroupWorker(4),
- 'xpack-ciGroup5': kibanaPipeline.getXpackCiGroupWorker(5),
- 'xpack-ciGroup6': kibanaPipeline.getXpackCiGroupWorker(6),
- 'xpack-ciGroup7': kibanaPipeline.getXpackCiGroupWorker(7),
- 'xpack-ciGroup8': kibanaPipeline.getXpackCiGroupWorker(8),
- 'xpack-ciGroup9': kibanaPipeline.getXpackCiGroupWorker(9),
- 'xpack-ciGroup10': kibanaPipeline.getXpackCiGroupWorker(10),
- 'xpack-accessibility': kibanaPipeline.getPostBuildWorker('xpack-accessibility', {
- retryable('xpack-accessibility') {
- runbld('./test/scripts/jenkins_xpack_accessibility.sh', 'Execute xpack-accessibility')
- }
- }),
- // 'xpack-visualRegression': kibanaPipeline.getPostBuildWorker('xpack-visualRegression', { runbld('./test/scripts/jenkins_xpack_visual_regression.sh', 'Execute xpack-visualRegression') }),
- ]),
- ])
- }
- }
-
- retryable.printFlakyFailures()
- kibanaPipeline.sendMail()
- }
+kibanaPipeline(timeoutMinutes: 135) {
+ githubPr.withDefaultPrComments {
+ catchError {
+ retryable.enable()
+ parallel([
+ 'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
+ 'x-pack-intake-agent': workers.intake('x-pack-intake', './test/scripts/jenkins_xpack.sh'),
+ 'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
+ // 'oss-firefoxSmoke': kibanaPipeline.functionalTestProcess('kibana-firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh'),
+ 'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
+ 'oss-ciGroup2': kibanaPipeline.ossCiGroupProcess(2),
+ 'oss-ciGroup3': kibanaPipeline.ossCiGroupProcess(3),
+ 'oss-ciGroup4': kibanaPipeline.ossCiGroupProcess(4),
+ 'oss-ciGroup5': kibanaPipeline.ossCiGroupProcess(5),
+ 'oss-ciGroup6': kibanaPipeline.ossCiGroupProcess(6),
+ 'oss-ciGroup7': kibanaPipeline.ossCiGroupProcess(7),
+ 'oss-ciGroup8': kibanaPipeline.ossCiGroupProcess(8),
+ 'oss-ciGroup9': kibanaPipeline.ossCiGroupProcess(9),
+ 'oss-ciGroup10': kibanaPipeline.ossCiGroupProcess(10),
+ 'oss-ciGroup11': kibanaPipeline.ossCiGroupProcess(11),
+ 'oss-ciGroup12': kibanaPipeline.ossCiGroupProcess(12),
+ 'oss-accessibility': kibanaPipeline.functionalTestProcess('kibana-accessibility', './test/scripts/jenkins_accessibility.sh'),
+ // 'oss-visualRegression': kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh'),
+ ]),
+ 'kibana-xpack-agent': workers.functional('kibana-xpack-tests', { kibanaPipeline.buildXpack() }, [
+ // 'xpack-firefoxSmoke': kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh'),
+ 'xpack-ciGroup1': kibanaPipeline.xpackCiGroupProcess(1),
+ 'xpack-ciGroup2': kibanaPipeline.xpackCiGroupProcess(2),
+ 'xpack-ciGroup3': kibanaPipeline.xpackCiGroupProcess(3),
+ 'xpack-ciGroup4': kibanaPipeline.xpackCiGroupProcess(4),
+ 'xpack-ciGroup5': kibanaPipeline.xpackCiGroupProcess(5),
+ 'xpack-ciGroup6': kibanaPipeline.xpackCiGroupProcess(6),
+ 'xpack-ciGroup7': kibanaPipeline.xpackCiGroupProcess(7),
+ 'xpack-ciGroup8': kibanaPipeline.xpackCiGroupProcess(8),
+ 'xpack-ciGroup9': kibanaPipeline.xpackCiGroupProcess(9),
+ 'xpack-ciGroup10': kibanaPipeline.xpackCiGroupProcess(10),
+ 'xpack-accessibility': kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh'),
+ // 'xpack-visualRegression': kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh'),
+ ]),
+ ])
}
}
+
+ retryable.printFlakyFailures()
+ kibanaPipeline.sendMail()
}
diff --git a/packages/kbn-test/src/junit_report_path.ts b/packages/kbn-test/src/junit_report_path.ts
index 11eaf3d2b14a5..d46c9455dcff0 100644
--- a/packages/kbn-test/src/junit_report_path.ts
+++ b/packages/kbn-test/src/junit_report_path.ts
@@ -20,7 +20,9 @@
import { resolve } from 'path';
const job = process.env.JOB ? `job-${process.env.JOB}-` : '';
-const num = process.env.CI_WORKER_NUMBER ? `worker-${process.env.CI_WORKER_NUMBER}-` : '';
+const num = process.env.CI_PARALLEL_PROCESS_NUMBER
+ ? `worker-${process.env.CI_PARALLEL_PROCESS_NUMBER}-`
+ : '';
export function makeJunitReportPath(rootDirectory: string, reportName: string) {
return resolve(
diff --git a/test/scripts/jenkins_test_setup_oss.sh b/test/scripts/jenkins_test_setup_oss.sh
index 9e68272053221..7bbb867526384 100644
--- a/test/scripts/jenkins_test_setup_oss.sh
+++ b/test/scripts/jenkins_test_setup_oss.sh
@@ -4,7 +4,7 @@ source test/scripts/jenkins_test_setup.sh
if [[ -z "$CODE_COVERAGE" ]] ; then
installDir="$(realpath $PARENT_DIR/kibana/build/oss/kibana-*-SNAPSHOT-linux-x86_64)"
- destDir=${installDir}-${CI_WORKER_NUMBER}
+ destDir=${installDir}-${CI_PARALLEL_PROCESS_NUMBER}
cp -R "$installDir" "$destDir"
export KIBANA_INSTALL_DIR="$destDir"
diff --git a/test/scripts/jenkins_test_setup_xpack.sh b/test/scripts/jenkins_test_setup_xpack.sh
index 76fc7cfe6c876..a72e9749ebbd5 100644
--- a/test/scripts/jenkins_test_setup_xpack.sh
+++ b/test/scripts/jenkins_test_setup_xpack.sh
@@ -4,7 +4,7 @@ source test/scripts/jenkins_test_setup.sh
if [[ -z "$CODE_COVERAGE" ]]; then
installDir="$PARENT_DIR/install/kibana"
- destDir="${installDir}-${CI_WORKER_NUMBER}"
+ destDir="${installDir}-${CI_PARALLEL_PROCESS_NUMBER}"
cp -R "$installDir" "$destDir"
export KIBANA_INSTALL_DIR="$destDir"
diff --git a/vars/agentInfo.groovy b/vars/agentInfo.groovy
index b53ed23f81ff0..166a86c169261 100644
--- a/vars/agentInfo.groovy
+++ b/vars/agentInfo.groovy
@@ -1,5 +1,5 @@
def print() {
- try {
+ catchError(catchInterruptions: false, buildResult: null) {
def startTime = sh(script: "date -d '-3 minutes' -Iseconds | sed s/+/%2B/", returnStdout: true).trim()
def endTime = sh(script: "date -d '+1 hour 30 minutes' -Iseconds | sed s/+/%2B/", returnStdout: true).trim()
@@ -34,8 +34,6 @@ def print() {
echo 'SSH Command:'
echo "ssh -F ssh_config \$(hostname --ip-address)"
""", label: "Worker/Agent/Node debug links"
- } catch(ex) {
- print ex.toString()
}
}
diff --git a/vars/catchErrors.groovy b/vars/catchErrors.groovy
new file mode 100644
index 0000000000000..460a90b8ec0c0
--- /dev/null
+++ b/vars/catchErrors.groovy
@@ -0,0 +1,8 @@
+// Basically, this is a shortcut for catchError(catchInterruptions: false) {}
+// By default, catchError will swallow aborts/timeouts, which we almost never want
+def call(Map params = [:], Closure closure) {
+ params.catchInterruptions = false
+ return catchError(params, closure)
+}
+
+return this
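
A minimal usage sketch of the new step, using a script path that appears elsewhere in this diff; a failure inside the closure marks the build, while aborts and timeouts still propagate:

    // Failures here set the build result, but FlowInterruptedException (abort/timeout)
    // is re-thrown because catchInterruptions is forced to false.
    catchErrors {
      sh './test/scripts/jenkins_unit.sh'
    }
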
diff --git a/vars/githubPr.groovy b/vars/githubPr.groovy
index 91a4a76894d94..7759edbbf5bfc 100644
--- a/vars/githubPr.groovy
+++ b/vars/githubPr.groovy
@@ -14,8 +14,8 @@
So, there is only ever one build status comment on a PR at any given time, the most recent one.
*/
def withDefaultPrComments(closure) {
- catchError {
- catchError {
+ catchErrors {
+ catchErrors {
closure()
}
diff --git a/vars/kibanaPipeline.groovy b/vars/kibanaPipeline.groovy
index dd2e626d1c860..2b9b0eba38f46 100644
--- a/vars/kibanaPipeline.groovy
+++ b/vars/kibanaPipeline.groovy
@@ -1,92 +1,36 @@
-def withWorkers(machineName, preWorkerClosure = {}, workerClosures = [:]) {
- return {
- jobRunner('tests-xl', true) {
- withGcsArtifactUpload(machineName, {
- withPostBuildReporting {
- doSetup()
- preWorkerClosure()
-
- def nextWorker = 1
- def worker = { workerClosure ->
- def workerNumber = nextWorker
- nextWorker++
-
- return {
- // This delay helps smooth out CPU load caused by ES/Kibana instances starting up at the same time
- def delay = (workerNumber-1)*20
- sleep(delay)
-
- workerClosure(workerNumber)
- }
- }
-
- def workers = [:]
- workerClosures.each { workerName, workerClosure ->
- workers[workerName] = worker(workerClosure)
- }
-
- parallel(workers)
- }
- })
- }
- }
-}
-
-def withWorker(machineName, label, Closure closure) {
- return {
- jobRunner(label, false) {
- withGcsArtifactUpload(machineName) {
- withPostBuildReporting {
- doSetup()
- closure()
- }
- }
- }
- }
-}
-
-def intakeWorker(jobName, String script) {
- return withWorker(jobName, 'linux && immutable') {
- withEnv([
- "JOB=${jobName}",
- ]) {
- runbld(script, "Execute ${jobName}")
- }
- }
-}
-
def withPostBuildReporting(Closure closure) {
try {
closure()
} finally {
- catchError {
+ catchErrors {
runErrorReporter()
}
- catchError {
+ catchErrors {
runbld.junit()
}
- catchError {
+ catchErrors {
publishJunit()
}
}
}
-def getPostBuildWorker(name, closure) {
- return { workerNumber ->
- def kibanaPort = "61${workerNumber}1"
- def esPort = "61${workerNumber}2"
- def esTransportPort = "61${workerNumber}3"
+def functionalTestProcess(String name, Closure closure) {
+ return { processNumber ->
+ def kibanaPort = "61${processNumber}1"
+ def esPort = "61${processNumber}2"
+ def esTransportPort = "61${processNumber}3"
withEnv([
- "CI_WORKER_NUMBER=${workerNumber}",
+ "CI_PARALLEL_PROCESS_NUMBER=${processNumber}",
"TEST_KIBANA_HOST=localhost",
"TEST_KIBANA_PORT=${kibanaPort}",
"TEST_KIBANA_URL=http://elastic:changeme@localhost:${kibanaPort}",
"TEST_ES_URL=http://elastic:changeme@localhost:${esPort}",
"TEST_ES_TRANSPORT_PORT=${esTransportPort}",
"IS_PIPELINE_JOB=1",
+ "JOB=${name}",
"KBN_NP_PLUGINS_BUILT=true",
]) {
closure()
@@ -94,8 +38,16 @@ def getPostBuildWorker(name, closure) {
}
}
-def getOssCiGroupWorker(ciGroup) {
- return getPostBuildWorker("ciGroup" + ciGroup, {
+def functionalTestProcess(String name, String script) {
+ return functionalTestProcess(name) {
+ retryable(name) {
+ runbld(script, "Execute ${name}")
+ }
+ }
+}
+
+def ossCiGroupProcess(ciGroup) {
+ return functionalTestProcess("ciGroup" + ciGroup) {
withEnv([
"CI_GROUP=${ciGroup}",
"JOB=kibana-ciGroup${ciGroup}",
@@ -104,11 +56,11 @@ def getOssCiGroupWorker(ciGroup) {
runbld("./test/scripts/jenkins_ci_group.sh", "Execute kibana-ciGroup${ciGroup}")
}
}
- })
+ }
}
-def getXpackCiGroupWorker(ciGroup) {
- return getPostBuildWorker("xpack-ciGroup" + ciGroup, {
+def xpackCiGroupProcess(ciGroup) {
+ return functionalTestProcess("xpack-ciGroup" + ciGroup) {
withEnv([
"CI_GROUP=${ciGroup}",
"JOB=xpack-kibana-ciGroup${ciGroup}",
@@ -117,56 +69,6 @@ def getXpackCiGroupWorker(ciGroup) {
runbld("./test/scripts/jenkins_xpack_ci_group.sh", "Execute xpack-kibana-ciGroup${ciGroup}")
}
}
- })
-}
-
-def jobRunner(label, useRamDisk, closure) {
- node(label) {
- agentInfo.print()
-
- if (useRamDisk) {
- // Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm
- def originalWorkspace = env.WORKSPACE
- ws('/tmp/workspace') {
- sh(
- script: """
- mkdir -p /dev/shm/workspace
- mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist
- rm --preserve-root -rf '${originalWorkspace}' # then remove just the workspace, just in case there's stuff in it
- ln -s /dev/shm/workspace '${originalWorkspace}'
- """,
- label: "Move workspace to RAM - /dev/shm/workspace"
- )
- }
- }
-
- def scmVars
-
- // Try to clone from Github up to 8 times, waiting 15 secs between attempts
- retryWithDelay(8, 15) {
- scmVars = checkout scm
- }
-
- withEnv([
- "CI=true",
- "HOME=${env.JENKINS_HOME}",
- "PR_SOURCE_BRANCH=${env.ghprbSourceBranch ?: ''}",
- "PR_TARGET_BRANCH=${env.ghprbTargetBranch ?: ''}",
- "PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
- "TEST_BROWSER_HEADLESS=1",
- "GIT_BRANCH=${scmVars.GIT_BRANCH}",
- ]) {
- withCredentials([
- string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
- string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'),
- string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'),
- ]) {
- // scm is configured to check out to the ./kibana directory
- dir('kibana') {
- closure()
- }
- }
- }
}
}
@@ -210,7 +112,7 @@ def withGcsArtifactUpload(workerName, closure) {
try {
closure()
} finally {
- catchError {
+ catchErrors {
ARTIFACT_PATTERNS.each { pattern ->
uploadGcsArtifact(uploadPrefix, pattern)
}
@@ -243,7 +145,7 @@ def sendMail() {
}
def sendInfraMail() {
- catchError {
+ catchErrors {
step([
$class: 'Mailer',
notifyEveryUnstableBuild: true,
@@ -254,7 +156,7 @@ def sendInfraMail() {
}
def sendKibanaMail() {
- catchError {
+ catchErrors {
def buildStatus = buildUtils.getBuildStatus()
if(params.NOTIFY_ON_FAILURE && buildStatus != 'SUCCESS' && buildStatus != 'ABORTED') {
emailext(
@@ -299,4 +201,18 @@ def runErrorReporter() {
)
}
+def call(Map params = [:], Closure closure) {
+ def config = [timeoutMinutes: 135] + params
+
+ stage("Kibana Pipeline") {
+ timeout(time: config.timeoutMinutes, unit: 'MINUTES') {
+ timestamps {
+ ansiColor('xterm') {
+ closure()
+ }
+ }
+ }
+ }
+}
+
return this
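
Putting the new entry point and renamed helpers together, a Jenkinsfile now reduces to the shape below (a condensed sketch of the Jenkinsfile changes earlier in this diff; the full files list every ciGroup):

    library 'kibana-pipeline-library'
    kibanaLibrary.load()

    // kibanaPipeline(...) supplies the stage/timeout/timestamps/ansiColor boilerplate
    kibanaPipeline(timeoutMinutes: 135) {
      catchErrors {
        parallel([
          'kibana-intake-agent': workers.intake('kibana-intake', './test/scripts/jenkins_unit.sh'),
          'kibana-oss-agent': workers.functional('kibana-oss-tests', { kibanaPipeline.buildOss() }, [
            'oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1),
            'oss-accessibility': kibanaPipeline.functionalTestProcess('kibana-accessibility', './test/scripts/jenkins_accessibility.sh'),
          ]),
        ])
      }
      kibanaPipeline.sendMail()
    }
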
diff --git a/vars/retryWithDelay.groovy b/vars/retryWithDelay.groovy
index 70d6f86a63ab2..83fd94c6f2b1e 100644
--- a/vars/retryWithDelay.groovy
+++ b/vars/retryWithDelay.groovy
@@ -2,7 +2,9 @@ def call(retryTimes, delaySecs, closure) {
retry(retryTimes) {
try {
closure()
- } catch (ex) {
+ } catch (org.jenkinsci.plugins.workflow.steps.FlowInterruptedException ex) {
+ throw ex // Immediately re-throw build abort exceptions, don't sleep first
+ } catch (Exception ex) {
sleep delaySecs
throw ex
}
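
The call pattern is unchanged; for example, the checkout retry in the new workers.groovy (later in this diff) uses it as:

    // Up to 8 attempts with a 15-second sleep between ordinary failures;
    // a build abort/timeout now re-throws immediately instead of sleeping first.
    retryWithDelay(8, 15) {
      scmVars = checkout scm
    }
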
diff --git a/vars/retryable.groovy b/vars/retryable.groovy
index cc34024958aed..ed84a00ece49d 100644
--- a/vars/retryable.groovy
+++ b/vars/retryable.groovy
@@ -27,7 +27,7 @@ def getFlakyFailures() {
}
def printFlakyFailures() {
- catchError {
+ catchErrors {
def failures = getFlakyFailures()
if (failures && failures.size() > 0) {
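
For context, `retryable` is still invoked the same way; the new functionalTestProcess string overload in kibanaPipeline.groovy wraps its script like this (a sketch; the retry-only-when-enabled behavior is implied by retryable.enable() in the Jenkinsfile rather than shown in this diff):

    // Wraps the script so a failure can be retried once retryable.enable() has been
    // called, and surfaces in printFlakyFailures() at the end of the build.
    retryable('xpack-accessibility') {
      runbld('./test/scripts/jenkins_xpack_accessibility.sh', 'Execute xpack-accessibility')
    }
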
diff --git a/vars/workers.groovy b/vars/workers.groovy
new file mode 100644
index 0000000000000..c5638f2624fe5
--- /dev/null
+++ b/vars/workers.groovy
@@ -0,0 +1,147 @@
+// "Workers" in this file will spin up an instance, do some setup etc depending on the configuration, and then execute some work that you define
+// e.g. workers.base(name: 'my-worker') { sh "echo 'ready to execute some kibana scripts'" }
+
+/*
+ The base worker that all of the others use. Will clone the scm (assumed to be kibana), and run kibana bootstrap processes by default.
+
+ Parameters:
+ label - gobld/agent label to use, e.g. 'linux && immutable'
+ ramDisk - Should the workspace be mounted in memory? Default: true
+ bootstrapped - If true, download kibana dependencies, run kbn bootstrap, etc. Default: true
+ name - Name of the worker for display purposes, filenames, etc.
+ scm - Jenkins scm configuration for checking out code. Use `null` to disable checkout. Default: inherited from job
+*/
+def base(Map params, Closure closure) {
+ def config = [label: '', ramDisk: true, bootstrapped: true, name: 'unnamed-worker', scm: scm] + params
+ if (!config.label) {
+ error "You must specify an agent label, such as 'tests-xl' or 'linux && immutable', when using workers.base()"
+ }
+
+ node(config.label) {
+ agentInfo.print()
+
+ if (config.ramDisk) {
+ // Move to a temporary workspace, so that we can symlink the real workspace into /dev/shm
+ def originalWorkspace = env.WORKSPACE
+ ws('/tmp/workspace') {
+ sh(
+ script: """
+ mkdir -p /dev/shm/workspace
+ mkdir -p '${originalWorkspace}' # create all of the directories leading up to the workspace, if they don't exist
+ rm --preserve-root -rf '${originalWorkspace}' # then remove just the workspace, just in case there's stuff in it
+ ln -s /dev/shm/workspace '${originalWorkspace}'
+ """,
+ label: "Move workspace to RAM - /dev/shm/workspace"
+ )
+ }
+ }
+
+ def scmVars = [:]
+
+ if (config.scm) {
+ // Try to clone from Github up to 8 times, waiting 15 secs between attempts
+ retryWithDelay(8, 15) {
+ scmVars = checkout scm
+ }
+ }
+
+ withEnv([
+ "CI=true",
+ "HOME=${env.JENKINS_HOME}",
+ "PR_SOURCE_BRANCH=${env.ghprbSourceBranch ?: ''}",
+ "PR_TARGET_BRANCH=${env.ghprbTargetBranch ?: ''}",
+ "PR_AUTHOR=${env.ghprbPullAuthorLogin ?: ''}",
+ "TEST_BROWSER_HEADLESS=1",
+ "GIT_BRANCH=${scmVars.GIT_BRANCH ?: ''}",
+ ]) {
+ withCredentials([
+ string(credentialsId: 'vault-addr', variable: 'VAULT_ADDR'),
+ string(credentialsId: 'vault-role-id', variable: 'VAULT_ROLE_ID'),
+ string(credentialsId: 'vault-secret-id', variable: 'VAULT_SECRET_ID'),
+ ]) {
+ // scm is configured to check out to the ./kibana directory
+ dir('kibana') {
+ if (config.bootstrapped) {
+ kibanaPipeline.doSetup()
+ }
+
+ closure()
+ }
+ }
+ }
+ }
+}
+
+// Worker for ci processes. Extends the base worker and adds GCS artifact upload, error reporting, and junit processing
+def ci(Map params, Closure closure) {
+ def config = [ramDisk: true, bootstrapped: true] + params
+
+ return base(config) {
+ kibanaPipeline.withGcsArtifactUpload(config.name) {
+ kibanaPipeline.withPostBuildReporting {
+ closure()
+ }
+ }
+ }
+}
+
+// Worker for running the current intake jobs. Just runs a single script after bootstrap.
+def intake(jobName, String script) {
+ return {
+ ci(name: jobName, label: 'linux && immutable', ramDisk: false) {
+ withEnv(["JOB=${jobName}"]) {
+ runbld(script, "Execute ${jobName}")
+ }
+ }
+ }
+}
+
+// Worker for running functional tests. Runs a setup process (e.g. the kibana build) then executes a map of closures in parallel (e.g. one for each ciGroup)
+def functional(name, Closure setup, Map processes) {
+ return {
+ parallelProcesses(name: name, setup: setup, processes: processes, delayBetweenProcesses: 20, label: 'tests-xl')
+ }
+}
+
+/*
+ Creates a ci worker that can run a setup process, followed by a group of processes in parallel.
+
+ Parameters:
+ name: Name of the worker for display purposes, filenames, etc.
+ setup: Closure to execute after the agent is bootstrapped, before starting the parallel work
+ processes: Map of closures that will execute in parallel after setup. Each closure is passed a unique number.
+ delayBetweenProcesses: Number of seconds to wait between starting the parallel processes. Useful to spread the load of heavy init processes, e.g. Elasticsearch starting up. Default: 0
+ label: gobld/agent label to use, e.g. 'linux && immutable'. Default: 'tests-xl', a 32 CPU machine used for running many functional test suites in parallel
+*/
+def parallelProcesses(Map params) {
+ def config = [name: 'parallel-worker', setup: {}, processes: [:], delayBetweenProcesses: 0, label: 'tests-xl'] + params
+
+ ci(label: config.label, name: config.name) {
+ config.setup()
+
+ def nextProcessNumber = 1
+ def process = { processName, processClosure ->
+ def processNumber = nextProcessNumber
+ nextProcessNumber++
+
+ return {
+ if (config.delayBetweenProcesses && config.delayBetweenProcesses > 0) {
+ // This delay helps smooth out CPU load caused by ES/Kibana instances starting up at the same time
+ def delay = (processNumber-1)*config.delayBetweenProcesses
+ sleep(delay)
+ }
+
+ processClosure(processNumber)
+ }
+ }
+
+ def processes = [:]
+ config.processes.each { processName, processClosure ->
+ processes[processName] = process(processName, processClosure)
+ }
+
+ parallel(processes)
+ }
+}
+
+return this
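
Taken together, a sketch of how the worker flavours defined above are meant to be used; workers.ci() is not called directly by any Jenkinsfile in this diff (intake/functional wrap it), so that part is illustrative only:

    // base(): scm checkout, env + vault credentials; bootstrapped: false skips kbn bootstrap,
    // and there is no artifact upload or junit reporting at this layer.
    workers.base(name: 'coverage-worker', label: 'tests-l', ramDisk: false, bootstrapped: false) {
      kibanaPipeline.downloadCoverageArtifacts()
    }

    // ci(): adds GCS artifact upload and post-build junit reporting on top of base().
    // This mirrors what workers.intake() does internally, minus the JOB env var.
    workers.ci(name: 'kibana-intake', label: 'linux && immutable', ramDisk: false) {
      runbld('./test/scripts/jenkins_unit.sh', 'Execute kibana-intake')
    }

    // parallelProcesses(): run a setup closure, then a map of numbered processes in parallel,
    // staggering their start to spread CPU load. This is what workers.functional() delegates to.
    workers.parallelProcesses(
      name: 'kibana-oss-tests',
      setup: { kibanaPipeline.buildOss() },
      processes: ['oss-ciGroup1': kibanaPipeline.ossCiGroupProcess(1)],
      delayBetweenProcesses: 20,
      label: 'tests-xl'
    )
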