From 3a0c39cdc5728b9c436e238297784d24ac1f29ce Mon Sep 17 00:00:00 2001 From: Jonathan Lebon Date: Tue, 25 Sep 2018 10:50:02 -0400 Subject: [PATCH 1/3] cloud: Don't actually call it -tested.json until it is I found it misleading that the `cloud` job uploaded the AMI JSON metadata with `-smoketested.json` appended *before* it was even tested. Let's make this clearer by keeping that suffix off until it's actually true. --- Jenkinsfile.aws-test | 8 ++++---- Jenkinsfile.cloud | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Jenkinsfile.aws-test b/Jenkinsfile.aws-test index a829abf5..566dbff3 100644 --- a/Jenkinsfile.aws-test +++ b/Jenkinsfile.aws-test @@ -30,7 +30,7 @@ node(NODE) { sshUserPrivateKey(credentialsId: params.ARTIFACT_SSH_CREDS_ID, keyFileVariable: 'KEY_FILE'), ]) { sh "mkdir -p ${dirpath}" - utils.rsync_file_in(ARTIFACT_SERVER, KEY_FILE, "${dirpath}/aws-${AWS_REGION}-smoketested.json") + utils.rsync_file_in(ARTIFACT_SERVER, KEY_FILE, "${dirpath}/aws-${AWS_REGION}.json") } } @@ -54,12 +54,12 @@ node(NODE) { # Tests pass, tag the json in the artifact server to a persistent location # and give launch permissions to OpenShift CI export AWS_DEFAULT_REGION=${AWS_REGION} - if [ ! -e ${dirpath}/aws-${AWS_REGION}-smoketested.json ]; then - echo "Cannot find smoketested json artifact." + if [ ! -e ${dirpath}/aws-${AWS_REGION}.json ]; then + echo "Cannot find JSON artifact." 
exit 1 fi - cp ${dirpath}/aws-${AWS_REGION}-smoketested.json ${images}/aws-${AWS_REGION}-tested.json + cp ${dirpath}/aws-${AWS_REGION}.json ${images}/aws-${AWS_REGION}-tested.json aws ec2 create-tags \ --resources ${ami_intermediate} \ --tags rhcos_tag=alpha diff --git a/Jenkinsfile.cloud b/Jenkinsfile.cloud index c01908a6..b89b082a 100644 --- a/Jenkinsfile.cloud +++ b/Jenkinsfile.cloud @@ -216,7 +216,7 @@ node(NODE) { string(credentialsId: params.AWS_CI_ACCOUNT, variable: 'AWS_CI_ACCOUNT'), ]) { sh """ - amijson=${dirpath}/aws-${AWS_REGION}-smoketested.json + amijson=${dirpath}/aws-${AWS_REGION}.json ore aws upload --region ${AWS_REGION} \ --ami-name 'rhcos_dev_${commit[0..6]}' \ --ami-description 'Red Hat CoreOS ${version} (${commit})' \ @@ -251,7 +251,7 @@ node(NODE) { parallel par_stages; par_stages = [:] // Build the job responsible for testing and publishing the ami - def ami_intermediate = utils.sh_capture("jq -r .HVM ${dirpath}/aws-${AWS_REGION}-smoketested.json") + def ami_intermediate = utils.sh_capture("jq -r .HVM ${dirpath}/aws-${AWS_REGION}.json") build job: 'coreos-rhcos-aws-test', wait: false, parameters: [ string(name: 'ami_intermediate', value: "${ami_intermediate}"), string(name: 'version', value: "${version}"), From efab6cf1e9758ab67f301bb7d446c935e5ec9fd2 Mon Sep 17 00:00:00 2001 From: Jonathan Lebon Date: Tue, 25 Sep 2018 11:14:46 -0400 Subject: [PATCH 2/3] aws-test: Drop need for parameters, just use latest/ Get rid of the need to pass parameters to the `aws-test` job in preparation for turning on `keepUndefinedParameters=true` in the Jenkins instance. We only ever run the `aws-test` job from the `cloud` job, so we know at that point that the latest output is in `latest/`. We can just resolve that back to a specific versioned dir before proceeding. This normalizes the `aws-test` job to take the exact same parameters as the others, and makes it easy to e.g. run the job manually like the others.
--- Jenkinsfile.aws-test | 13 ++++++++++++- Jenkinsfile.cloud | 8 +------- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/Jenkinsfile.aws-test b/Jenkinsfile.aws-test index 566dbff3..9af36c28 100644 --- a/Jenkinsfile.aws-test +++ b/Jenkinsfile.aws-test @@ -22,6 +22,9 @@ node(NODE) { return } + // We're only ever triggered by the cloud job, so we know the latest build is in latest/ + // We immediately resolve it back to the specific images/ dir + def dirpath, version try { utils.inside_assembler_container("-v /srv:/srv") { stage("Sync In") { @@ -29,6 +32,13 @@ node(NODE) { string(credentialsId: params.ARTIFACT_SERVER, variable: 'ARTIFACT_SERVER'), sshUserPrivateKey(credentialsId: params.ARTIFACT_SSH_CREDS_ID, keyFileVariable: 'KEY_FILE'), ]) { + dirpath = "${images}/cloud/latest" + sh "mkdir -p ${dirpath}" + utils.rsync_file_in(ARTIFACT_SERVER, KEY_FILE, "${dirpath}/meta.json") + version = utils.sh_capture("jq -r '.[\"ostree-version\"]' ${dirpath}/meta.json") + // resolve to original dir to avoid races in the next rsync in + def imgv = utils.sh_capture("jq -r '.[\"image-version\"]' ${dirpath}/meta.json") + dirpath = "${images}/cloud/${imgv}" sh "mkdir -p ${dirpath}" utils.rsync_file_in(ARTIFACT_SERVER, KEY_FILE, "${dirpath}/aws-${AWS_REGION}.json") } @@ -44,10 +54,11 @@ node(NODE) { string(credentialsId: params.AWS_CI_ACCOUNT, variable: 'AWS_CI_ACCOUNT'), string(credentialsId: params.S3_PUBLIC_BUCKET, variable: 'S3_PUBLIC_BUCKET'), ]) { + def ami_intermediate = utils.sh_capture("jq -r .HVM ${dirpath}/aws-${AWS_REGION}.json") currentBuild.description = "version=${version} ami=${ami_intermediate}" sh """ # Do testing with intermediate aws image passed in by cloud job - if ! kola -b rhcos -p aws --aws-type ${aws_type} --tapfile rhcos-aws.tap --aws-ami ${ami_intermediate} --aws-region ${AWS_REGION} -j ${NUM_VMS} run; then + if ! 
kola -b rhcos -p aws --aws-type t2.small --tapfile rhcos-aws.tap --aws-ami ${ami_intermediate} --aws-region ${AWS_REGION} -j ${NUM_VMS} run; then exit 1 fi diff --git a/Jenkinsfile.cloud b/Jenkinsfile.cloud index b89b082a..026c93ac 100644 --- a/Jenkinsfile.cloud +++ b/Jenkinsfile.cloud @@ -251,13 +251,7 @@ node(NODE) { parallel par_stages; par_stages = [:] // Build the job responsible for testing and publishing the ami - def ami_intermediate = utils.sh_capture("jq -r .HVM ${dirpath}/aws-${AWS_REGION}.json") - build job: 'coreos-rhcos-aws-test', wait: false, parameters: [ - string(name: 'ami_intermediate', value: "${ami_intermediate}"), - string(name: 'version', value: "${version}"), - string(name: 'dirpath', value: "${dirpath}"), - string(name: 'aws_type', value: "t2.small") - ] + build job: 'coreos-rhcos-aws-test', wait: false } } catch (Throwable e) { currentBuild.result = 'FAILURE' From a5a1f58c1b1b06c0f9324074633a0334980a60c5 Mon Sep 17 00:00:00 2001 From: Jonathan Lebon Date: Wed, 26 Sep 2018 17:51:18 -0400 Subject: [PATCH 3/3] aws-test: Stop using `/srv` We shouldn't `rsync` to the shared `/srv` location. Just sync directly to our `WORKSPACE` to avoid races. See also #211. 
--- Jenkinsfile.aws-test | 31 ++++++++++--------------------- pipeline-utils.groovy | 8 ++++++++ 2 files changed, 18 insertions(+), 21 deletions(-) diff --git a/Jenkinsfile.aws-test b/Jenkinsfile.aws-test index 9af36c28..2dc3d7bb 100644 --- a/Jenkinsfile.aws-test +++ b/Jenkinsfile.aws-test @@ -1,8 +1,7 @@ def NODE = "rhcos-jenkins" def AWS_REGION = "us-east-1" -// this var conveniently refers to a location on the server as well as the -// local dir we sync to/from +// location on the server we'll rsync to/from our $WORKSPACE def images = "/srv/rhcos/output/images" node(NODE) { @@ -24,23 +23,19 @@ node(NODE) { // We're only ever triggered by the cloud job, so we know the latest build is in latest/ // We immediately resolve it back to the specific images/ dir - def dirpath, version + def version try { - utils.inside_assembler_container("-v /srv:/srv") { + utils.inside_assembler_container("") { stage("Sync In") { withCredentials([ string(credentialsId: params.ARTIFACT_SERVER, variable: 'ARTIFACT_SERVER'), sshUserPrivateKey(credentialsId: params.ARTIFACT_SSH_CREDS_ID, keyFileVariable: 'KEY_FILE'), ]) { - dirpath = "${images}/cloud/latest" - sh "mkdir -p ${dirpath}" - utils.rsync_file_in(ARTIFACT_SERVER, KEY_FILE, "${dirpath}/meta.json") - version = utils.sh_capture("jq -r '.[\"ostree-version\"]' ${dirpath}/meta.json") + utils.rsync_file_in_dest(ARTIFACT_SERVER, KEY_FILE, "${images}/cloud/latest/meta.json", "${WORKSPACE}/meta.json") + version = utils.sh_capture("jq -r '.[\"ostree-version\"]' ${WORKSPACE}/meta.json") // resolve to original dir to avoid races in the next rsync in - def imgv = utils.sh_capture("jq -r '.[\"image-version\"]' ${dirpath}/meta.json") - dirpath = "${images}/cloud/${imgv}" - sh "mkdir -p ${dirpath}" - utils.rsync_file_in(ARTIFACT_SERVER, KEY_FILE, "${dirpath}/aws-${AWS_REGION}.json") + def imgv = utils.sh_capture("jq -r '.[\"image-version\"]' ${WORKSPACE}/meta.json") + utils.rsync_file_in_dest(ARTIFACT_SERVER, KEY_FILE, 
"${images}/cloud/${imgv}/aws-${AWS_REGION}.json", "${WORKSPACE}/aws-${AWS_REGION}.json") } } @@ -54,7 +49,7 @@ node(NODE) { string(credentialsId: params.AWS_CI_ACCOUNT, variable: 'AWS_CI_ACCOUNT'), string(credentialsId: params.S3_PUBLIC_BUCKET, variable: 'S3_PUBLIC_BUCKET'), ]) { - def ami_intermediate = utils.sh_capture("jq -r .HVM ${dirpath}/aws-${AWS_REGION}.json") + def ami_intermediate = utils.sh_capture("jq -r .HVM ${WORKSPACE}/aws-${AWS_REGION}.json") currentBuild.description = "version=${version} ami=${ami_intermediate}" sh """ # Do testing with intermediate aws image passed in by cloud job @@ -65,12 +60,6 @@ node(NODE) { # Tests pass, tag the json in the artifact server to a persistent location # and give launch permissions to OpenShift CI export AWS_DEFAULT_REGION=${AWS_REGION} - if [ ! -e ${dirpath}/aws-${AWS_REGION}.json ]; then - echo "Cannot find JSON artifact." - exit 1 - fi - - cp ${dirpath}/aws-${AWS_REGION}.json ${images}/aws-${AWS_REGION}-tested.json aws ec2 create-tags \ --resources ${ami_intermediate} \ --tags rhcos_tag=alpha @@ -80,7 +69,7 @@ node(NODE) { # Upload the json file to a public location aws s3 cp --acl public-read \ - ${images}/aws-${AWS_REGION}-tested.json \ + ${WORKSPACE}/aws-${AWS_REGION}-tested.json \ s3://${S3_PUBLIC_BUCKET}/aws-${AWS_REGION}-tested.json """ } @@ -96,7 +85,7 @@ node(NODE) { string(credentialsId: params.ARTIFACT_SERVER, variable: 'ARTIFACT_SERVER'), sshUserPrivateKey(credentialsId: params.ARTIFACT_SSH_CREDS_ID, keyFileVariable: 'KEY_FILE'), ]) { - utils.rsync_file_out(ARTIFACT_SERVER, KEY_FILE, "${images}/aws-${AWS_REGION}-tested.json") + utils.rsync_file_out_dest(ARTIFACT_SERVER, KEY_FILE, "${WORKSPACE}/aws-${AWS_REGION}.json", "${images}/aws-${AWS_REGION}-tested.json") } } } diff --git a/pipeline-utils.groovy b/pipeline-utils.groovy index eedede87..ed0f5349 100644 --- a/pipeline-utils.groovy +++ b/pipeline-utils.groovy @@ -120,10 +120,18 @@ def rsync_file_in(server, key, file) { rsync_file(key, 
"${server}:${file}", file) } +def rsync_file_in_dest(server, key, srcfile, destfile) { + rsync_file(key, "${server}:${srcfile}", destfile) +} + def rsync_file_out(server, key, file) { rsync_file(key, file, "${server}:${file}") } +def rsync_file_out_dest(server, key, srcfile, destfile) { + rsync_file(key, srcfile, "${server}:${destfile}") +} + def rsync_file(key, from_file, to_file) { sh """ rsync -Hlpt --stats \