diff --git a/Jenkinsfile.aws-test b/Jenkinsfile.aws-test
index e53abd931..1208b487d 100644
--- a/Jenkinsfile.aws-test
+++ b/Jenkinsfile.aws-test
@@ -1,8 +1,7 @@
 def NODE = "rhcos-jenkins"
 def AWS_REGION = "us-east-1"
 
-// this var conveniently refers to a location on the server as well as the
-// local dir we sync to/from
+// location on the server we'll rsync to/from our $WORKSPACE
 def images = "/srv/rhcos/output/images"
 
 node(NODE) {
@@ -24,23 +23,19 @@ node(NODE) {
 
     // We're only ever triggered by the cloud job, so we know the latest build is in latest/
     // We immediately resolve it back to the specific images/ dir
-    def dirpath, version
+    def version
     try {
-        utils.inside_assembler_container("-v /srv:/srv") {
+        utils.inside_assembler_container("") {
             stage("Sync In") {
                 withCredentials([
                     string(credentialsId: params.ARTIFACT_SERVER, variable: 'ARTIFACT_SERVER'),
                     sshUserPrivateKey(credentialsId: params.ARTIFACT_SSH_CREDS_ID, keyFileVariable: 'KEY_FILE'),
                 ]) {
-                    dirpath = "${images}/cloud/latest"
-                    sh "mkdir -p ${dirpath}"
-                    utils.rsync_file_in(ARTIFACT_SERVER, KEY_FILE, "${dirpath}/meta.json")
-                    version = utils.sh_capture("jq -r '.[\"ostree-version\"]' ${dirpath}/meta.json")
+                    utils.rsync_file_in_dest(ARTIFACT_SERVER, KEY_FILE, "${images}/cloud/latest/meta.json", "${WORKSPACE}/meta.json")
+                    version = utils.sh_capture("jq -r '.[\"ostree-version\"]' ${WORKSPACE}/meta.json")
                     # resolve to original dir to avoid races in the next rsync in
-                    def imgv = utils.sh_capture("jq -r '.[\"image-version\"]' ${dirpath}/meta.json")
-                    dirpath = "${images}/cloud/${imgv}"
-                    sh "mkdir -p ${dirpath}"
-                    utils.rsync_file_in(ARTIFACT_SERVER, KEY_FILE, "${dirpath}/aws-${AWS_REGION}.json")
+                    def imgv = utils.sh_capture("jq -r '.[\"image-version\"]' ${WORKSPACE}/meta.json")
+                    utils.rsync_file_in_dest(ARTIFACT_SERVER, KEY_FILE, "${images}/cloud/${imgv}/aws-${AWS_REGION}.json", "${WORKSPACE}/aws-${AWS_REGION}.json")
                 }
             }
 
@@ -54,7 +49,7 @@ node(NODE) {
                     string(credentialsId: params.AWS_CI_ACCOUNT, variable: 'AWS_CI_ACCOUNT'),
                     string(credentialsId: params.S3_PUBLIC_BUCKET, variable: 'S3_PUBLIC_BUCKET'),
                 ]) {
-                    def ami_intermediate = utils.sh_capture("jq -r .HVM ${dirpath}/aws-${AWS_REGION}.json")
+                    def ami_intermediate = utils.sh_capture("jq -r .HVM ${WORKSPACE}/aws-${AWS_REGION}.json")
                     currentBuild.description = "version=${version} ami=${ami_intermediate}"
                     sh """
                         # Do testing with intermediate aws image passed in by cloud job
@@ -65,12 +60,6 @@ node(NODE) {
                         # Tests pass, tag the json in the artifact server to a persistent location
                         # and give launch permissions to OpenShift CI
                         export AWS_DEFAULT_REGION=${AWS_REGION}
-                        if [ ! -e ${dirpath}/aws-${AWS_REGION}.json ]; then
-                            echo "Cannot find JSON artifact."
-                            exit 1
-                        fi
-
-                        cp ${dirpath}/aws-${AWS_REGION}.json ${images}/aws-${AWS_REGION}-tested.json
                         aws ec2 create-tags \
                             --resources ${ami_intermediate} \
                             --tags rhcos_tag=alpha
@@ -80,7 +69,7 @@ node(NODE) {
 
                         # Upload the json file to a public location
                         aws s3 cp --acl public-read \
-                            ${images}/aws-${AWS_REGION}-tested.json \
+                            ${WORKSPACE}/aws-${AWS_REGION}-tested.json \
                            s3://${S3_PUBLIC_BUCKET}/aws-${AWS_REGION}-tested.json
                     """
                 }
@@ -96,7 +85,7 @@ node(NODE) {
                     string(credentialsId: params.ARTIFACT_SERVER, variable: 'ARTIFACT_SERVER'),
                     sshUserPrivateKey(credentialsId: params.ARTIFACT_SSH_CREDS_ID, keyFileVariable: 'KEY_FILE'),
                 ]) {
-                    utils.rsync_file_out(ARTIFACT_SERVER, KEY_FILE, "${images}/aws-${AWS_REGION}-tested.json")
+                    utils.rsync_file_out_dest(ARTIFACT_SERVER, KEY_FILE, "${WORKSPACE}/aws-${AWS_REGION}.json", "${images}/aws-${AWS_REGION}-tested.json")
                 }
             }
         }
diff --git a/pipeline-utils.groovy b/pipeline-utils.groovy
index eedede871..ed0f5349f 100644
--- a/pipeline-utils.groovy
+++ b/pipeline-utils.groovy
@@ -120,10 +120,18 @@ def rsync_file_in(server, key, file) {
     rsync_file(key, "${server}:${file}", file)
 }
 
+def rsync_file_in_dest(server, key, srcfile, destfile) {
+    rsync_file(key, "${server}:${srcfile}", destfile)
+}
+
 def rsync_file_out(server, key, file) {
     rsync_file(key, file, "${server}:${file}")
 }
 
+def rsync_file_out_dest(server, key, srcfile, destfile) {
+    rsync_file(key, srcfile, "${server}:${destfile}")
+}
+
 def rsync_file(key, from_file, to_file) {
     sh """
         rsync -Hlpt --stats \
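
For reference, a minimal sketch (not part of the patch) of how the two new helpers are meant to be called from a Jenkinsfile stage; the credential bindings and paths mirror the ones used in the hunks above, and the assumption that utils has already been loaded from pipeline-utils.groovy is illustrative only:

    // Sketch only -- assumes 'utils' was loaded earlier, e.g. utils = load("pipeline-utils.groovy")
    withCredentials([
        string(credentialsId: params.ARTIFACT_SERVER, variable: 'ARTIFACT_SERVER'),
        sshUserPrivateKey(credentialsId: params.ARTIFACT_SSH_CREDS_ID, keyFileVariable: 'KEY_FILE'),
    ]) {
        // pull a remote file into a local destination with a different path
        utils.rsync_file_in_dest(ARTIFACT_SERVER, KEY_FILE,
            "/srv/rhcos/output/images/cloud/latest/meta.json", "${WORKSPACE}/meta.json")
        // push a local file out to a different remote path
        utils.rsync_file_out_dest(ARTIFACT_SERVER, KEY_FILE,
            "${WORKSPACE}/aws-us-east-1.json", "/srv/rhcos/output/images/aws-us-east-1-tested.json")
    }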