Merge pull request #330 from cgwalters/upload-us-east-2
Copy tested AMI to more regions
openshift-merge-robot authored Oct 2, 2018
2 parents de9fa2f + 09d433d commit c610545
Showing 2 changed files with 111 additions and 1 deletion.
18 changes: 17 additions & 1 deletion Jenkinsfile.aws-test
@@ -1,8 +1,13 @@
def NODE = "rhcos-jenkins"
// Our primary
def AWS_REGION = "us-east-1"
def API_CI_REGISTRY = "registry.svc.ci.openshift.org"
def OS_NAME = "maipo";
def OSCONTAINER_IMG = API_CI_REGISTRY + "/rhcos/os-${OS_NAME}"
// We copy tested AMIs to other regions; this
// list is hardcoded right now pending discussion
// with ops about which regions we should target
def OTHER_AWS_REGIONS = ["us-east-2", "us-west-1"]
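// Illustrative note: the copy script below expects a comma-separated list,
// so OTHER_AWS_REGIONS.join(',') evaluates to "us-east-2,us-west-1".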

// location on the server we'll rsync to/from our $WORKSPACE
def images = "/srv/rhcos/output/images"
@@ -81,11 +86,21 @@ node(NODE) {
aws ec2 modify-image-attribute \
    --image-id ${ami_intermediate} \
    --launch-permission '{"Add":[{"Group":"all"}]}'
# And upload to more regions
./scripts/ami-copy-regions --source-region ${AWS_REGION} \
    --source-image-id ${ami_intermediate} \
    --name rhcos_dev_${version} \
    --copy-tags rhcos_tag,ostree_version,ostree_commit \
    --regions ${OTHER_AWS_REGIONS.join(',')} \
    --out ${WORKSPACE}/aws.json

# Upload the json file to a public location
# Upload the json files to a public location
aws s3 cp --acl public-read \
    ${WORKSPACE}/aws-${AWS_REGION}.json \
    s3://${S3_PUBLIC_BUCKET}/aws-${AWS_REGION}-tested.json
aws s3 cp --acl public-read \
    ${WORKSPACE}/aws.json \
    s3://${S3_PUBLIC_BUCKET}/aws-tested.json

# Copy the container image to alpha, then GC the image tagged with the ostree commit
skopeo copy docker://${OSCONTAINER_IMG}:${ostree_commit} docker://${OSCONTAINER_IMG}:alpha
@@ -106,6 +121,7 @@ node(NODE) {
sshUserPrivateKey(credentialsId: params.ARTIFACT_SSH_CREDS_ID, keyFileVariable: 'KEY_FILE'),
]) {
utils.rsync_file_out_dest(ARTIFACT_SERVER, KEY_FILE, "${WORKSPACE}/aws-${AWS_REGION}.json", "${images}/aws-${AWS_REGION}-tested.json")
utils.rsync_file_out_dest(ARTIFACT_SERVER, KEY_FILE, "${WORKSPACE}/aws.json", "${images}/aws-tested.json")
}
}
}
94 changes: 94 additions & 0 deletions scripts/ami-copy-regions
@@ -0,0 +1,94 @@
#!/usr/bin/python3
# Copy an AMI to multiple regions, generating an "AMI JSON"
# file matching the Container Linux schema:
# https://alpha.release.core-os.net/amd64-usr/current/coreos_production_ami_all.json
# Note this assumes the images are HVM.
# The images are also made public.
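# For illustration (hypothetical values), the file written to --out looks like:
#   {"amis": [{"name": "us-east-2", "hvm": "ami-0123456789abcdef0"}, ...]}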

import os,sys,argparse,subprocess,io,time,re,multiprocessing
import tempfile, json

def fatal(msg):
    print('error: {}'.format(msg), file=sys.stderr)
    raise SystemExit(1)

def csv_list(string):
    return string.split(',')

parser = argparse.ArgumentParser()
parser.add_argument("--source-region", help="Source region",
                    action='store', required=True)
parser.add_argument("--source-image-id", help="Source AMI",
                    action='store', required=True)
parser.add_argument("--name", help="AMI name",
                    action='store', required=True)
# These could be repeated args, but I find the comma-separated form far less
# verbose.
parser.add_argument("--copy-tags", help="Copy the given tags (comma separated)",
                    action='store', type=csv_list, default=[])
parser.add_argument("--regions", help="Upload to regions (comma separated)",
                    action='store', required=True,
                    type=csv_list)
parser.add_argument("--out", help="Store output in FILE",
                    action='store', metavar='FILE',
                    required=True)

args = parser.parse_args()

# So...we could use an API, but it'd (probably) add a huge new dependency, and
# right now the low-tech aspect of this is OK.
def runaws(args):
    return json.loads(subprocess.check_output(['aws', '--output', 'json'] + args))
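# Illustrative usage: runaws(['ec2', 'describe-regions']) returns the parsed
# JSON response as a Python dict.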

# Gather tags to copy from the source AMI
tags_to_copy = []
if args.copy_tags:
    res = runaws(['ec2', 'describe-tags', '--filters',
                  'Name=resource-id,Values={}'.format(args.source_image_id)])
    current_tags = {}
    for tag in res['Tags']:
        current_tags[tag['Key']] = tag['Value']
    for tag in args.copy_tags:
        val = current_tags.get(tag)
        if val is None:
            fatal("{} is missing tag: {}".format(args.source_image_id, tag))
        tags_to_copy.append("Key={},Value={}".format(tag, val))
print("Tags to copy: {}".format(tags_to_copy))

# Upload to all the regions
amis = []
for region in args.regions:
    print("Uploading to: {}".format(region))
    res = runaws(['ec2', 'copy-image', '--source-region', args.source_region,
                  '--source-image-id', args.source_image_id,
                  '--name', args.name, '--region', region])
    iid = res['ImageId']
    print("Copy started, ImageId={}".format(iid))

    if tags_to_copy:
        print("Copying tags...")
        subprocess.check_call(['aws', 'ec2', 'create-tags', '--region', region,
                               '--resources', iid, '--tags'] + tags_to_copy)
    amis.append({'name': region,
                 'hvm': iid})

print("Using modify-image-attribute to make AMIs public (may take a while)")
for ami in amis:
print("Waiting on {}".format(ami))
region = ami['name']
iid = ami['hvm']
subprocess.check_call(['aws', 'ec2', '--region', region,
'wait', 'image-available',
'--image-id', iid])
subprocess.check_call(['aws', 'ec2', '--region', region, 'modify-image-attribute',
'--image-id', iid,
'--launch-permission', '{"Add":[{"Group":"all"}]}'])
print("AMI is now public: {}".format(ami))

# Sort by region name so the output is deterministic
amis.sort(key=lambda x: x['name'])

# Write our output JSON
with open(args.out, 'w') as f:
    json.dump({'amis': amis}, f)
