-
Notifications
You must be signed in to change notification settings - Fork 24
/
create-bucket
executable file
·74 lines (64 loc) · 3.05 KB
/
create-bucket
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
#!/usr/bin/env python
import os
import boto.s3
import json
import swiftclient
from boto import config as botoconfig
from boto.s3.connection import S3Connection, OrdinaryCallingFormat
from oauth2client.service_account import ServiceAccountCredentials
from gcloud.storage.client import Client
from gcloud import exceptions
from azure.storage.blob import BlobService
def bucket_exists(conn, name):
    """Return True when *name* resolves to an existing bucket on *conn*.

    Relies on the connection's lookup() returning a falsy value
    (boto returns None) when the bucket does not exist.
    """
    return bool(conn.lookup(name))
# Ensure the registry's backing bucket/container exists for whichever
# storage driver is selected via REGISTRY_STORAGE.
bucket_name = os.getenv('BUCKET_NAME')
region = os.getenv('AWS_REGION')
storage_driver = os.getenv('REGISTRY_STORAGE')

if storage_driver == "s3":
    if os.getenv('REGISTRY_STORAGE_S3_BACKEND') != 'minio':
        # Plain AWS S3: connect to the configured region and create the
        # bucket only when it is missing.
        conn = boto.s3.connect_to_region(region)
        if not bucket_exists(conn, bucket_name):
            if region == "us-east-1":
                # use "US Standard" region. workaround for https://github.com/boto/boto3/issues/125
                conn.create_bucket(bucket_name)
            else:
                conn.create_bucket(bucket_name, location=region)
    else:
        # Minio speaking the S3 protocol: force sigv4 auth over plain HTTP
        # against the custom host/port.
        botoconfig.add_section('s3')
        botoconfig.set('s3', 'use-sigv4', 'True')
        botoconfig.add_section('Boto')
        botoconfig.set('Boto', 'is_secure', 'False')
        conn = S3Connection(
            host=os.getenv('S3_HOST'),
            port=int(os.getenv('S3_PORT')),
            calling_format=OrdinaryCallingFormat())
        # HACK(bacongobbler): allow boto to connect to minio by changing the region name for s3v4 auth
        conn.auth_region_name = os.getenv('REGISTRY_STORAGE_S3_REGION')
        if not bucket_exists(conn, bucket_name):
            conn.create_bucket(bucket_name)
elif storage_driver == "gcs":
    # Google Cloud Storage: authenticate with the service-account keyfile
    # and read the project id straight out of the same JSON document.
    scopes = ['https://www.googleapis.com/auth/devstorage.full_control']
    keyfile_path = os.getenv('REGISTRY_STORAGE_GCS_KEYFILE')
    credentials = ServiceAccountCredentials.from_json_keyfile_name(keyfile_path, scopes=scopes)
    with open(keyfile_path) as data_file:
        keyfile_contents = json.load(data_file)
    conn = Client(credentials=credentials, project=keyfile_contents['project_id'])
    try:
        conn.get_bucket(bucket_name)
    except exceptions.NotFound:
        conn.create_bucket(bucket_name)
elif storage_driver == "azure":
    conn = BlobService(account_name=os.getenv('REGISTRY_STORAGE_AZURE_ACCOUNTNAME'), account_key=os.getenv('REGISTRY_STORAGE_AZURE_ACCOUNTKEY'))
    # Azure doesn't throw an exception if the container exists by default
    # https://github.com/Azure/azure-storage-python/blob/master/azure/storage/blob/baseblobservice.py#L504
    conn.create_container(bucket_name)
elif storage_driver == "swift":
    conn = swiftclient.Connection(
        user=os.getenv('REGISTRY_STORAGE_SWIFT_USERNAME'),
        key=os.getenv('REGISTRY_STORAGE_SWIFT_PASSWORD'),
        authurl=os.getenv('REGISTRY_STORAGE_SWIFT_AUTHURL'),
        auth_version=os.getenv('REGISTRY_STORAGE_SWIFT_AUTHVERSION'),
        tenant_name=os.getenv('REGISTRY_STORAGE_SWIFT_TENANT')
    )
    # swift also does not throw exception if container already exists.
    conn.put_container(bucket_name)