
Commit

s3_bucket transfer acceleration
abraverm committed Apr 9, 2024
1 parent 3974a82 commit d5b77a9
Showing 3 changed files with 205 additions and 0 deletions.
59 changes: 59 additions & 0 deletions plugins/modules/s3_bucket.py
@@ -166,6 +166,12 @@
    type: bool
    default: false
    version_added: 6.0.0
  accelerate_enabled:
    description:
      - Enables Amazon S3 Transfer Acceleration; data sent to the bucket is routed to Amazon S3 over an optimized network path.
    type: bool
    default: false
    version_added: 7.6.0
extends_documentation_fragment:
  - amazon.aws.common.modules
@@ -286,6 +292,12 @@
    name: mys3bucket
    state: present
    acl: public-read

# Enable transfer acceleration
- amazon.aws.s3_bucket:
    name: mys3bucket
    state: present
    accelerate_enabled: true
"""

RETURN = r"""
@@ -388,6 +400,7 @@ def create_or_update_bucket(s3_client, module):
    object_ownership = module.params.get("object_ownership")
    object_lock_enabled = module.params.get("object_lock_enabled")
    acl = module.params.get("acl")
    accelerate_enabled = module.params.get("accelerate_enabled")
    # default to US Standard region,
    # note: module.region will also try to pull a default out of the boto3 configs.
    location = module.region or "us-east-1"
@@ -403,6 +416,8 @@
        module.fail_json_aws(e, msg="Failed to check bucket presence")

    if not bucket_is_present:
        if accelerate_enabled and "." in str(name):
            module.fail_json(msg="S3 Transfer Acceleration is not supported for buckets with periods (.) in their names")
        try:
            bucket_changed = create_bucket(s3_client, name, location, object_lock_enabled)
            s3_client.get_waiter("bucket_exists").wait(Bucket=name)
@@ -732,6 +747,35 @@ def create_or_update_bucket(s3_client, module):
    if object_lock_enabled and not object_lock_status:
        module.fail_json(msg="Enabling object lock for existing buckets is not supported")

    # -- Transfer Acceleration
    try:
        accelerate_status = get_bucket_accelerate_status(s3_client, name)
        result["accelerate_enabled"] = accelerate_status
    except is_boto3_error_code(["NotImplemented", "XNotImplemented"]) as e:
        if accelerate_enabled:
            module.fail_json(msg="Fetching bucket transfer acceleration state is not supported")
    except is_boto3_error_code("AccessDenied") as e:  # pylint: disable=duplicate-except
        if accelerate_enabled:
            module.fail_json(msg="Permission denied fetching transfer acceleration for bucket")
    except (
        botocore.exceptions.BotoCoreError,
        botocore.exceptions.ClientError,
    ) as e:  # pylint: disable=duplicate-except
        module.fail_json_aws(e, msg="Failed to fetch bucket transfer acceleration state")
    else:
        if accelerate_status is not None:
            try:
                if not accelerate_enabled and accelerate_status:
                    delete_bucket_accelerate_configuration(s3_client, name)
                    changed = True
                    result["accelerate_enabled"] = False
                if accelerate_enabled and not accelerate_status:
                    put_bucket_accelerate_configuration(s3_client, name)
                    changed = True
                    result["accelerate_enabled"] = True
            except (botocore.exceptions.BotoCoreError, botocore.exceptions.ClientError) as e:
                module.fail_json_aws(e, msg="Failed to update bucket transfer acceleration")

    # Module exit
    module.exit_json(changed=changed, name=name, **result)

@@ -768,6 +812,15 @@ def create_bucket(s3_client, bucket_name, location, object_lock_enabled=False):
        # method. However, the AWS Api sometimes fails to report bucket presence, so we catch this exception
        return False


@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"])
def put_bucket_accelerate_configuration(s3_client, bucket_name):
    s3_client.put_bucket_accelerate_configuration(Bucket=bucket_name, AccelerateConfiguration={"Status": "Enabled"})


@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"])
def delete_bucket_accelerate_configuration(s3_client, bucket_name):
    s3_client.put_bucket_accelerate_configuration(Bucket=bucket_name, AccelerateConfiguration={"Status": "Suspended"})


@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"])
def put_bucket_tagging(s3_client, bucket_name, tags):
@@ -822,6 +875,11 @@ def get_bucket_object_lock_enabled(s3_client, bucket_name):
    object_lock_configuration = s3_client.get_object_lock_configuration(Bucket=bucket_name)
    return object_lock_configuration["ObjectLockConfiguration"]["ObjectLockEnabled"] == "Enabled"


@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"])
def get_bucket_accelerate_status(s3_client, bucket_name):
    accelerate_configuration = s3_client.get_bucket_accelerate_configuration(Bucket=bucket_name)
    return accelerate_configuration.get("Status") == "Enabled"


@AWSRetry.exponential_backoff(max_delay=120, catch_extra_error_codes=["NoSuchBucket", "OperationAborted"])
def get_bucket_encryption(s3_client, bucket_name):
@@ -1220,6 +1278,7 @@ def main():
        acl=dict(type="str", choices=["private", "public-read", "public-read-write", "authenticated-read"]),
        validate_bucket_name=dict(type="bool", default=True),
        dualstack=dict(default=False, type="bool"),
        accelerate_enabled=dict(default=False, type="bool"),
        object_lock_enabled=dict(type="bool"),
    )

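The new accelerate_enabled option only flips the bucket-level configuration; clients still have to opt in to the accelerated endpoint when they talk to the bucket. A minimal boto3 sketch of that client-side opt-in, not part of this commit (bucket name and file path are placeholder values):

import boto3
from botocore.config import Config

# Route requests through the s3-accelerate endpoint instead of the regional endpoint
s3 = boto3.client("s3", config=Config(s3={"use_accelerate_endpoint": True}))
s3.upload_file("large-file.bin", "mys3bucket", "large-file.bin")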
1 change: 1 addition & 0 deletions tests/integration/targets/s3_bucket/inventory
@@ -11,6 +11,7 @@ encryption_sse
public_access
acl
object_lock
accelerate

[all:vars]
ansible_connection=local
@@ -0,0 +1,145 @@
---
- module_defaults:
    group/aws:
      access_key: "{{ aws_access_key }}"
      secret_key: "{{ aws_secret_key }}"
      session_token: "{{ security_token | default(omit) }}"
      region: "{{ aws_region }}"
  block:
    - ansible.builtin.set_fact:
        local_bucket_name: "{{ bucket_name | hash('md5') }}-accelerate"

    # ============================================================

    - name: Create a simple bucket
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}"
        state: present
      register: output

    - ansible.builtin.assert:
        that:
          - output.changed
          - not output.accelerate_enabled

    - name: Re-disable transfer acceleration (idempotency)
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}"
        state: present
        accelerate_enabled: false
      register: output

    - ansible.builtin.assert:
        that:
          - not output.changed
          - not output.accelerate_enabled

    - name: Enable transfer acceleration
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}"
        state: present
        accelerate_enabled: true
      register: output
      ignore_errors: false

    - ansible.builtin.assert:
        that:
          - output.changed
          - output.accelerate_enabled

    - name: Re-enable transfer acceleration (idempotency)
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}"
        state: present
        accelerate_enabled: true
      register: output

    - ansible.builtin.assert:
        that:
          - not output.changed
          - output.accelerate_enabled

    - name: Delete test s3 bucket
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}"
        state: absent
      register: output

    - ansible.builtin.assert:
        that:
          - output.changed

    # ============================================================

    - name: Create a bucket with transfer acceleration enabled
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}-2"
        state: present
        accelerate_enabled: true
      register: output

    - ansible.builtin.assert:
        that:
          - output.changed
          - output.accelerate_enabled

    - name: Disable transfer acceleration
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}-2"
        state: present
        accelerate_enabled: false
      register: output
      ignore_errors: false

    - ansible.builtin.assert:
        that:
          - output.changed
          - not output.accelerate_enabled

    - name: Re-enable transfer acceleration
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}-2"
        state: present
        accelerate_enabled: true
      register: output

    - ansible.builtin.assert:
        that:
          - output.changed
          - output.accelerate_enabled

    - name: Touch bucket with transfer acceleration enabled (idempotency)
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}-2"
        state: present
        accelerate_enabled: true
      register: output

    - ansible.builtin.assert:
        that:
          - not output.changed
          - output.accelerate_enabled

    - name: Delete test s3 bucket
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}-2"
        state: absent
      register: output

    - ansible.builtin.assert:
        that:
          - output.changed

  # ============================================================
  always:
    - name: Ensure all buckets are deleted
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}"
        state: absent
      ignore_errors: true

    - name: Ensure all buckets are deleted
      amazon.aws.s3_bucket:
        name: "{{ local_bucket_name }}-2"
        state: absent
      ignore_errors: true

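To spot-check what these tasks assert outside Ansible, the bucket's acceleration state can be read back with the same boto3 call the module's get_bucket_accelerate_status helper wraps. A minimal sketch (the bucket name is a placeholder):

import boto3

s3 = boto3.client("s3")
response = s3.get_bucket_accelerate_configuration(Bucket="mys3bucket")
# "Status" is "Enabled" or "Suspended"; the key is absent if acceleration was never configured
print(response.get("Status", "never configured"))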
