New Resource: aws_ssm_resource_data_sync (#1895)
* CRD methods for ResourceDataSync

* Modify checkDestroy, make docs

* Address first round of review comments

* Address second round of review comments

* goimports

* Retry creation due to S3 eventual consistency
atsushi-ishibashi authored and radeksimko committed Nov 10, 2017
1 parent 0f45448 commit a28808b
Showing 5 changed files with 361 additions and 0 deletions.
1 change: 1 addition & 0 deletions aws/provider.go
@@ -463,6 +463,7 @@ func Provider() terraform.ResourceProvider {
"aws_ssm_patch_baseline": resourceAwsSsmPatchBaseline(),
"aws_ssm_patch_group": resourceAwsSsmPatchGroup(),
"aws_ssm_parameter": resourceAwsSsmParameter(),
"aws_ssm_resource_data_sync": resourceAwsSsmResourceDataSync(),
"aws_spot_datafeed_subscription": resourceAwsSpotDataFeedSubscription(),
"aws_spot_instance_request": resourceAwsSpotInstanceRequest(),
"aws_spot_fleet_request": resourceAwsSpotFleetRequest(),
169 changes: 169 additions & 0 deletions aws/resource_aws_ssm_resource_data_sync.go
@@ -0,0 +1,169 @@
package aws

import (
"time"

"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/ssm"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/helper/schema"
)

func resourceAwsSsmResourceDataSync() *schema.Resource {
return &schema.Resource{
Create: resourceAwsSsmResourceDataSyncCreate,
Read: resourceAwsSsmResourceDataSyncRead,
Delete: resourceAwsSsmResourceDataSyncDelete,

Schema: map[string]*schema.Schema{
"name": &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"s3_destination": {
Type: schema.TypeList,
Required: true,
ForceNew: true,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"kms_key_arn": {
Type: schema.TypeString,
Optional: true,
},
"bucket_name": {
Type: schema.TypeString,
Required: true,
},
"prefix": {
Type: schema.TypeString,
Optional: true,
},
"region": {
Type: schema.TypeString,
Required: true,
},
"sync_format": {
Type: schema.TypeString,
Optional: true,
Default: ssm.ResourceDataSyncS3FormatJsonSerDe,
},
},
},
},
},
}
}

func resourceAwsSsmResourceDataSyncCreate(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*AWSClient).ssmconn

err := resource.Retry(1*time.Minute, func() *resource.RetryError {
input := &ssm.CreateResourceDataSyncInput{
S3Destination: expandSsmResourceDataSyncS3Destination(d),
SyncName: aws.String(d.Get("name").(string)),
}
_, err := conn.CreateResourceDataSync(input)
if err != nil {
if isAWSErr(err, ssm.ErrCodeResourceDataSyncInvalidConfigurationException, "S3 write failed for bucket") {
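// The destination bucket policy that grants SSM write access may not have propagated yet (S3 is eventually consistent), so retry on this error.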
return resource.RetryableError(err)
}
return resource.NonRetryableError(err)
}
return nil
})

if err != nil {
return err
}

d.SetId(d.Get("name").(string))
return resourceAwsSsmResourceDataSyncRead(d, meta)
}

func resourceAwsSsmResourceDataSyncRead(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*AWSClient).ssmconn

syncItem, err := findResourceDataSyncItem(conn, d.Get("name").(string))
if err != nil {
return err
}
if syncItem == nil {
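// No sync with this name exists anymore; remove it from state so Terraform can recreate it.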
d.SetId("")
return nil
}
d.Set("s3_destination", flattenSsmResourceDataSyncS3Destination(syncItem.S3Destination))
return nil
}

func resourceAwsSsmResourceDataSyncDelete(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*AWSClient).ssmconn

input := &ssm.DeleteResourceDataSyncInput{
SyncName: aws.String(d.Get("name").(string)),
}

_, err := conn.DeleteResourceDataSync(input)
if err != nil {
if isAWSErr(err, ssm.ErrCodeResourceDataSyncNotFoundException, "") {
return nil
}
return err
}
return nil
}

func findResourceDataSyncItem(conn *ssm.SSM, name string) (*ssm.ResourceDataSyncItem, error) {
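// Page through ListResourceDataSync results looking for a sync with the given name; return nil (with no error) when none is found.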
nextToken := ""
for {
input := &ssm.ListResourceDataSyncInput{}
if nextToken != "" {
input.NextToken = aws.String(nextToken)
}
resp, err := conn.ListResourceDataSync(input)
if err != nil {
return nil, err
}
for _, v := range resp.ResourceDataSyncItems {
if *v.SyncName == name {
return v, nil
}
}
if resp.NextToken == nil {
break
}
nextToken = *resp.NextToken
}
return nil, nil
}

func flattenSsmResourceDataSyncS3Destination(dest *ssm.ResourceDataSyncS3Destination) []interface{} {
result := make(map[string]interface{})
result["bucket_name"] = *dest.BucketName
result["region"] = *dest.Region
result["sync_format"] = *dest.SyncFormat
if dest.AWSKMSKeyARN != nil {
result["kms_key_arn"] = *dest.AWSKMSKeyARN
}
if dest.Prefix != nil {
result["prefix"] = *dest.Prefix
}
return []interface{}{result}
}

func expandSsmResourceDataSyncS3Destination(d *schema.ResourceData) *ssm.ResourceDataSyncS3Destination {
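// The schema limits s3_destination to a single block (MaxItems: 1), so only the first element is read.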
raw := d.Get("s3_destination").([]interface{})[0].(map[string]interface{})
s3dest := &ssm.ResourceDataSyncS3Destination{
BucketName: aws.String(raw["bucket_name"].(string)),
Region: aws.String(raw["region"].(string)),
SyncFormat: aws.String(raw["sync_format"].(string)),
}
if v, ok := raw["kms_key_arn"].(string); ok && v != "" {
s3dest.AWSKMSKeyARN = aws.String(v)
}
if v, ok := raw["prefix"].(string); ok && v != "" {
s3dest.Prefix = aws.String(v)
}
return s3dest
}
108 changes: 108 additions & 0 deletions aws/resource_aws_ssm_resource_data_sync_test.go
@@ -0,0 +1,108 @@
package aws

import (
"fmt"
"log"
"testing"

"github.com/hashicorp/terraform/helper/acctest"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
)

func TestAccAWSSsmResourceDataSync_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSSsmResourceDataSyncDestroy,
Steps: []resource.TestStep{
{
Config: testAccSsmResourceDataSyncConfig(acctest.RandInt(), acctest.RandString(5)),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSSsmResourceDataSyncExists("aws_ssm_resource_data_sync.foo"),
),
},
},
})
}

func testAccCheckAWSSsmResourceDataSyncDestroy(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).ssmconn

for _, rs := range s.RootModule().Resources {
if rs.Type != "aws_ssm_resource_data_sync" {
continue
}
syncItem, err := findResourceDataSyncItem(conn, rs.Primary.Attributes["name"])
if err != nil {
return err
}
if syncItem != nil {
return fmt.Errorf("Resource Data Sync (%s) found", rs.Primary.Attributes["name"])
}
}
return nil
}

func testAccCheckAWSSsmResourceDataSyncExists(name string) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[name]
if !ok {
return fmt.Errorf("Not found: %s", name)
}
// Verify the sync actually exists in AWS, not just in state.
conn := testAccProvider.Meta().(*AWSClient).ssmconn
syncItem, err := findResourceDataSyncItem(conn, rs.Primary.Attributes["name"])
if err != nil {
return err
}
if syncItem == nil {
return fmt.Errorf("Resource Data Sync (%s) was not created", rs.Primary.Attributes["name"])
}
return nil
}
}

func testAccSsmResourceDataSyncConfig(rInt int, rName string) string {
return fmt.Sprintf(`
resource "aws_s3_bucket" "hoge" {
bucket = "tf-test-bucket-%d"
region = "us-west-2"
force_destroy = true
}
resource "aws_s3_bucket_policy" "hoge" {
bucket = "${aws_s3_bucket.hoge.bucket}"
policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "SSMBucketPermissionsCheck",
"Effect": "Allow",
"Principal": {
"Service": "ssm.amazonaws.com"
},
"Action": "s3:GetBucketAcl",
"Resource": "arn:aws:s3:::tf-test-bucket-%d"
},
{
"Sid": " SSMBucketDelivery",
"Effect": "Allow",
"Principal": {
"Service": "ssm.amazonaws.com"
},
"Action": "s3:PutObject",
"Resource": ["arn:aws:s3:::tf-test-bucket-%d/*"],
"Condition": {
"StringEquals": {
"s3:x-amz-acl": "bucket-owner-full-control"
}
}
}
]
}
EOF
}
resource "aws_ssm_resource_data_sync" "foo" {
name = "tf-test-ssm-%s"
s3_destination = {
bucket_name = "${aws_s3_bucket.hoge.bucket}"
region = "${aws_s3_bucket.hoge.region}"
}
}
`, rInt, rInt, rInt, rName)
}
4 changes: 4 additions & 0 deletions website/aws.erb
@@ -1487,6 +1487,10 @@
<a href="/docs/providers/aws/r/ssm_parameter.html">aws_ssm_parameter</a>
</li>

<li<%= sidebar_current("docs-aws-resource-ssm-resource-data-sync") %>>
<a href="/docs/providers/aws/r/ssm_resource_data_sync.html">aws_ssm_resource_data_sync</a>
</li>

</ul>
</li>

79 changes: 79 additions & 0 deletions website/docs/r/ssm_resource_data_sync.html.markdown
@@ -0,0 +1,79 @@
---
layout: "aws"
page_title: "AWS: aws_ssm_resource_data_sync"
sidebar_current: "docs-aws-resource-ssm-resource-data-sync"
description: |-
Provides an SSM resource data sync.
---

# aws_ssm_resource_data_sync

Provides an SSM resource data sync.

## Example Usage

```hcl
resource "aws_s3_bucket" "hoge" {
bucket = "tf-test-bucket-1234"
region = "us-east-1"
}
resource "aws_s3_bucket_policy" "hoge" {
bucket = "${aws_s3_bucket.hoge.bucket}"
policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "SSMBucketPermissionsCheck",
"Effect": "Allow",
"Principal": {
"Service": "ssm.amazonaws.com"
},
"Action": "s3:GetBucketAcl",
"Resource": "arn:aws:s3:::tf-test-bucket-1234"
},
{
"Sid": " SSMBucketDelivery",
"Effect": "Allow",
"Principal": {
"Service": "ssm.amazonaws.com"
},
"Action": "s3:PutObject",
"Resource": ["arn:aws:s3:::tf-test-bucket-1234/*"],
"Condition": {
"StringEquals": {
"s3:x-amz-acl": "bucket-owner-full-control"
}
}
}
]
}
EOF
}
resource "aws_ssm_resource_data_sync" "foo" {
name = "foo"
s3_destination = {
bucket_name = "${aws_s3_bucket.hoge.bucket}"
region = "${aws_s3_bucket.hoge.region}"
}
}
```

## Argument Reference

The following arguments are supported:

* `name` - (Required) Name for the configuration.
* `s3_destination` - (Required) Amazon S3 configuration details for the sync.

## s3_destination

`s3_destination` supports the following:

* `bucket_name` - (Required) Name of the S3 bucket where the aggregated data is stored.
* `region` - (Required) Region of the S3 bucket targeted by the Resource Data Sync.
* `kms_key_arn` - (Optional) ARN of the KMS key used to encrypt the destination data in Amazon S3.
* `prefix` - (Optional) Prefix under which the data is stored in the bucket.
* `sync_format` - (Optional) A supported sync format. Only `JsonSerDe` is currently supported. Defaults to `JsonSerDe`.
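
For reference, an `s3_destination` block using all of the optional arguments above might look like the following sketch. The prefix value is illustrative, and the `aws_kms_key.hoge` resource is assumed to be defined elsewhere in the configuration:

```hcl
resource "aws_ssm_resource_data_sync" "foo" {
  name = "foo"

  s3_destination = {
    bucket_name = "${aws_s3_bucket.hoge.bucket}"
    region      = "${aws_s3_bucket.hoge.region}"
    prefix      = "ssm-inventory"
    sync_format = "JsonSerDe"
    kms_key_arn = "${aws_kms_key.hoge.arn}"
  }
}
```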
