From 8852674ce64dc7e5c404b4372c56dd5a92f64215 Mon Sep 17 00:00:00 2001 From: Akram Hussein Date: Thu, 20 Dec 2018 23:14:41 +0000 Subject: [PATCH] [Terraform]: Add resource_storage_transfer_job --- .../resource_storage_transfer_job.go | 730 ++++++++++++++++++ .../resource_storage_transfer_job_test.go | 162 ++++ third_party/terraform/utils/provider.go.erb | 1 + third_party/terraform/utils/validation.go | 18 + .../docs/r/storage_transfer_job.html.markdown | 216 ++++++ third_party/terraform/website/google.erb | 9 + 6 files changed, 1136 insertions(+) create mode 100644 third_party/terraform/resources/resource_storage_transfer_job.go create mode 100644 third_party/terraform/tests/resource_storage_transfer_job_test.go create mode 100644 third_party/terraform/website/docs/r/storage_transfer_job.html.markdown diff --git a/third_party/terraform/resources/resource_storage_transfer_job.go b/third_party/terraform/resources/resource_storage_transfer_job.go new file mode 100644 index 000000000000..5e9455533cc7 --- /dev/null +++ b/third_party/terraform/resources/resource_storage_transfer_job.go @@ -0,0 +1,730 @@ +package google + +import ( + "fmt" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/helper/schema" + "github.com/hashicorp/terraform/helper/validation" + "google.golang.org/api/googleapi" + "google.golang.org/api/storagetransfer/v1" + "log" + "strings" + "time" +) + +func resourceStorageTransferJob() *schema.Resource { + return &schema.Resource{ + Create: resourceStorageTransferJobCreate, + Read: resourceStorageTransferJobRead, + Update: resourceStorageTransferJobUpdate, + Delete: resourceStorageTransferJobDelete, + Importer: &schema.ResourceImporter{ + State: resourceStorageTransferJobStateImporter, + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + "description": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringLenBetween(0, 1024), + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, + "transfer_spec": { + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "object_conditions": objectConditionsSchema(), + "transfer_options": transferOptionsSchema(), + "gcs_data_sink": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: gcsDataSchema(), + }, + "gcs_data_source": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: gcsDataSchema(), + ConflictsWith: []string{"transfer_spec.aws_s3_data_source", "transfer_spec.http_data_source"}, + }, + "aws_s3_data_source": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: awsS3DataSchema(), + ConflictsWith: []string{"transfer_spec.gcs_data_source", "transfer_spec.http_data_source"}, + }, + "http_data_source": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: httpDataSchema(), + ConflictsWith: []string{"transfer_spec.aws_s3_data_source", "transfer_spec.gcs_data_source"}, + }, + }, + }, + }, + "schedule": { + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "schedule_start_date": { + Type: schema.TypeList, + Required: true, + ForceNew: true, + MaxItems: 1, + Elem: dateObjectSchema(), + }, + "schedule_end_date": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Elem: dateObjectSchema(), + }, + "start_time_of_day": { + Type: 
schema.TypeList, + Optional: true, + ForceNew: true, + MaxItems: 1, + Elem: timeObjectSchema(), + }, + }, + }, + }, + "status": { + Type: schema.TypeString, + Optional: true, + Default: "ENABLED", + ValidateFunc: validation.StringInSlice([]string{"ENABLED", "DISABLED", "DELETED"}, false), + }, + "creation_time": { + Type: schema.TypeString, + Computed: true, + }, + "last_modification_time": { + Type: schema.TypeString, + Computed: true, + }, + "deletion_time": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func objectConditionsSchema() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "min_time_elapsed_since_last_modification": { + Type: schema.TypeString, + ValidateFunc: validateDuration(), + Optional: true, + }, + "max_time_elapsed_since_last_modification": { + Type: schema.TypeString, + ValidateFunc: validateDuration(), + Optional: true, + }, + "include_prefixes": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + MaxItems: 1000, + Type: schema.TypeString, + }, + }, + "exclude_prefixes": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + MaxItems: 1000, + Type: schema.TypeString, + }, + }, + }, + }, + } +} + +func transferOptionsSchema() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "overwrite_objects_already_existing_in_sink": { + Type: schema.TypeBool, + Optional: true, + }, + "delete_objects_unique_in_sink": { + Type: schema.TypeBool, + Optional: true, + ConflictsWith: []string{"transfer_spec.transfer_options.delete_objects_from_source_after_transfer"}, + }, + "delete_objects_from_source_after_transfer": { + Type: schema.TypeBool, + Optional: true, + ConflictsWith: []string{"transfer_spec.transfer_options.delete_objects_unique_in_sink"}, + }, + }, + }, + } +} + +func timeObjectSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "hours": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + ValidateFunc: validation.IntBetween(0, 24), + }, + "minutes": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + ValidateFunc: validation.IntBetween(0, 59), + }, + "seconds": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + ValidateFunc: validation.IntBetween(0, 60), + }, + "nanos": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + ValidateFunc: validation.IntBetween(0, 999999999), + }, + }, + } +} + +func dateObjectSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "year": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + ValidateFunc: validation.IntBetween(0, 9999), + }, + + "month": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + ValidateFunc: validation.IntBetween(1, 12), + }, + + "day": { + Type: schema.TypeInt, + Required: true, + ForceNew: true, + ValidateFunc: validation.IntBetween(0, 31), + }, + }, + } +} + +func gcsDataSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "bucket_name": { + Required: true, + Type: schema.TypeString, + }, + }, + } +} + +func awsS3DataSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "bucket_name": { + Required: true, + Type: schema.TypeString, + }, + "aws_access_key": { + Type: schema.TypeList, + Required: true, + 
MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "access_key_id": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + }, + "secret_access_key": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + }, + }, + }, + }, + }, + } +} + +func httpDataSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "list_url": { + Type: schema.TypeString, + Required: true, + }, + }, + } +} + +func resourceStorageTransferJobCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + project, err := getProject(d, config) + if err != nil { + return err + } + + transferJob := &storagetransfer.TransferJob{ + Description: d.Get("description").(string), + ProjectId: project, + Status: d.Get("status").(string), + Schedule: expandTransferSchedules(d.Get("schedule").([]interface{})), + TransferSpec: expandTransferSpecs(d.Get("transfer_spec").([]interface{})), + } + + var res *storagetransfer.TransferJob + + err = retry(func() error { + res, err = config.clientStorageTransfer.TransferJobs.Create(transferJob).Do() + return err + }) + + if err != nil { + fmt.Printf("Error creating transfer job %v: %v", transferJob, err) + return err + } + + d.Set("name", res.Name) + + name := GetResourceNameFromSelfLink(res.Name) + d.SetId(fmt.Sprintf("%s/%s", project, name)) + + return resourceStorageTransferJobRead(d, meta) +} + +func resourceStorageTransferJobRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + project, err := getProject(d, config) + if err != nil { + return err + } + + name := d.Get("name").(string) + res, err := config.clientStorageTransfer.TransferJobs.Get(name).ProjectId(project).Do() + if err != nil { + return handleNotFoundError(err, d, fmt.Sprintf("Transfer Job %q", name)) + } + log.Printf("[DEBUG] Read transfer job: %v in project: %v \n\n", res.Name, res.ProjectId) + + d.Set("project", res.ProjectId) + d.Set("description", res.Description) + d.Set("status", res.Status) + d.Set("last_modification_time", res.LastModificationTime) + d.Set("creation_time", res.CreationTime) + d.Set("deletion_time", res.DeletionTime) + + err = d.Set("schedule", flattenTransferSchedule(res.Schedule)) + if err != nil { + return err + } + + d.Set("transfer_spec", flattenTransferSpec(res.TransferSpec, d)) + if err != nil { + return err + } + + return nil +} + +func resourceStorageTransferJobUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + project, err := getProject(d, config) + if err != nil { + return err + } + + transferJob := &storagetransfer.TransferJob{} + fieldMask := []string{} + + if d.HasChange("description") { + if v, ok := d.GetOk("description"); ok { + fieldMask = append(fieldMask, "description") + transferJob.Description = v.(string) + } + } + + if d.HasChange("status") { + if v, ok := d.GetOk("status"); ok { + fieldMask = append(fieldMask, "status") + transferJob.Status = v.(string) + } + } + + if d.HasChange("schedule") { + if v, ok := d.GetOk("schedule"); ok { + fieldMask = append(fieldMask, "schedule") + transferJob.Schedule = expandTransferSchedules(v.([]interface{})) + } + } + + if d.HasChange("transfer_spec") { + if v, ok := d.GetOk("transfer_spec"); ok { + fieldMask = append(fieldMask, "transfer_spec") + transferJob.TransferSpec = expandTransferSpecs(v.([]interface{})) + } + } + + updateRequest := &storagetransfer.UpdateTransferJobRequest{ + ProjectId: project, + TransferJob: transferJob, + } + + 
updateRequest.UpdateTransferJobFieldMask = strings.Join(fieldMask, ",") + + res, err := config.clientStorageTransfer.TransferJobs.Patch(d.Get("name").(string), updateRequest).Do() + if err != nil { + return err + } + + log.Printf("[DEBUG] Patched transfer job: %v\n\n", res.Name) + return nil +} + +func resourceStorageTransferJobDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + project, err := getProject(d, config) + if err != nil { + return err + } + + transferJobName := d.Get("name").(string) + + transferJob := &storagetransfer.TransferJob{ + Status: "DELETED", + } + + fieldMask := "status" + + updateRequest := &storagetransfer.UpdateTransferJobRequest{ + ProjectId: project, + TransferJob: transferJob, + } + + updateRequest.UpdateTransferJobFieldMask = fieldMask + + // Update transfer job with status set to DELETE + log.Printf("[DEBUG] Setting status to DELETE for: %v\n\n", transferJobName) + err = resource.Retry(1*time.Minute, func() *resource.RetryError { + _, err := config.clientStorageTransfer.TransferJobs.Patch(transferJobName, updateRequest).Do() + if err != nil { + return resource.RetryableError(err) + } + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 429 { + return resource.RetryableError(gerr) + } + return nil + }) + + if err != nil { + fmt.Printf("Error deleting transfer job %v: %v\n\n", transferJob, err) + return err + } + + log.Printf("[DEBUG] Deleted transfer job %v\n\n", transferJob) + + return nil +} + +func resourceStorageTransferJobStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + parts := strings.Split(d.Id(), "/") + switch len(parts) { + case 2: + d.Set("project", parts[0]) + d.Set("name", fmt.Sprintf("transferJobs/%s", parts[1])) + default: + return nil, fmt.Errorf("Invalid transfer job specifier. 
Expecting {projectId}/{transferJobName}") + } + return []*schema.ResourceData{d}, nil +} + +func expandDates(dates []interface{}) *storagetransfer.Date { + if len(dates) == 0 || dates[0] == nil { + return nil + } + + date := dates[0].([]interface{}) + return &storagetransfer.Date{ + Day: int64(extractFirstMapConfig(date)["day"].(int)), + Month: int64(extractFirstMapConfig(date)["month"].(int)), + Year: int64(extractFirstMapConfig(date)["year"].(int)), + } +} + +func flattenDate(date *storagetransfer.Date) []map[string]interface{} { + data := map[string]interface{}{ + "year": date.Year, + "month": date.Month, + "day": date.Day, + } + + return []map[string]interface{}{data} +} + +func expandTimeOfDays(times []interface{}) *storagetransfer.TimeOfDay { + if len(times) == 0 || times[0] == nil { + return nil + } + + time := times[0].([]interface{}) + return &storagetransfer.TimeOfDay{ + Hours: int64(extractFirstMapConfig(time)["hours"].(int)), + Minutes: int64(extractFirstMapConfig(time)["minutes"].(int)), + Seconds: int64(extractFirstMapConfig(time)["seconds"].(int)), + Nanos: int64(extractFirstMapConfig(time)["nanos"].(int)), + } +} + +func flattenTimeOfDay(timeOfDay *storagetransfer.TimeOfDay) []map[string]interface{} { + data := map[string]interface{}{ + "hours": timeOfDay.Hours, + "minutes": timeOfDay.Minutes, + "seconds": timeOfDay.Seconds, + "nanos": timeOfDay.Nanos, + } + + return []map[string]interface{}{data} +} + +func expandTransferSchedules(transferSchedules []interface{}) *storagetransfer.Schedule { + if len(transferSchedules) == 0 || transferSchedules[0] == nil { + return nil + } + + schedule := transferSchedules[0].(map[string]interface{}) + return &storagetransfer.Schedule{ + ScheduleStartDate: expandDates([]interface{}{schedule["schedule_start_date"]}), + ScheduleEndDate: expandDates([]interface{}{schedule["schedule_end_date"]}), + StartTimeOfDay: expandTimeOfDays([]interface{}{schedule["start_time_of_day"]}), + } +} + +func flattenTransferSchedule(transferSchedule *storagetransfer.Schedule) []map[string][]map[string]interface{} { + data := map[string][]map[string]interface{}{ + "schedule_start_date": flattenDate(transferSchedule.ScheduleStartDate), + } + + if transferSchedule.ScheduleEndDate != nil { + data["schedule_end_date"] = flattenDate(transferSchedule.ScheduleEndDate) + } + + if transferSchedule.StartTimeOfDay != nil { + data["start_time_of_day"] = flattenTimeOfDay(transferSchedule.StartTimeOfDay) + } + + return []map[string][]map[string]interface{}{data} +} + +func expandGcsData(gcsDatas []interface{}) *storagetransfer.GcsData { + if len(gcsDatas) == 0 || gcsDatas[0] == nil { + return nil + } + + gcsData := gcsDatas[0].(map[string]interface{}) + return &storagetransfer.GcsData{ + BucketName: gcsData["bucket_name"].(string), + } +} + +func flattenGcsData(gcsData *storagetransfer.GcsData) []map[string]interface{} { + data := map[string]interface{}{ + "bucket_name": gcsData.BucketName, + } + + return []map[string]interface{}{data} +} + +func expandAwsAccessKeys(awsAccessKeys []interface{}) *storagetransfer.AwsAccessKey { + if len(awsAccessKeys) == 0 || awsAccessKeys[0] == nil { + return nil + } + + awsAccessKey := awsAccessKeys[0].(map[string]interface{}) + return &storagetransfer.AwsAccessKey{ + AccessKeyId: awsAccessKey["access_key_id"].(string), + SecretAccessKey: awsAccessKey["secret_access_key"].(string), + } +} + +func flattenAwsAccessKeys(d *schema.ResourceData) []map[string]interface{} { + data := map[string]interface{}{ + "access_key_id": 
d.Get("transfer_spec.0.aws_s3_data_source.0.aws_access_key.0.access_key_id"), + "secret_access_key": d.Get("transfer_spec.0.aws_s3_data_source.0.aws_access_key.0.secret_access_key"), + } + + return []map[string]interface{}{data} +} + +func expandAwsS3Data(awsS3Datas []interface{}) *storagetransfer.AwsS3Data { + if len(awsS3Datas) == 0 || awsS3Datas[0] == nil { + return nil + } + + awsS3Data := awsS3Datas[0].(map[string]interface{}) + return &storagetransfer.AwsS3Data{ + BucketName: awsS3Data["bucket_name"].(string), + AwsAccessKey: expandAwsAccessKeys(awsS3Data["aws_access_key"].([]interface{})), + } +} + +func flattenAwsS3Data(awsS3Data *storagetransfer.AwsS3Data, d *schema.ResourceData) []map[string]interface{} { + data := map[string]interface{}{ + "bucket_name": awsS3Data.BucketName, + "aws_access_key": flattenAwsAccessKeys(d), + } + + return []map[string]interface{}{data} +} + +func expandHttpData(httpDatas []interface{}) *storagetransfer.HttpData { + if len(httpDatas) == 0 || httpDatas[0] == nil { + return nil + } + + httpData := httpDatas[0].(map[string]interface{}) + return &storagetransfer.HttpData{ + ListUrl: httpData["list_url"].(string), + } +} + +func flattenHttpData(httpData *storagetransfer.HttpData) []map[string]interface{} { + data := map[string]interface{}{ + "list_url": httpData.ListUrl, + } + + return []map[string]interface{}{data} +} + +func expandObjectConditions(conditions []interface{}) *storagetransfer.ObjectConditions { + if len(conditions) == 0 || conditions[0] == nil { + return nil + } + + condition := conditions[0].(map[string]interface{}) + return &storagetransfer.ObjectConditions{ + ExcludePrefixes: convertStringArr(condition["exclude_prefixes"].([]interface{})), + IncludePrefixes: convertStringArr(condition["include_prefixes"].([]interface{})), + MaxTimeElapsedSinceLastModification: condition["max_time_elapsed_since_last_modification"].(string), + MinTimeElapsedSinceLastModification: condition["min_time_elapsed_since_last_modification"].(string), + } +} + +func flattenObjectCondition(condition *storagetransfer.ObjectConditions) []map[string]interface{} { + data := map[string]interface{}{ + "exclude_prefixes": condition.ExcludePrefixes, + "include_prefixes": condition.IncludePrefixes, + "max_time_elapsed_since_last_modification": condition.MaxTimeElapsedSinceLastModification, + "min_time_elapsed_since_last_modification": condition.MinTimeElapsedSinceLastModification, + } + return []map[string]interface{}{data} +} + +func expandTransferOptions(options []interface{}) *storagetransfer.TransferOptions { + if len(options) == 0 || options[0] == nil { + return nil + } + + option := options[0].(map[string]interface{}) + return &storagetransfer.TransferOptions{ + DeleteObjectsFromSourceAfterTransfer: option["delete_objects_from_source_after_transfer"].(bool), + DeleteObjectsUniqueInSink: option["delete_objects_unique_in_sink"].(bool), + OverwriteObjectsAlreadyExistingInSink: option["overwrite_objects_already_existing_in_sink"].(bool), + } +} + +func flattenTransferOption(option *storagetransfer.TransferOptions) []map[string]interface{} { + data := map[string]interface{}{ + "delete_objects_from_source_after_transfer": option.DeleteObjectsFromSourceAfterTransfer, + "delete_objects_unique_in_sink": option.DeleteObjectsUniqueInSink, + "overwrite_objects_already_existing_in_sink": option.OverwriteObjectsAlreadyExistingInSink, + } + + return []map[string]interface{}{data} +} + +func expandTransferSpecs(transferSpecs []interface{}) *storagetransfer.TransferSpec { + if 
len(transferSpecs) == 0 || transferSpecs[0] == nil { + return nil + } + + transferSpec := transferSpecs[0].(map[string]interface{}) + return &storagetransfer.TransferSpec{ + GcsDataSink: expandGcsData(transferSpec["gcs_data_sink"].([]interface{})), + ObjectConditions: expandObjectConditions(transferSpec["object_conditions"].([]interface{})), + TransferOptions: expandTransferOptions(transferSpec["transfer_options"].([]interface{})), + GcsDataSource: expandGcsData(transferSpec["gcs_data_source"].([]interface{})), + AwsS3DataSource: expandAwsS3Data(transferSpec["aws_s3_data_source"].([]interface{})), + HttpDataSource: expandHttpData(transferSpec["http_data_source"].([]interface{})), + } +} + +func flattenTransferSpec(transferSpec *storagetransfer.TransferSpec, d *schema.ResourceData) []map[string][]map[string]interface{} { + + data := map[string][]map[string]interface{}{ + "gcs_data_sink": flattenGcsData(transferSpec.GcsDataSink), + } + + if transferSpec.ObjectConditions != nil { + data["object_conditions"] = flattenObjectCondition(transferSpec.ObjectConditions) + } + if transferSpec.TransferOptions != nil { + data["transfer_options"] = flattenTransferOption(transferSpec.TransferOptions) + } + if transferSpec.GcsDataSource != nil { + data["gcs_data_source"] = flattenGcsData(transferSpec.GcsDataSource) + } else if transferSpec.AwsS3DataSource != nil { + data["aws_s3_data_source"] = flattenAwsS3Data(transferSpec.AwsS3DataSource, d) + } else if transferSpec.HttpDataSource != nil { + data["http_data_source"] = flattenHttpData(transferSpec.HttpDataSource) + } + + return []map[string][]map[string]interface{}{data} +} diff --git a/third_party/terraform/tests/resource_storage_transfer_job_test.go b/third_party/terraform/tests/resource_storage_transfer_job_test.go new file mode 100644 index 000000000000..ac8fa64b5cc3 --- /dev/null +++ b/third_party/terraform/tests/resource_storage_transfer_job_test.go @@ -0,0 +1,162 @@ +package google + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +func TestAccStorageTransferJob_basic(t *testing.T) { + t.Parallel() + + testDataSourceBucketName := acctest.RandString(10) + testDataSinkName := acctest.RandString(10) + testTransferJobDescription := acctest.RandString(10) + testUpdatedDataSourceBucketName := acctest.RandString(10) + testUpdatedDataSinkBucketName := acctest.RandString(10) + testUpdatedTransferJobDescription := acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccStorageTransferJobDestroy, + Steps: []resource.TestStep{ + { + Config: testAccStorageTransferJob_basic(getTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription), + }, + { + ResourceName: "google_storage_transfer_job.transfer_job", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccStorageTransferJob_basic(getTestProjectFromEnv(), testUpdatedDataSourceBucketName, testDataSinkName, testTransferJobDescription), + }, + { + ResourceName: "google_storage_transfer_job.transfer_job", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccStorageTransferJob_basic(getTestProjectFromEnv(), testUpdatedDataSourceBucketName, testUpdatedDataSinkBucketName, testTransferJobDescription), + }, + { + ResourceName: "google_storage_transfer_job.transfer_job", + ImportState: 
true, + ImportStateVerify: true, + }, + { + Config: testAccStorageTransferJob_basic(getTestProjectFromEnv(), testUpdatedDataSourceBucketName, testUpdatedDataSinkBucketName, testUpdatedTransferJobDescription), + }, + { + ResourceName: "google_storage_transfer_job.transfer_job", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccStorageTransferJob_basic(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string) string { + return fmt.Sprintf(` +data "google_storage_transfer_project_service_account" "default" { + project = "%s" +} + +resource "google_storage_bucket" "data_source" { + name = "%s" + project = "%s" + force_destroy = true +} + +resource "google_storage_bucket_iam_member" "data_source" { + bucket = "${google_storage_bucket.data_source.name}" + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" +} + +resource "google_storage_bucket" "data_sink" { + name = "%s" + project = "%s" + force_destroy = true +} + +resource "google_storage_bucket_iam_member" "data_sink" { + bucket = "${google_storage_bucket.data_sink.name}" + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" +} + +resource "google_storage_transfer_job" "transfer_job" { + description = "%s" + project = "%s" + + transfer_spec { + gcs_data_source { + bucket_name = "${google_storage_bucket.data_source.name}" + } + gcs_data_sink { + bucket_name = "${google_storage_bucket.data_sink.name}" + } + } + + schedule { + schedule_start_date { + year = 2018 + month = 10 + day = 1 + } + schedule_end_date { + year = 2019 + month = 10 + day = 1 + } + start_time_of_day { + hours = 0 + minutes = 30 + seconds = 0 + nanos = 0 + } + } + + depends_on = [ + "google_storage_bucket_iam_member.data_source", + "google_storage_bucket_iam_member.data_sink", + ] +} +`, project, dataSourceBucketName, project, dataSinkBucketName, project, transferJobDescription, project) +} + +func testAccStorageTransferJobDestroy(s *terraform.State) error { + config := testAccProvider.Meta().(*Config) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_storage_transfer_job" { + continue + } + + rs_attr := rs.Primary.Attributes + name, ok := rs_attr["name"] + if !ok { + return fmt.Errorf("No name set") + } + + project, err := getTestProject(rs.Primary, config) + if err != nil { + return err + } + + res, err := config.clientStorageTransfer.TransferJobs.Get(name).ProjectId(project).Do() + if res.Status != "DELETED" { + return fmt.Errorf("Transfer Job not set to DELETED") + } + if err != nil { + return fmt.Errorf("Transfer Job does not exist, should exist and be DELETED") + } + } + + return nil +} diff --git a/third_party/terraform/utils/provider.go.erb b/third_party/terraform/utils/provider.go.erb index 090597fd26f8..08f7edde32f4 100644 --- a/third_party/terraform/utils/provider.go.erb +++ b/third_party/terraform/utils/provider.go.erb @@ -255,6 +255,7 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) { "google_storage_object_acl": resourceStorageObjectAcl(), "google_storage_default_object_acl": resourceStorageDefaultObjectAcl(), "google_storage_notification": resourceStorageNotification(), + "google_storage_transfer_job": resourceStorageTransferJob(), }, ) } diff --git a/third_party/terraform/utils/validation.go b/third_party/terraform/utils/validation.go index b840b57ae9d7..ffd879529b80 100644 --- 
a/third_party/terraform/utils/validation.go +++ b/third_party/terraform/utils/validation.go @@ -6,6 +6,7 @@ import ( "regexp" "strconv" "strings" + "time" "github.com/hashicorp/terraform/helper/schema" "github.com/hashicorp/terraform/helper/validation" @@ -187,6 +188,23 @@ func validateProjectName() schema.SchemaValidateFunc { } } +func validateDuration() schema.SchemaValidateFunc { + return func(i interface{}, k string) (s []string, es []error) { + v, ok := i.(string) + if !ok { + es = append(es, fmt.Errorf("expected type of %s to be string", k)) + return + } + + if _, err := time.ParseDuration(v); err != nil { + es = append(es, fmt.Errorf("expected %s to be a duration, but parsing gave an error: %s", k, err.Error())) + return + } + + return + } +} + // StringNotInSlice returns a SchemaValidateFunc which tests if the provided value // is of type string and that it matches none of the element in the invalid slice. // if ignorecase is true, case is ignored. diff --git a/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown b/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown new file mode 100644 index 000000000000..a30df5f884f7 --- /dev/null +++ b/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown @@ -0,0 +1,216 @@ +--- +layout: "google" +page_title: "Google: google_storage_transfer_job" +sidebar_current: "docs-google-storage-transfer-job-x" +description: |- + Creates a new Transfer Job in Google Cloud Storage Transfer. +--- + +# google\_storage\_transfer\_job + +Creates a new Transfer Job in Google Cloud Storage Transfer. + +To get more information about Google Cloud Storage Transfer, see: + +* [Overview](https://cloud.google.com/storage-transfer/docs/overview) +* [API documentation](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs#TransferJob) +* How-to Guides + * [Configuring Access to Data Sources and Sinks](https://cloud.google.com/storage-transfer/docs/configure-access) + +## Example Usage + +Example creating a nightly Transfer Job from an AWS S3 Bucket to a GCS bucket. 
+
+```hcl
+
+data "google_storage_transfer_project_service_account" "default" {
+  project = "${var.project}"
+}
+
+resource "google_storage_bucket" "s3-backup-bucket" {
+  name          = "${var.aws_s3_bucket}"
+  storage_class = "NEARLINE"
+  project       = "${var.project}"
+}
+
+resource "google_storage_bucket_iam_member" "s3-backup-bucket" {
+  bucket = "${var.aws_s3_bucket}"
+  role   = "roles/storage.admin"
+  member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
+
+  depends_on = [
+    "google_storage_bucket.s3-backup-bucket",
+  ]
+}
+
+resource "google_storage_transfer_job" "s3-bucket-nightly-backup" {
+  description = "Nightly backup of S3 bucket"
+  project     = "${var.project}"
+
+  transfer_spec {
+    object_conditions {
+      max_time_elapsed_since_last_modification = "600s"
+      exclude_prefixes = [
+        "requests.gz",
+      ]
+    }
+    transfer_options {
+      delete_objects_unique_in_sink = false
+    }
+    aws_s3_data_source {
+      bucket_name = "${var.aws_s3_bucket}"
+      aws_access_key {
+        access_key_id     = "${var.aws_access_key}"
+        secret_access_key = "${var.aws_secret_key}"
+      }
+    }
+    gcs_data_sink {
+      bucket_name = "${var.aws_s3_bucket}-backup"
+    }
+  }
+
+  schedule {
+    schedule_start_date {
+      year  = 2018
+      month = 10
+      day   = 1
+    }
+    schedule_end_date {
+      year  = 2019
+      month = 1
+      day   = 15
+    }
+    start_time_of_day {
+      hours   = 23
+      minutes = 30
+      seconds = 0
+      nanos   = 0
+    }
+  }
+
+  depends_on = [
+    "google_storage_bucket_iam_member.s3-backup-bucket",
+  ]
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `description` - (Required) Unique description to identify the Transfer Job.
+
+* `transfer_spec` - (Required) Transfer specification. Structure documented below.
+
+* `schedule` - (Required) Schedule specification defining when the Transfer Job should be scheduled to start and end, and what time of day to run. Structure documented below.
+
+- - -
+
+* `project` - (Optional) The project in which the resource belongs. If it
+  is not provided, the provider project is used.
+
+* `status` - (Optional) Status of the job. Default: `ENABLED`. **NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.**
+
+The `transfer_spec` block supports:
+
+* `gcs_data_sink` - (Required) A Google Cloud Storage data sink. Structure documented below.
+
+* `object_conditions` - (Optional) Only objects that satisfy these object conditions are included in the set of data source and data sink objects. Object conditions based on objects' `last_modification_time` do not exclude objects in a data sink. Structure documented below.
+
+* `transfer_options` - (Optional) Characteristics of how to treat files from the data source and sink during the job. If the option `delete_objects_unique_in_sink` is true, object conditions based on objects' `last_modification_time` are ignored and do not exclude objects in a data source or a data sink. Structure documented below.
+
+* `gcs_data_source` - (Optional) A Google Cloud Storage data source. Structure documented below.
+
+* `aws_s3_data_source` - (Optional) An AWS S3 data source. Structure documented below.
+
+* `http_data_source` - (Optional) An HTTP URL data source. Structure documented below.
+
+The `schedule` block supports:
+
+* `schedule_start_date` - (Required) The first day the recurring transfer is scheduled to run.
+If `schedule_start_date` is in the past, the transfer will run for the first time on the following day. Structure documented below.
+
+* `schedule_end_date` - (Optional) The last day the recurring transfer will be run. If `schedule_end_date` is the same as `schedule_start_date`, the transfer will be executed only once. Structure documented below.
+
+* `start_time_of_day` - (Optional) The time in UTC at which the transfer will be scheduled to start in a day. Transfers may start later than this time. If not specified, recurring and one-time transfers that are scheduled to run today will run immediately; recurring transfers that are scheduled to run on a future date will start at approximately midnight UTC on that date. Note that when configuring a transfer with the Cloud Platform Console, the transfer's start time in a day is specified in your local timezone. Structure documented below.
+
+The `object_conditions` block supports:
+
+* `max_time_elapsed_since_last_modification` - (Optional) A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
+
+* `min_time_elapsed_since_last_modification` - (Optional) A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
+
+* `include_prefixes` - (Optional) If `include_prefixes` is specified, objects that satisfy the object conditions must have names that start with one of the `include_prefixes` and that do not start with any of the `exclude_prefixes`. If `include_prefixes` is not specified, all objects except those that have names starting with one of the `exclude_prefixes` must satisfy the object conditions. See [Requirements](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/TransferSpec#ObjectConditions).
+
+* `exclude_prefixes` - (Optional) `exclude_prefixes` must follow the requirements described for `include_prefixes`. See [Requirements](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/TransferSpec#ObjectConditions).
+
+The `transfer_options` block supports:
+
+* `overwrite_objects_already_existing_in_sink` - (Optional) Whether overwriting objects that already exist in the sink is allowed.
+
+* `delete_objects_unique_in_sink` - (Optional) Whether objects that exist only in the sink should be deleted. Note that this option and `delete_objects_from_source_after_transfer` are mutually exclusive.
+
+* `delete_objects_from_source_after_transfer` - (Optional) Whether objects should be deleted from the source after they are transferred to the sink. Note that this option and `delete_objects_unique_in_sink` are mutually exclusive.
+
+The `gcs_data_sink` block supports:
+
+* `bucket_name` - (Required) Google Cloud Storage bucket name.
+
+The `gcs_data_source` block supports:
+
+* `bucket_name` - (Required) Google Cloud Storage bucket name.
+
+The `aws_s3_data_source` block supports:
+
+* `bucket_name` - (Required) S3 Bucket name.
+
+* `aws_access_key` - (Required) AWS credentials block.
+
+The `aws_access_key` block supports:
+
+* `access_key_id` - (Required) AWS Key ID.
+
+* `secret_access_key` - (Required) AWS Secret Access Key.
+
+The `http_data_source` block supports:
+
+* `list_url` - (Required) The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.
+
+The `schedule_start_date` and `schedule_end_date` blocks support:
+
+* `year` - (Required) Year of date. Must be from 1 to 9999.
+
+* `month` - (Required) Month of year.
+Must be from 1 to 12.
+
+* `day` - (Required) Day of month. Must be from 1 to 31 and valid for the year and month.
+
+The `start_time_of_day` block supports:
+
+* `hours` - (Required) Hours of day in 24 hour format. Should be from 0 to 23.
+
+* `minutes` - (Required) Minutes of hour of day. Must be from 0 to 59.
+
+* `seconds` - (Required) Seconds of minutes of the time. Must normally be from 0 to 59.
+
+* `nanos` - (Required) Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
+
+## Attributes Reference
+
+In addition to the arguments listed above, the following computed attributes are
+exported:
+
+* `name` - The name of the Transfer Job.
+
+* `creation_time` - When the Transfer Job was created.
+
+* `last_modification_time` - When the Transfer Job was last modified.
+
+* `deletion_time` - When the Transfer Job was deleted.
+
+## Import
+
+Storage Transfer Jobs can be imported using the Transfer Job's `project` and `name` (without the `transferJobs/` prefix), e.g.
+
+```
+$ terraform import google_storage_transfer_job.nightly-backup-transfer-job my-project-1asd32/8422144862922355674
+```
diff --git a/third_party/terraform/website/google.erb b/third_party/terraform/website/google.erb
index 328ecec2230e..c78c26675abc 100644
--- a/third_party/terraform/website/google.erb
+++ b/third_party/terraform/website/google.erb
@@ -823,6 +823,15 @@
+ > + Google Storage Transfer Resources + + + > Google Key Management Service Resources
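Beyond the S3-to-GCS example on the documentation page added above, the following is a minimal sketch of a GCS-to-GCS job that combines the `object_conditions` and `transfer_options` arguments described in the Argument Reference. It is not part of the patch: the bucket names and `var.project` are placeholders, and both buckets are assumed to already grant the Storage Transfer service account the required roles, as in the acceptance test.

```hcl
resource "google_storage_transfer_job" "gcs-nightly-move" {
  description = "Nightly move of export objects into the archive bucket"
  project     = "${var.project}"

  transfer_spec {
    object_conditions {
      # Only objects whose names start with this prefix are considered.
      include_prefixes = [
        "exports/",
      ]
    }
    transfer_options {
      # Move semantics: objects are removed from the source once transferred.
      delete_objects_from_source_after_transfer = true
    }
    gcs_data_source {
      bucket_name = "my-staging-bucket" # placeholder
    }
    gcs_data_sink {
      bucket_name = "my-archive-bucket" # placeholder
    }
  }

  schedule {
    schedule_start_date {
      year  = 2019
      month = 1
      day   = 1
    }
    start_time_of_day {
      hours   = 4
      minutes = 0
      seconds = 0
      nanos   = 0
    }
  }
}
```

Because `delete_objects_from_source_after_transfer` and `delete_objects_unique_in_sink` are mutually exclusive, the sketch sets only the former; `schedule_end_date` is omitted so the job recurs indefinitely.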