BigQuery: support encryptionConfiguration in google_bigquery_data_transfer_config #8045

Merged
3 changes: 3 additions & 0 deletions .changelog/11478.txt
@@ -0,0 +1,3 @@
```release-note:enhancement
bigquery: added `encryption_configuration` field to `google_bigquery_data_transfer_config` resource
```
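
In Terraform configuration, the new field surfaces as a nested `encryption_configuration` block on the resource. A minimal sketch of the intended usage (resource names and the referenced crypto key are illustrative, not part of this change):

```hcl
# Illustrative sketch only: assumes google_bigquery_dataset.example and
# google_kms_crypto_key.example are defined elsewhere in the configuration.
resource "google_bigquery_data_transfer_config" "example" {
  display_name           = "example-scheduled-query"
  data_source_id         = "scheduled_query"
  schedule               = "first sunday of quarter 00:00"
  destination_dataset_id = google_bigquery_dataset.example.dataset_id

  params = {
    destination_table_name_template = "my_table"
    write_disposition               = "WRITE_APPEND"
    query                           = "SELECT name FROM my_table WHERE x = 'y'"
  }

  # New in this change: use a customer-managed KMS key (CMEK) for the transfer.
  encryption_configuration {
    kms_key_name = google_kms_crypto_key.example.id
  }
}
```
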
@@ -176,6 +176,21 @@ email address of the user who owns this transfer config.`,
},
},
},
"encryption_configuration": {
Type: schema.TypeList,
Optional: true,
Description: `Represents the encryption configuration for a transfer.`,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"kms_key_name": {
Type: schema.TypeString,
Required: true,
Description: `The name of the KMS key used for encrypting BigQuery data.`,
},
},
},
},
"location": {
Type: schema.TypeString,
Optional: true,
@@ -348,6 +363,12 @@ func resourceBigqueryDataTransferConfigCreate(d *schema.ResourceData, meta interface{}) error {
} else if v, ok := d.GetOkExists("data_refresh_window_days"); !tpgresource.IsEmptyValue(reflect.ValueOf(dataRefreshWindowDaysProp)) && (ok || !reflect.DeepEqual(v, dataRefreshWindowDaysProp)) {
obj["dataRefreshWindowDays"] = dataRefreshWindowDaysProp
}
encryptionConfigurationProp, err := expandBigqueryDataTransferConfigEncryptionConfiguration(d.Get("encryption_configuration"), d, config)
if err != nil {
return err
} else if v, ok := d.GetOkExists("encryption_configuration"); !tpgresource.IsEmptyValue(reflect.ValueOf(encryptionConfigurationProp)) && (ok || !reflect.DeepEqual(v, encryptionConfigurationProp)) {
obj["encryptionConfiguration"] = encryptionConfigurationProp
}
disabledProp, err := expandBigqueryDataTransferConfigDisabled(d.Get("disabled"), d, config)
if err != nil {
return err
@@ -516,6 +537,9 @@ func resourceBigqueryDataTransferConfigRead(d *schema.ResourceData, meta interface{}) error {
if err := d.Set("data_refresh_window_days", flattenBigqueryDataTransferConfigDataRefreshWindowDays(res["dataRefreshWindowDays"], d, config)); err != nil {
return fmt.Errorf("Error reading Config: %s", err)
}
if err := d.Set("encryption_configuration", flattenBigqueryDataTransferConfigEncryptionConfiguration(res["encryptionConfiguration"], d, config)); err != nil {
return fmt.Errorf("Error reading Config: %s", err)
}
if err := d.Set("disabled", flattenBigqueryDataTransferConfigDisabled(res["disabled"], d, config)); err != nil {
return fmt.Errorf("Error reading Config: %s", err)
}
@@ -584,6 +608,12 @@ func resourceBigqueryDataTransferConfigUpdate(d *schema.ResourceData, meta interface{}) error {
} else if v, ok := d.GetOkExists("data_refresh_window_days"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, dataRefreshWindowDaysProp)) {
obj["dataRefreshWindowDays"] = dataRefreshWindowDaysProp
}
encryptionConfigurationProp, err := expandBigqueryDataTransferConfigEncryptionConfiguration(d.Get("encryption_configuration"), d, config)
if err != nil {
return err
} else if v, ok := d.GetOkExists("encryption_configuration"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, encryptionConfigurationProp)) {
obj["encryptionConfiguration"] = encryptionConfigurationProp
}
disabledProp, err := expandBigqueryDataTransferConfigDisabled(d.Get("disabled"), d, config)
if err != nil {
return err
@@ -835,6 +865,23 @@ func flattenBigqueryDataTransferConfigDataRefreshWindowDays(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
return v // let terraform core handle it otherwise
}

func flattenBigqueryDataTransferConfigEncryptionConfiguration(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
if v == nil {
return nil
}
original := v.(map[string]interface{})
if len(original) == 0 {
return nil
}
transformed := make(map[string]interface{})
transformed["kms_key_name"] =
flattenBigqueryDataTransferConfigEncryptionConfigurationKmsKeyName(original["kmsKeyName"], d, config)
return []interface{}{transformed}
}
func flattenBigqueryDataTransferConfigEncryptionConfigurationKmsKeyName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
return v
}

func flattenBigqueryDataTransferConfigDisabled(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} {
return v
}
@@ -945,6 +992,29 @@ func expandBigqueryDataTransferConfigDataRefreshWindowDays(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
return v, nil
}

func expandBigqueryDataTransferConfigEncryptionConfiguration(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
l := v.([]interface{})
if len(l) == 0 || l[0] == nil {
return nil, nil
}
raw := l[0]
original := raw.(map[string]interface{})
transformed := make(map[string]interface{})

transformedKmsKeyName, err := expandBigqueryDataTransferConfigEncryptionConfigurationKmsKeyName(original["kms_key_name"], d, config)
if err != nil {
return nil, err
} else if val := reflect.ValueOf(transformedKmsKeyName); val.IsValid() && !tpgresource.IsEmptyValue(val) {
transformed["kmsKeyName"] = transformedKmsKeyName
}

return transformed, nil
}

func expandBigqueryDataTransferConfigEncryptionConfigurationKmsKeyName(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
return v, nil
}

func expandBigqueryDataTransferConfigDisabled(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) {
return v, nil
}
@@ -386,6 +386,29 @@ func testAccBigqueryDataTransferConfig_scheduledQuery_update(t *testing.T) {
})
}

func testAccBigqueryDataTransferConfig_CMEK(t *testing.T) {
// Uses time.Now
acctest.SkipIfVcr(t)
random_suffix := acctest.RandString(t, 10)

acctest.VcrTest(t, resource.TestCase{
PreCheck: func() { acctest.AccTestPreCheck(t) },
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigqueryDataTransferConfig_CMEK_basic(random_suffix),
},
{
ResourceName: "google_bigquery_data_transfer_config.query_config",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"location"},
},
},
})
}

func testAccBigqueryDataTransferConfig_scheduledQuery_no_destination(t *testing.T) {
// Uses time.Now
acctest.SkipIfVcr(t)
@@ -770,6 +793,74 @@ resource "google_bigquery_data_transfer_config" "copy_config" {
`, random_suffix, random_suffix, random_suffix)
}

func testAccBigqueryDataTransferConfig_CMEK_basic(random_suffix string) string {
return fmt.Sprintf(`
data "google_project" "project" {
}

resource "google_kms_key_ring" "example_keyring" {
name = "keyring-test-%s"
location = "us-central1"
}

resource "google_kms_crypto_key" "example_crypto_key" {
name = "crypto-key-%s"
key_ring = google_kms_key_ring.example_keyring.id
purpose = "ENCRYPT_DECRYPT"
}

resource "google_service_account" "bqwriter%s" {
account_id = "bqwriter%s"
}

resource "google_project_iam_member" "data_editor" {
project = data.google_project.project.project_id

role = "roles/bigquery.dataEditor"
member = "serviceAccount:${google_service_account.bqwriter%s.email}"
}

data "google_iam_policy" "owner" {
binding {
role = "roles/bigquery.dataOwner"

members = [
"serviceAccount:${google_service_account.bqwriter%s.email}",
]
}
}

resource "google_bigquery_dataset_iam_policy" "dataset" {
dataset_id = google_bigquery_dataset.my_dataset.dataset_id
policy_data = data.google_iam_policy.owner.policy_data
}

resource "google_bigquery_data_transfer_config" "query_config" {
depends_on = [ google_kms_crypto_key.example_crypto_key ]
encryption_configuration {
kms_key_name = google_kms_crypto_key.example_crypto_key.id
}
display_name = "my-query-%s"
location = "us-central1"
data_source_id = "scheduled_query"
schedule = "first sunday of quarter 00:00"
destination_dataset_id = google_bigquery_dataset.my_dataset.dataset_id
params = {
destination_table_name_template = "my_table"
write_disposition = "WRITE_APPEND"
query = "SELECT name FROM table WHERE x = 'y'"
}
}

resource "google_bigquery_dataset" "my_dataset" {
dataset_id = "my_dataset_%s"
friendly_name = "foo"
description = "bar"
location = "us-central1"
}
`, random_suffix, random_suffix, random_suffix, random_suffix, random_suffix, random_suffix, random_suffix, random_suffix)
}

func testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, path, table string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "dataset" {
62 changes: 62 additions & 0 deletions website/docs/r/bigquery_data_transfer_config.html.markdown
@@ -70,6 +70,57 @@ resource "google_bigquery_dataset" "my_dataset" {
location = "asia-northeast1"
}
```
## Example Usage - Bigquerydatatransfer Config Cmek


```hcl
data "google_project" "project" {
}

resource "google_project_iam_member" "permissions" {
project = data.google_project.project.project_id
role = "roles/iam.serviceAccountTokenCreator"
member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com"
}

resource "google_bigquery_data_transfer_config" "query_config_cmek" {
depends_on = [google_project_iam_member.permissions]

display_name = ""
location = "asia-northeast1"
data_source_id = "scheduled_query"
schedule = "first sunday of quarter 00:00"
destination_dataset_id = google_bigquery_dataset.my_dataset.dataset_id
params = {
destination_table_name_template = "my_table"
write_disposition = "WRITE_APPEND"
query = "SELECT name FROM tabl WHERE x = 'y'"
}

encryption_configuration {
kms_key_name = google_kms_crypto_key.crypto_key.id
}
}

resource "google_bigquery_dataset" "my_dataset" {
depends_on = [google_project_iam_member.permissions]

dataset_id = "example_dataset"
friendly_name = "foo"
description = "bar"
location = "asia-northeast1"
}

resource "google_kms_crypto_key" "crypto_key" {
name = "example-key"
key_ring = google_kms_key_ring.key_ring.id
}

resource "google_kms_key_ring" "key_ring" {
name = "example-keyring"
location = "us"
}
```
## Example Usage - Bigquerydatatransfer Config Salesforce


@@ -161,6 +212,11 @@ The following arguments are supported:
just [today-1]. Only valid if the data source supports the feature.
Set the value to 0 to use the default value.

* `encryption_configuration` -
(Optional)
Represents the encryption configuration for a transfer.
Structure is [documented below](#nested_encryption_configuration).

* `disabled` -
(Optional)
When set to true, no runs are scheduled for a given transfer.
@@ -221,6 +277,12 @@ The following arguments are supported:
(Required)
If true, email notifications will be sent on transfer run failures.

<a name="nested_encryption_configuration"></a>The `encryption_configuration` block supports:

* `kms_key_name` -
(Required)
The name of the KMS key used for encrypting BigQuery data.
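
A rough sketch of the block (names illustrative); `kms_key_name` is the full Cloud KMS key resource name, which a `google_kms_crypto_key` reference typically resolves to:

```hcl
encryption_configuration {
  # e.g. "projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key"
  kms_key_name = google_kms_crypto_key.example.id
}
```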

<a name="nested_sensitive_params"></a>The `sensitive_params` block supports:

* `secret_access_key` -