diff --git a/mmv1/products/bigquery/Table.yaml b/mmv1/products/bigquery/Table.yaml
index 02eb4ac6a67f..b1dc8ab5a89a 100644
--- a/mmv1/products/bigquery/Table.yaml
+++ b/mmv1/products/bigquery/Table.yaml
@@ -550,3 +550,75 @@ properties:
       in the namespaced format, for example "123456789012/environment" where 123456789012 is the
       ID of the parent organization or project resource for this tag key. Tag value is expected
       to be the short name, for example "Production".
+  - name: externalCatalogTableOptions
+    type: NestedObject
+    description: |
+      Options defining open source compatible table.
+    min_version: beta
+    properties:
+      - name: 'parameters'
+        type: KeyValuePairs
+        description: |
+          A map of key value pairs defining the parameters and properties of the open source table.
+          Corresponds with hive meta store table parameters. Maximum size of 4Mib.
+        min_version: beta
+      - name: 'storageDescriptor'
+        type: NestedObject
+        description: |
+          A storage descriptor containing information about the physical storage of this table.
+        min_version: beta
+        properties:
+          - name: 'storageUri'
+            type: String
+            description: |
+              The physical location of the table (e.g. `gs://spark-dataproc-data/pangea-data/case_sensitive/`
+              or `gs://spark-dataproc-data/pangea-data/*`). The maximum length is 2056 bytes.
+            min_version: beta
+          - name: 'inputFormat'
+            type: String
+            description: |
+              Specifies the fully qualified class name of the InputFormat
+              (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat").
+              The maximum length is 128 characters.
+            min_version: beta
+          - name: 'outputFormat'
+            type: String
+            description: |
+              Specifies the fully qualified class name of the OutputFormat
+              (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat").
+              The maximum length is 128 characters.
+            min_version: beta
+          - name: 'serdeInfo'
+            type: NestedObject
+            description: |
+              Serializer and deserializer information.
+            min_version: beta
+            properties:
+              - name: 'name'
+                type: String
+                description: |
+                  Name of the SerDe. The maximum length is 256 characters.
+                min_version: beta
+              - name: 'serializationLibrary'
+                type: String
+                description: |
+                  Specifies a fully-qualified class name of the serialization library that is
+                  responsible for the translation of data between table representation and the
+                  underlying low-level input and output format structures.
+                  The maximum length is 256 characters.
+                required: true
+                min_version: beta
+              - name: 'parameters'
+                type: KeyValuePairs
+                description: |
+                  Key-value pairs that define the initialization parameters for the serialization
+                  library. Maximum size 10 Kib.
+                min_version: beta
+      - name: 'connectionId'
+        type: String
+        description: |
+          The connection specifying the credentials to be used to read external storage, such as
+          Azure Blob, Cloud Storage, or S3. The connection is needed to read the open source table
+          from BigQuery Engine. The connection_id can have the form `<project_id>.<location_id>.<connection_id>`
+          or `projects/<project_id>/locations/<location_id>/connections/<connection_id>`.
+        min_version: beta
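For reference, a minimal sketch of how the new block would be written in a configuration, assuming the `google-beta` provider; the dataset, bucket, SerDe class, and connection identifiers below are placeholders and do not come from this change:

```hcl
# Hypothetical usage of the external_catalog_table_options block defined above.
# All IDs, URIs, and class names are placeholders.
resource "google_bigquery_table" "example" {
  provider = google-beta

  dataset_id          = "example_dataset"
  table_id            = "example_open_source_table"
  deletion_protection = false

  external_catalog_table_options {
    # Open source table properties, corresponding to Hive metastore table parameters.
    parameters = {
      "owner" = "data-platform"
    }

    storage_descriptor {
      location_uri  = "gs://example-bucket/path/to/table"
      input_format  = "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"
      output_format = "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"

      serde_info {
        name                  = "orc-serde"
        serialization_library = "org.apache.hadoop.hive.ql.io.orc.OrcSerde"
      }
    }

    # Either the short "<project_id>.<location_id>.<connection_id>" form or the full
    # "projects/.../locations/.../connections/..." resource name is accepted.
    connection_id = "example-project.us.example-connection"
  }
}
```

Both `connection_id` spellings are normalized by the existing `bigQueryTableConnectionIdSuppress` diff-suppress function, which the new field reuses later in this change.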
diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl
similarity index 91%
rename from mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go
rename to mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl
index 8528c5fb14d8..d1a0c7b1f18d 100644
--- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go
+++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl
@@ -181,9 +181,9 @@ func bigQueryTableSchemaDiffSuppress(name, old, new string, _ *schema.ResourceDa
 func bigQueryTableConnectionIdSuppress(name, old, new string, _ *schema.ResourceData) bool {
   // API accepts connectionId in below two formats
-  // "{{project}}.{{location}}.{{connection_id}}" or
-  // "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}".
-  // but always returns "{{project}}.{{location}}.{{connection_id}}"
+  // "<project_id>.<location_id>.<connection_id>" or
+  // "projects/<project_id>/locations/<location_id>/connections/<connection_id>".
+  // but always returns "<project_id>.<location_id>.<connection_id>"
   if tpgresource.IsEmptyValue(reflect.ValueOf(old)) || tpgresource.IsEmptyValue(reflect.ValueOf(new)) {
     return false
@@ -861,13 +861,13 @@ func ResourceBigQueryTable() *schema.Resource {
       // ConnectionId: [Optional] The connection specifying the credentials
       // to be used to read external storage, such as Azure Blob,
       // Cloud Storage, or S3. The connectionId can have the form
-      // "{{project}}.{{location}}.{{connection_id}}" or
-      // "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}".
+      // "<project_id>.<location_id>.<connection_id>" or
+      // "projects/<project_id>/locations/<location_id>/connections/<connection_id>".
       "connection_id": {
         Type:             schema.TypeString,
         Optional:         true,
         DiffSuppressFunc: bigQueryTableConnectionIdSuppress,
-        Description:      `The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "{{project}}.{{location}}.{{connection_id}}" or "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}".`,
+        Description:      `The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "<project_id>.<location_id>.<connection_id>" or "projects/<project_id>/locations/<location_id>/connections/<connection_id>".`,
       },
       "reference_file_schema_uri": {
         Type:     schema.TypeString,
@@ -1452,6 +1452,92 @@ func ResourceBigQueryTable() *schema.Resource {
         Elem:        &schema.Schema{Type: schema.TypeString},
         Description: `The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production".`,
       },
+      {{- if ne $.TargetVersionName "ga" }}
+      // ExternalCatalogTableOptions: [Optional] Options defining open source compatible table.
+      "external_catalog_table_options": {
+        Type:        schema.TypeList,
+        Optional:    true,
+        MaxItems:    1,
+        Description: `Options defining open source compatible table.`,
+        Elem: &schema.Resource{
+          Schema: map[string]*schema.Schema{
+            // Parameters: [Optional] The parameters of the table.
+            "parameters": {
+              Type:        schema.TypeMap,
+              Optional:    true,
+              Elem:        &schema.Schema{Type: schema.TypeString},
+              Description: `A map of key value pairs defining the parameters and properties of the open source table. Corresponds with hive meta store table parameters.
Maximum size of 4Mib.`, + }, + // StorageDescriptor: [Optional] A storage descriptor containing information about the physical storage of this table. + "storage_descriptor": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: `A storage descriptor containing information about the physical storage of this table.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + // LocationUri: [Optional] The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*'). The maximum length is 2056 bytes. + "location_uri": { + Type: schema.TypeString, + Optional: true, + Description: `The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*'). The maximum length is 2056 bytes.`, + }, + // InputFormat: [Optional] Specifies the fully qualified class name of the InputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum length is 128 characters. + "input_format": { + Type: schema.TypeString, + Optional: true, + Description: `Specifies the fully qualified class name of the InputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum length is 128 characters.`, + }, + // OutputFormat: [Optional] Specifies the fully qualified class name of the OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). The maximum length is 128 characters. + "output_format": { + Type: schema.TypeString, + Optional: true, + Description: `Specifies the fully qualified class name of the OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). The maximum length is 128 characters.`, + }, + // SerdeInfo: [Optional] Serializer and deserializer information. + "serde_info": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: `Serializer and deserializer information.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + // Name: [Optional] Name of the SerDe. The maximum length is 256 characters. + "name": { + Type: schema.TypeString, + Optional: true, + Description: `Name of the SerDe. The maximum length is 256 characters.`, + }, + // SerializationLibrary: [Required] Specifies a fully-qualified class name of the serialization library that is responsible for the translation of data between table representation and the underlying low-level input and output format structures. The maximum length is 256 characters. + "serialization_library": { + Type: schema.TypeString, + Required: true, + Description: `Specifies a fully-qualified class name of the serialization library that is responsible for the translation of data between table representation and the underlying low-level input and output format structures. The maximum length is 256 characters.`, + }, + // Parameters: [Optional] Key-value pairs that define the initialization parameters for the serialization library. Maximum size 10 Kib. + "parameters": { + Type: schema.TypeMap, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `Key-value pairs that define the initialization parameters for the serialization library. Maximum size 10 Kib.`, + }, + }, + }, + }, + }, + }, + }, + // ConnectionId: [Optional] The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connection is needed to read the open source table from BigQuery Engine. 
The connection_id can have the form `..` or `projects//locations//connections/`. + "connection_id": { + Type: schema.TypeString, + Optional: true, + DiffSuppressFunc: bigQueryTableConnectionIdSuppress, + Description: `The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connection is needed to read the open source table from BigQuery Engine. The connection_id can have the form .. or projects//locations//connections/.`, + }, + }, + }, + }, + {{- end }} }, UseJSONNumber: true, } @@ -1576,6 +1662,12 @@ func resourceTable(d *schema.ResourceData, meta interface{}) (*bigquery.Table, e table.ResourceTags = tpgresource.ExpandStringMap(d, "resource_tags") + {{- if ne $.TargetVersionName "ga" }} + if v, ok := d.GetOk("external_catalog_table_options"); ok { + table.ExternalCatalogTableOptions = expandExternalCatalogTableOptions(v) + } + + {{- end }} return table, nil } @@ -1868,7 +1960,7 @@ func resourceBigQueryTableRead(d *schema.ResourceData, meta interface{}) error { } // TODO: Update when the Get API fields for TableReplicationInfo are available in the client library. - url, err := tpgresource.ReplaceVars(d, config, "{{BigQueryBasePath}}projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}") + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}BigQueryBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/datasets/{{"{{"}}dataset_id{{"}}"}}/tables/{{"{{"}}table_id{{"}}"}}") if err != nil { return err } @@ -1893,6 +1985,16 @@ func resourceBigQueryTableRead(d *schema.ResourceData, meta interface{}) error { } } + {{- if ne $.TargetVersionName "ga" }} + if res.ExternalCatalogTableOptions != nil { + externalCatalogTableOptions := flattenExternalCatalogTableOptions(res.ExternalCatalogTableOptions) + + if err := d.Set("external_catalog_table_options", externalCatalogTableOptions); err != nil { + return fmt.Errorf("Error setting external_catalog_table_options: %s", err) + } + } + + {{- end }} return nil } @@ -2697,7 +2799,7 @@ func flattenEncryptionConfiguration(ec *bigquery.EncryptionConfiguration) []map[ } // The key name was returned, no need to set the version - return []map[string]interface{}{{"kms_key_name": ec.KmsKeyName, "kms_key_version": ""}} + return []map[string]interface{}{{"{{"}}"kms_key_name": ec.KmsKeyName, "kms_key_version": ""{{"}}"}} } func flattenTimePartitioning(tp *bigquery.TimePartitioning, use_old_rpf bool) []map[string]interface{} { @@ -3033,6 +3135,154 @@ func flattenTableReplicationInfo(tableReplicationInfo map[string]interface{}) [] return []map[string]interface{}{result} } +{{- if ne $.TargetVersionName "ga" }} +func expandExternalCatalogTableOptions(configured interface{}) *bigquery.ExternalCatalogTableOptions { + if len(configured.([]interface{})) == 0 { + return nil + } + + raw := configured.([]interface{})[0].(map[string]interface{}) + ecto := &bigquery.ExternalCatalogTableOptions{} + + if v, ok := raw["parameters"]; ok { + parameters := map[string]string{} + + for k, v := range v.(map[string]interface{}) { + parameters[k] = v.(string) + } + + ecto.Parameters = parameters + } + + if v, ok := raw["storage_descriptor"]; ok { + ecto.StorageDescriptor = expandStorageDescriptor(v) + } + + if v, ok := raw["connection_id"]; ok { + ecto.ConnectionId = v.(string) + } + + return ecto +} + +func flattenExternalCatalogTableOptions(ecto *bigquery.ExternalCatalogTableOptions) []map[string]interface{} { + if ecto == nil { + return nil + } + + result := map[string]interface{}{} + + if ecto.Parameters != nil 
{ + result["parameters"] = ecto.Parameters + } + + if ecto.StorageDescriptor != nil { + result["storage_descriptor"] = flattenStorageDescriptor(ecto.StorageDescriptor) + } + + if ecto.ConnectionId != "" { + result["connection_id"] = ecto.ConnectionId + } + + return []map[string]interface{}{result} +} + +func expandStorageDescriptor(configured interface{}) *bigquery.StorageDescriptor { + if len(configured.([]interface{})) == 0 { + return nil + } + + raw := configured.([]interface{})[0].(map[string]interface{}) + sd := &bigquery.StorageDescriptor{} + + if v, ok := raw["location_uri"]; ok { + sd.LocationUri = v.(string) + } + + if v, ok := raw["input_format"]; ok { + sd.InputFormat = v.(string) + } + + if v, ok := raw["output_format"]; ok { + sd.OutputFormat = v.(string) + } + + if v, ok := raw["serde_info"]; ok { + sd.SerdeInfo = expandSerDeInfo(v) + } + + return sd +} + +func flattenStorageDescriptor(sd *bigquery.StorageDescriptor) []map[string]interface{} { + if sd == nil { + return nil + } + + result := map[string]interface{}{} + + if sd.LocationUri != "" { + result["location_uri"] = sd.LocationUri + } + + if sd.InputFormat != "" { + result["input_format"] = sd.InputFormat + } + + if sd.OutputFormat != "" { + result["output_format"] = sd.OutputFormat + } + + if sd.SerdeInfo != nil { + result["serde_info"] = flattenSerDeInfo(sd.SerdeInfo) + } + + return []map[string]interface{}{result} +} + +func expandSerDeInfo(configured interface{}) *bigquery.SerDeInfo { + if len(configured.([]interface{})) == 0 { + return nil + } + + raw := configured.([]interface{})[0].(map[string]interface{}) + si := &bigquery.SerDeInfo{SerializationLibrary: raw["serialization_library"].(string)} + + if v, ok := raw["name"]; ok { + si.Name = v.(string) + } + + if v, ok := raw["parameters"]; ok { + parameters := map[string]string{} + + for k, v := range v.(map[string]interface{}) { + parameters[k] = v.(string) + } + + si.Parameters = parameters + } + + return si +} + +func flattenSerDeInfo(si *bigquery.SerDeInfo) []map[string]interface{} { + if si == nil { + return nil + } + + result := map[string]interface{}{"serialization_library": si.SerializationLibrary} + + if si.Name != "" { + result["name"] = si.Name + } + + if si.Parameters != nil { + result["parameters"] = si.Parameters + } + + return []map[string]interface{}{result} +} +{{- end }} func resourceBigQueryTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ @@ -3049,7 +3299,7 @@ func resourceBigQueryTableImport(d *schema.ResourceData, meta interface{}) ([]*s } // Replace import id for the resource id - id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}") + id, err := tpgresource.ReplaceVars(d, config, "projects/{{"{{"}}project{{"}}"}}/datasets/{{"{{"}}dataset_id{{"}}"}}/tables/{{"{{"}}table_id{{"}}"}}") if err != nil { return nil, fmt.Errorf("Error constructing id: %s", err) } diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go.tmpl similarity index 97% rename from mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go rename to mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go.tmpl index 4c7198963057..0b5f746bc7a7 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go +++ 
b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go.tmpl @@ -1814,6 +1814,45 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) { }) } +{{- if ne $.TargetVersionName "ga" }} +func TestAccBigQueryTable_externalCatalogTableOptions(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "dataset_id": fmt.Sprintf("tf_test_dataset_%s", acctest.RandString(t, 10)), + "table_id": fmt.Sprintf("tf_test_table_%s", acctest.RandString(t, 10)), + "connection_id": fmt.Sprintf("tf_test_connection_%s", acctest.RandString(t, 10)), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryTable_externalCatalogTableOptions_basic(context), + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccBigQueryTable_externalCatalogTableOptions_update(context), + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +{{- end }} func testAccCheckBigQueryExtData(t *testing.T, expectedQuoteChar string) resource.TestCheckFunc { return func(s *terraform.State) error { for _, rs := range s.RootModule().Resources { @@ -4515,6 +4554,115 @@ resource "google_bigquery_table" "test" { `, context) } +{{- if ne $.TargetVersionName "ga" }} +func testAccBigQueryTable_externalCatalogTableOptions_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_bigquery_dataset" "test" { + provider = google-beta + + dataset_id = "%{dataset_id}" + location = "EU" +} + +resource "google_bigquery_connection" "test" { + provider = google-beta + + connection_id = "%{connection_id}" + location = "EU" + cloud_resource {} +} + +resource "google_bigquery_table" "test" { + provider = google-beta + + deletion_protection = false + dataset_id = "${google_bigquery_dataset.test.dataset_id}" + table_id = "%{table_id}" + + schema = <The `external_data_configuration` block supports: * `autodetect` - (Required) Let BigQuery try to autodetect the schema @@ -500,6 +505,67 @@ The following arguments are supported: * `table_format` - (Required) The table format the metadata only snapshots are stored in. +The `external_catalog_table_options` block supports: + +* `parameters` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + A map of key value pairs defining the parameters and properties of the open + source table. Corresponds with hive meta store table parameters. Maximum size + of 4Mib. +* `storage_descriptor` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + A storage descriptor containing information about the physical storage of this + table. + Structure is [documented below](#nested_storage_descriptor). +* `connection_id` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + The connection specifying the credentials to be used to read external storage, + such as Azure Blob, Cloud Storage, or S3. The connection is needed to read the + open source table from BigQuery Engine. 
The connection_id can have the form
+  `<project_id>.<location_id>.<connection_id>` or `projects/<project_id>/locations/<location_id>/connections/<connection_id>`.
+
+<a name="nested_storage_descriptor"></a>The `storage_descriptor` block supports:
+
+* `location_uri` - (Optional, [Beta]
+(https://terraform.io/docs/providers/google/guides/provider_versions.html))
+  The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*').
+  The maximum length is 2056 bytes.
+
+* `input_format` - (Optional, [Beta]
+(https://terraform.io/docs/providers/google/guides/provider_versions.html))
+  Specifies the fully qualified class name of the InputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat").
+  The maximum length is 128 characters.
+
+* `output_format` - (Optional, [Beta]
+(https://terraform.io/docs/providers/google/guides/provider_versions.html))
+  Specifies the fully qualified class name of the OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat").
+  The maximum length is 128 characters.
+
+* `serde_info` - (Optional, [Beta]
+(https://terraform.io/docs/providers/google/guides/provider_versions.html))
+  Serializer and deserializer information.
+  Structure is [documented below](#nested_serde_info).
+
+<a name="nested_serde_info"></a>The `serde_info` block supports:
+
+* `name` - (Optional, [Beta]
+(https://terraform.io/docs/providers/google/guides/provider_versions.html))
+  Name of the SerDe.
+  The maximum length is 256 characters.
+
+* `serialization_library` - (Required, [Beta]
+(https://terraform.io/docs/providers/google/guides/provider_versions.html))
+  Specifies a fully-qualified class name of the serialization library that is
+  responsible for the translation of data between table representation and the
+  underlying low-level input and output format structures.
+  The maximum length is 256 characters.
+
+* `parameters` - (Optional, [Beta]
+(https://terraform.io/docs/providers/google/guides/provider_versions.html))
+  Key-value pairs that define the initialization parameters for the
+  serialization library.
+  Maximum size 10 Kib.
+
 ## Attributes Reference

 In addition to the arguments listed above, the following computed attributes are