From de59d37cb93151c5965a61b1be061de758c1a222 Mon Sep 17 00:00:00 2001 From: Modular Magician Date: Wed, 18 Dec 2024 16:29:55 +0000 Subject: [PATCH] Add external catalog table options to google_bigquery_table beta (#12528) [upstream:bb906e00f7df7b854ca52882d6f860cfc5d1bf6e] Signed-off-by: Modular Magician --- .changelog/12528.txt | 3 + .../bigquery/resource_bigquery_table.go | 15 ++--- .../bigquery/resource_bigquery_table_test.go | 1 - website/docs/r/bigquery_table.html.markdown | 66 +++++++++++++++++++ 4 files changed, 75 insertions(+), 10 deletions(-) create mode 100644 .changelog/12528.txt diff --git a/.changelog/12528.txt b/.changelog/12528.txt new file mode 100644 index 00000000000..61a6765abf9 --- /dev/null +++ b/.changelog/12528.txt @@ -0,0 +1,3 @@ +```release-note:enhancement +bigquery: added `external_catalog_table_options` field to `google_bigquery_table` resource (beta) +``` \ No newline at end of file diff --git a/google/services/bigquery/resource_bigquery_table.go b/google/services/bigquery/resource_bigquery_table.go index 19e5e2d62cc..91348be74cf 100644 --- a/google/services/bigquery/resource_bigquery_table.go +++ b/google/services/bigquery/resource_bigquery_table.go @@ -183,9 +183,9 @@ func bigQueryTableSchemaDiffSuppress(name, old, new string, _ *schema.ResourceDa func bigQueryTableConnectionIdSuppress(name, old, new string, _ *schema.ResourceData) bool { // API accepts connectionId in below two formats - // "{{project}}.{{location}}.{{connection_id}}" or - // "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}". - // but always returns "{{project}}.{{location}}.{{connection_id}}" + // "{{project}}.{{location}}.{{connection_id}}" or + // "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}". + // but always returns "{{project}}.{{location}}.{{connection_id}}" 
if tpgresource.IsEmptyValue(reflect.ValueOf(old)) || tpgresource.IsEmptyValue(reflect.ValueOf(new)) { return false @@ -863,13 +863,13 @@ func ResourceBigQueryTable() *schema.Resource { // ConnectionId: [Optional] The connection specifying the credentials // to be used to read external storage, such as Azure Blob, // Cloud Storage, or S3. The connectionId can have the form - // "{{project}}.{{location}}.{{connection_id}}" or - // "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}". + // "{{project}}.{{location}}.{{connection_id}}" or + // "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}". "connection_id": { Type: schema.TypeString, Optional: true, DiffSuppressFunc: bigQueryTableConnectionIdSuppress, - Description: `The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "{{project}}.{{location}}.{{connection_id}}" or "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}".`, + Description: `The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "{{project}}.{{location}}.{{connection_id}}" 
or "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}".`, }, "reference_file_schema_uri": { Type: schema.TypeString, @@ -1577,7 +1577,6 @@ func resourceTable(d *schema.ResourceData, meta interface{}) (*bigquery.Table, e } table.ResourceTags = tpgresource.ExpandStringMap(d, "resource_tags") - return table, nil } @@ -1894,7 +1893,6 @@ func resourceBigQueryTableRead(d *schema.ResourceData, meta interface{}) error { return fmt.Errorf("Error setting table replication info: %s", err) } } - return nil } @@ -3034,7 +3032,6 @@ func flattenTableReplicationInfo(tableReplicationInfo map[string]interface{}) [] return []map[string]interface{}{result} } - func resourceBigQueryTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ diff --git a/google/services/bigquery/resource_bigquery_table_test.go b/google/services/bigquery/resource_bigquery_table_test.go index 2979d80d920..6a5fdc17e95 100644 --- a/google/services/bigquery/resource_bigquery_table_test.go +++ b/google/services/bigquery/resource_bigquery_table_test.go @@ -1815,7 +1815,6 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) { }, }) } - func testAccCheckBigQueryExtData(t *testing.T, expectedQuoteChar string) resource.TestCheckFunc { return func(s *terraform.State) error { for _, rs := range s.RootModule().Resources { diff --git a/website/docs/r/bigquery_table.html.markdown b/website/docs/r/bigquery_table.html.markdown index 75c460cc702..0a4893bed88 100644 --- a/website/docs/r/bigquery_table.html.markdown +++ b/website/docs/r/bigquery_table.html.markdown @@ -182,6 +182,11 @@ The following arguments are supported: parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". 
+* `external_catalog_table_options` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + Options defining open source compatible table. + Structure is [documented below](#nested_external_catalog_table_options). + The `external_data_configuration` block supports: * `autodetect` - (Required) Let BigQuery try to autodetect the schema @@ -500,6 +505,67 @@ The following arguments are supported: * `table_format` - (Required) The table format the metadata only snapshots are stored in. +The `external_catalog_table_options` block supports: + +* `parameters` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + A map of key value pairs defining the parameters and properties of the open + source table. Corresponds with hive meta store table parameters. Maximum size + of 4Mib. +* `storage_descriptor` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + A storage descriptor containing information about the physical storage of this + table. + Structure is [documented below](#nested_storage_descriptor). +* `connection_id` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + The connection specifying the credentials to be used to read external storage, + such as Azure Blob, Cloud Storage, or S3. The connection is needed to read the + open source table from BigQuery Engine. The connection_id can have the form + `{{project}}.{{location}}.{{connection_id}}` or `projects/{{project}}/locations/{{location}}/connections/{{connection_id}}`. + +The `storage_descriptor` block supports: + +* `location_uri` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*'). + The maximum length is 2056 bytes. 
+ +* `input_format` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + Specifies the fully qualified class name of the InputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). + The maximum length is 128 characters. + +* `output_format` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + Specifies the fully qualified class name of the OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). + The maximum length is 128 characters. + +* `serde_info` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + Serializer and deserializer information. + Structure is [documented below](#nested_serde_info). + +The `serde_info` block supports: + +* `name` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + Name of the SerDe. + The maximum length is 256 characters. + +* `serialization_library` - (Required, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + Specifies a fully-qualified class name of the serialization library that is + responsible for the translation of data between table representation and the + underlying low-level input and output format structures. + The maximum length is 256 characters. + +* `parameters` - (Optional, [Beta] +(https://terraform.io/docs/providers/google/guides/provider_versions.html)) + Key-value pairs that define the initialization parameters for the + serialization library. + Maximum size 10 Kib. + ## Attributes Reference In addition to the arguments listed above, the following computed attributes are