Skip to content

Commit

Permalink
Add external catalog table options to google_bigquery_table beta (#12528
Browse files Browse the repository at this point in the history
)

[upstream:bb906e00f7df7b854ca52882d6f860cfc5d1bf6e]

Signed-off-by: Modular Magician <[email protected]>
  • Loading branch information
modular-magician committed Dec 18, 2024
1 parent abeb194 commit de59d37
Show file tree
Hide file tree
Showing 4 changed files with 75 additions and 10 deletions.
3 changes: 3 additions & 0 deletions .changelog/12528.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:enhancement
bigquery: added `external_catalog_table_options` field to `google_bigquery_table` resource (beta)
```
15 changes: 6 additions & 9 deletions google/services/bigquery/resource_bigquery_table.go
Original file line number Diff line number Diff line change
Expand Up @@ -183,9 +183,9 @@ func bigQueryTableSchemaDiffSuppress(name, old, new string, _ *schema.ResourceDa

func bigQueryTableConnectionIdSuppress(name, old, new string, _ *schema.ResourceData) bool {
// API accepts connectionId in below two formats
// "{{project}}.{{location}}.{{connection_id}}" or
// "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}".
// but always returns "{{project}}.{{location}}.{{connection_id}}"
// "<project>.<location>.<connection_id>" or
// "projects/<project>/locations/<location>/connections/<connection_id>".
// but always returns "<project>.<location>.<connection_id>"

if tpgresource.IsEmptyValue(reflect.ValueOf(old)) || tpgresource.IsEmptyValue(reflect.ValueOf(new)) {
return false
Expand Down Expand Up @@ -863,13 +863,13 @@ func ResourceBigQueryTable() *schema.Resource {
// ConnectionId: [Optional] The connection specifying the credentials
// to be used to read external storage, such as Azure Blob,
// Cloud Storage, or S3. The connectionId can have the form
// "{{project}}.{{location}}.{{connection_id}}" or
// "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}".
// "<project>.<location>.<connection_id>" or
// "projects/<project>/locations/<location>/connections/<connection_id>".
"connection_id": {
Type: schema.TypeString,
Optional: true,
DiffSuppressFunc: bigQueryTableConnectionIdSuppress,
Description: `The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "{{project}}.{{location}}.{{connection_id}}" or "projects/{{project}}/locations/{{location}}/connections/{{connection_id}}".`,
Description: `The connection specifying the credentials to be used to read external storage, such as Azure Blob, Cloud Storage, or S3. The connectionId can have the form "<project>.<location>.<connection_id>" or "projects/<project>/locations/<location>/connections/<connection_id>".`,
},
"reference_file_schema_uri": {
Type: schema.TypeString,
Expand Down Expand Up @@ -1577,7 +1577,6 @@ func resourceTable(d *schema.ResourceData, meta interface{}) (*bigquery.Table, e
}

table.ResourceTags = tpgresource.ExpandStringMap(d, "resource_tags")

return table, nil
}

Expand Down Expand Up @@ -1894,7 +1893,6 @@ func resourceBigQueryTableRead(d *schema.ResourceData, meta interface{}) error {
return fmt.Errorf("Error setting table replication info: %s", err)
}
}

return nil
}

Expand Down Expand Up @@ -3034,7 +3032,6 @@ func flattenTableReplicationInfo(tableReplicationInfo map[string]interface{}) []

return []map[string]interface{}{result}
}

func resourceBigQueryTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
config := meta.(*transport_tpg.Config)
if err := tpgresource.ParseImportId([]string{
Expand Down
1 change: 0 additions & 1 deletion google/services/bigquery/resource_bigquery_table_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -1815,7 +1815,6 @@ func TestAccBigQueryTable_ResourceTags(t *testing.T) {
},
})
}

func testAccCheckBigQueryExtData(t *testing.T, expectedQuoteChar string) resource.TestCheckFunc {
return func(s *terraform.State) error {
for _, rs := range s.RootModule().Resources {
Expand Down
66 changes: 66 additions & 0 deletions website/docs/r/bigquery_table.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -182,6 +182,11 @@ The following arguments are supported:
parent organization or project resource for this tag key. Tag value is
expected to be the short name, for example "Production".

* `external_catalog_table_options` - (Optional, [Beta]
(https://terraform.io/docs/providers/google/guides/provider_versions.html))
Options defining open source compatible table.
Structure is [documented below](#nested_external_catalog_table_options).

<a name="nested_external_data_configuration"></a>The `external_data_configuration` block supports:

* `autodetect` - (Required) Let BigQuery try to autodetect the schema
Expand Down Expand Up @@ -500,6 +505,67 @@ The following arguments are supported:

* `table_format` - (Required) The table format the metadata only snapshots are stored in.

<a name="nested_external_catalog_table_options"></a>The `external_catalog_table_options` block supports:

* `parameters` - (Optional, [Beta]
(https://terraform.io/docs/providers/google/guides/provider_versions.html))
A map of key value pairs defining the parameters and properties of the open
source table. Corresponds with hive meta store table parameters. Maximum size
of 4MiB.
* `storage_descriptor` - (Optional, [Beta]
(https://terraform.io/docs/providers/google/guides/provider_versions.html))
A storage descriptor containing information about the physical storage of this
table.
Structure is [documented below](#nested_storage_descriptor).
* `connection_id` - (Optional, [Beta]
(https://terraform.io/docs/providers/google/guides/provider_versions.html))
The connection specifying the credentials to be used to read external storage,
such as Azure Blob, Cloud Storage, or S3. The connection is needed to read the
open source table from BigQuery Engine. The connection_id can have the form
`<project_id>.<location_id>.<connection_id>` or `projects/<project_id>/locations/<location_id>/connections/<connection_id>`.

<a name="nested_storage_descriptor"></a>The `storage_descriptor` block supports:

* `location_uri` - (Optional, [Beta]
(https://terraform.io/docs/providers/google/guides/provider_versions.html))
The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*').
The maximum length is 2056 bytes.

* `input_format` - (Optional, [Beta]
(https://terraform.io/docs/providers/google/guides/provider_versions.html))
Specifies the fully qualified class name of the InputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat").
The maximum length is 128 characters.

* `output_format` - (Optional, [Beta]
(https://terraform.io/docs/providers/google/guides/provider_versions.html))
Specifies the fully qualified class name of the OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat").
The maximum length is 128 characters.

* `serde_info` - (Optional, [Beta]
(https://terraform.io/docs/providers/google/guides/provider_versions.html))
Serializer and deserializer information.
Structure is [documented below](#nested_serde_info).

<a name="nested_serde_info"></a>The `serde_info` block supports:

* `name` - (Optional, [Beta]
(https://terraform.io/docs/providers/google/guides/provider_versions.html))
Name of the SerDe.
The maximum length is 256 characters.

* `serialization_library` - (Required, [Beta]
(https://terraform.io/docs/providers/google/guides/provider_versions.html))
Specifies a fully-qualified class name of the serialization library that is
responsible for the translation of data between table representation and the
underlying low-level input and output format structures.
The maximum length is 256 characters.

* `parameters` - (Optional, [Beta]
(https://terraform.io/docs/providers/google/guides/provider_versions.html))
Key-value pairs that define the initialization parameters for the
serialization library.
Maximum size 10 KiB.

## Attributes Reference

In addition to the arguments listed above, the following computed attributes are
Expand Down

0 comments on commit de59d37

Please sign in to comment.