From 66bd22b103e4ff516a40f9da7ce174abf8a78629 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Tue, 1 Oct 2024 11:53:30 +0200 Subject: [PATCH 01/11] Stream on table --- MIGRATION_GUIDE.md | 10 + .../snowflake_stream_on_table/import.sh | 1 + .../snowflake_stream_on_table/resource.tf | 29 + pkg/acceptance/bettertestspoc/README.md | 17 +- .../objectassert/stream_snowflake_ext.go | 47 ++ .../objectassert/stream_snowflake_gen.go | 40 -- .../task_parameters_snowflake_gen.go | 2 +- .../resourceassert/gen/resource_schema_def.go | 4 + .../stream_on_table_resource_gen.go | 147 +++++ .../authentication_policy_show_output_gen.go | 81 +++ .../stream_show_output_ext.go | 30 + .../stream_show_output_gen.go | 18 +- .../config/model/stream_on_table_model_gen.go | 171 ++++++ pkg/acceptance/check_destroy.go | 3 + pkg/acceptance/helpers/context_client.go | 10 + pkg/acceptance/helpers/stream_client.go | 8 + pkg/acceptance/helpers/table_client.go | 8 + pkg/provider/provider.go | 1 + pkg/provider/resources/resources.go | 1 + .../api_authentication_integration_common.go | 5 +- pkg/resources/common.go | 6 +- pkg/resources/masking_policy.go | 3 +- pkg/resources/row_access_policy.go | 2 +- pkg/resources/stream_common.go | 59 ++ pkg/resources/stream_on_table.go | 296 ++++++++++ .../stream_on_table_acceptance_test.go | 518 ++++++++++++++++++ .../testdata/TestAcc_StreamOnTable/at/test.tf | 19 + .../TestAcc_StreamOnTable/at/variables.tf | 31 ++ .../TestAcc_StreamOnTable/basic/test.tf | 13 + .../TestAcc_StreamOnTable/basic/variables.tf | 19 + .../TestAcc_StreamOnTable/before/test.tf | 19 + .../TestAcc_StreamOnTable/before/variables.tf | 27 + pkg/resources/view.go | 8 +- pkg/schemas/common_types.go | 72 +++ pkg/schemas/stream.go | 132 +++++ pkg/schemas/stream_gen.go | 21 +- pkg/sdk/parsers.go | 14 + pkg/sdk/streams_def.go | 53 +- pkg/sdk/streams_gen.go | 6 +- pkg/sdk/streams_impl_gen.go | 23 +- pkg/sdk/streams_validations_gen.go | 4 +- .../testint/streams_gen_integration_test.go | 28 +- templates/resources/stream.md.tmpl | 35 ++ v1-preparations/CHANGES_BEFORE_V1.md | 7 +- 44 files changed, 1944 insertions(+), 104 deletions(-) create mode 100644 examples/resources/snowflake_stream_on_table/import.sh create mode 100644 examples/resources/snowflake_stream_on_table/resource.tf create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/stream_on_table_resource_gen.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/authentication_policy_show_output_gen.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/stream_on_table_model_gen.go create mode 100644 pkg/resources/stream_common.go create mode 100644 pkg/resources/stream_on_table.go create mode 100644 pkg/resources/stream_on_table_acceptance_test.go create mode 100644 pkg/resources/testdata/TestAcc_StreamOnTable/at/test.tf create mode 100644 pkg/resources/testdata/TestAcc_StreamOnTable/at/variables.tf create mode 100644 pkg/resources/testdata/TestAcc_StreamOnTable/basic/test.tf create mode 100644 pkg/resources/testdata/TestAcc_StreamOnTable/basic/variables.tf create mode 100644 pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf create mode 100644 pkg/resources/testdata/TestAcc_StreamOnTable/before/variables.tf create mode 100644 pkg/schemas/stream.go create mode 100644 templates/resources/stream.md.tmpl diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index ae2ecd9298..45313f6b9f 100644 --- 
a/MIGRATION_GUIDE.md
+++ b/MIGRATION_GUIDE.md
@@ -4,6 +4,16 @@ This document is meant to help you migrate your Terraform config to the newest version. We
 describe deprecations or breaking changes and help you to change your configuration to keep the same (or similar) behavior across different versions.
 
+## v0.96.0 ➞ v0.97.0
+
+### *(new feature)* snowflake_stream_on_table resource
+
+To enhance clarity and functionality, the new resource `snowflake_stream_on_table` has been introduced
+to replace the previous `snowflake_stream`. Recognizing that the old resource carried multiple responsibilities within a single entity, we opted to divide it into more specialized resources.
+The newly introduced resources are aligned with the latest Snowflake documentation at the time of implementation, and adhere to our [new conventions](#general-changes).
+The split is based on the object on which the stream is created. The mapping between SQL statements and the resources is the following:
+- `ON TABLE <table_name>` -> `snowflake_stream_on_table`
+
 ## v0.95.0 ➞ v0.96.0
 
 ### snowflake_masking_policies data source changes
diff --git a/examples/resources/snowflake_stream_on_table/import.sh b/examples/resources/snowflake_stream_on_table/import.sh
new file mode 100644
index 0000000000..1c1035cec3
--- /dev/null
+++ b/examples/resources/snowflake_stream_on_table/import.sh
@@ -0,0 +1 @@
+terraform import snowflake_stream_on_table.example '"<database_name>"."<schema_name>"."<stream_name>"'
diff --git a/examples/resources/snowflake_stream_on_table/resource.tf b/examples/resources/snowflake_stream_on_table/resource.tf
new file mode 100644
index 0000000000..576472fa8a
--- /dev/null
+++ b/examples/resources/snowflake_stream_on_table/resource.tf
@@ -0,0 +1,29 @@
+resource "snowflake_table" "table" {
+  database = "database"
+  schema   = "schema"
+  name     = "name"
+
+  column {
+    type = "NUMBER(38,0)"
+    name = "id"
+  }
+}
+
+# resource with all fields set (excluding time travel with BEFORE)
+resource "snowflake_stream_on_table" "stream" {
+  name     = "stream"
+  schema   = "schema"
+  database = "database"
+
+  copy_grants       = true
+  table             = snowflake_table.table.fully_qualified_name
+  append_only       = "true"
+  show_initial_rows = "true"
+
+  at {
+    statement = "8e5d0ca9-005e-44e6-b858-a8f5b37c5726"
+  }
+
+  comment = "A stream."
+}
diff --git a/pkg/acceptance/bettertestspoc/README.md b/pkg/acceptance/bettertestspoc/README.md
index a97717c232..ddc4405fe7 100644
--- a/pkg/acceptance/bettertestspoc/README.md
+++ b/pkg/acceptance/bettertestspoc/README.md
@@ -177,7 +177,7 @@ it will result in:
     object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [7/13]: failed with error: expected scaling policy: ECONOMY; got: STANDARD
     object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [8/13]: failed with error: expected auto suspend: 123; got: 600
     object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [9/13]: failed with error: expected auto resume: false; got: true
-    object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [10/13]: failed with error: expected resource monitor: some-id; got:
+    object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [10/13]: failed with error: expected resource monitor: some-id; got:
     object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [11/13]: failed with error: expected comment: bad comment; got: Who does encouraging eagerly annoying dream several their scold straightaway.
object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [12/13]: failed with error: expected enable query acceleration: true; got: false object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [13/13]: failed with error: expected query acceleration max scale factor: 12; got: 8 @@ -241,17 +241,17 @@ it will result in: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [6/12]: failed with error: expected: ECONOMY, got: STANDARD WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [7/12]: failed with error: expected: 123, got: 600 WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [8/12]: failed with error: expected: false, got: true - WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [9/12]: failed with error: expected: abc, got: + WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [9/12]: failed with error: expected: abc, got: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [10/12]: failed with error: expected: bad comment, got: Promise my huh off certain you bravery dynasty with Roman. WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [11/12]: failed with error: expected: true, got: false WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [12/12]: failed with error: expected: 16, got: 8 check 9/11 error: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [2/7]: failed with error: expected: 1, got: 8 - WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [3/7]: failed with error: expected: WAREHOUSE, got: + WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [3/7]: failed with error: expected: WAREHOUSE, got: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [4/7]: failed with error: expected: 23, got: 0 - WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [5/7]: failed with error: expected: WAREHOUSE, got: + WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [5/7]: failed with error: expected: WAREHOUSE, got: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [6/7]: failed with error: expected: 1232, got: 172800 - WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [7/7]: failed with error: expected: WAREHOUSE, got: + WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [7/7]: failed with error: expected: WAREHOUSE, got: check 10/11 error: object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [1/13]: failed with error: expected name: bad name; got: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [2/13]: failed with error: expected state: SUSPENDED; got: STARTED @@ -262,7 +262,7 @@ it will result in: object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [7/13]: failed with error: expected scaling policy: ECONOMY; got: STANDARD object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [8/13]: failed with error: expected auto suspend: 123; got: 600 object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [9/13]: failed with error: expected auto resume: false; got: true - object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [10/13]: failed with error: expected resource monitor: 
some-id; got:
+    object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [10/13]: failed with error: expected resource monitor: some-id; got:
     object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [11/13]: failed with error: expected comment: bad comment; got: Promise my huh off certain you bravery dynasty with Roman.
     object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [12/13]: failed with error: expected enable query acceleration: true; got: false
     object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [13/13]: failed with error: expected query acceleration max scale factor: 12; got: 8
@@ -291,7 +291,7 @@ it will result in:
     ```
 it will result in:
     ```
-    commons.go:101:
+    commons.go:101:
         Error Trace:    /Users/asawicki/Projects/terraform-provider-snowflake/pkg/sdk/testint/warehouses_integration_test.go:149
         Error:          Received unexpected error:
                         object WAREHOUSE["VKSENEIT_535F314F_6549_348F_370E_AB430EE4BC7B"] assertion [1/13]: failed with error: expected name: bad name; got: VKSENEIT_535F314F_6549_348F_370E_AB430EE4BC7B
@@ -344,8 +344,9 @@ func (w *WarehouseDatasourceShowOutputAssert) IsEmpty() {
 - Omit computed fields in the model (like FullyQualifiedName), because it doesn't make sense to set them
 - There's an error when generating models, steps to reproduce:
   - Go to view resource code and change `data_metric_function` field to `testing` and make it required
-  - During the generation, the following error appears: mixed named and unnamed parameters.
+  - During the generation, the following error appears: mixed named and unnamed parameters. It's a Go compile error indicating that a function signature mixes named and unnamed parameters (e.g. `func(abc string, int)`). The error is a result of two things: 1. Lists of objects are partially generated, and only the parameter name is generated in some functions (the type has to be added manually). 2. `testing` is a package name, which makes Go think that we want an unnamed parameter there, but we just didn't generate the type for that field in the function argument. A minimal reproduction is shown below.
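    For illustration, a minimal sketch (hypothetical function and parameter names) that fails to compile with exactly this message:
    ```go
    package main

    // `abc string` is a named parameter, while `int` is unnamed, so the
    // compiler rejects the signature with "mixed named and unnamed parameters".
    func generated(abc string, int) {}

    func main() {}
    ```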
+- generate assertions checking that time is not empty - we often do not compare time fields by value, but check if they are set diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_ext.go index cb474d651e..560dfc889f 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_ext.go @@ -38,3 +38,50 @@ func (s *StreamAssert) HasStageName(expected string) *StreamAssert { }) return s } + +func (s *StreamAssert) HasSourceType(expected sdk.StreamSourceType) *StreamAssert { + s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { + t.Helper() + if o.SourceType == nil { + return fmt.Errorf("expected source type to have value; got: nil") + } + if *o.SourceType != expected { + return fmt.Errorf("expected source type: %v; got: %v", expected, *o.SourceType) + } + return nil + }) + return s +} + +func (s *StreamAssert) HasBaseTables(expected []sdk.SchemaObjectIdentifier) *StreamAssert { + s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { + t.Helper() + if o.BaseTables == nil { + return fmt.Errorf("expected base tables to have value; got: nil") + } + if len(o.BaseTables) != len(expected) { + return fmt.Errorf("expected base tables length: %v; got: %v", len(expected), len(o.BaseTables)) + } + for i := range o.BaseTables { + if o.BaseTables[i].FullyQualifiedName() != expected[i].FullyQualifiedName() { + return fmt.Errorf("expected base table id: %v; got: %v", expected[i], o.BaseTables[i]) + } + } + return nil + }) + return s +} + +func (s *StreamAssert) HasMode(expected sdk.StreamMode) *StreamAssert { + s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { + t.Helper() + if o.Mode == nil { + return fmt.Errorf("expected mode to have value; got: nil") + } + if *o.Mode != expected { + return fmt.Errorf("expected mode: %v; got: %v", expected, *o.Mode) + } + return nil + }) + return s +} diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_gen.go b/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_gen.go index 95b71b3f0f..5874d1fed9 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_gen.go @@ -117,33 +117,6 @@ func (s *StreamAssert) HasTableName(expected string) *StreamAssert { return s } -func (s *StreamAssert) HasSourceType(expected string) *StreamAssert { - s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { - t.Helper() - if o.SourceType == nil { - return fmt.Errorf("expected source type to have value; got: nil") - } - if *o.SourceType != expected { - return fmt.Errorf("expected source type: %v; got: %v", expected, *o.SourceType) - } - return nil - }) - return s -} - -func (s *StreamAssert) HasBaseTables(expected string) *StreamAssert { - s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { - t.Helper() - if o.BaseTables == nil { - return fmt.Errorf("expected base tables to have value; got: nil") - } - if *o.BaseTables != expected { - return fmt.Errorf("expected base tables: %v; got: %v", expected, *o.BaseTables) - } - return nil - }) - return s -} func (s *StreamAssert) HasType(expected string) *StreamAssert { s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { @@ -173,19 +146,6 @@ func (s *StreamAssert) HasStale(expected string) *StreamAssert { return s } -func (s *StreamAssert) HasMode(expected string) *StreamAssert { - 
s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { - t.Helper() - if o.Mode == nil { - return fmt.Errorf("expected mode to have value; got: nil") - } - if *o.Mode != expected { - return fmt.Errorf("expected mode: %v; got: %v", expected, *o.Mode) - } - return nil - }) - return s -} func (s *StreamAssert) HasStaleAfter(expected time.Time) *StreamAssert { s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { diff --git a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go index 24ac1f78bd..b5c571149d 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go @@ -1027,7 +1027,7 @@ func (t *TaskParametersAssert) HasDefaultTaskAutoRetryAttemptsValueExplicit() *T } func (t *TaskParametersAssert) HasDefaultUserTaskManagedInitialWarehouseSizeValueExplicit() *TaskParametersAssert { - return t.HasUserTaskManagedInitialWarehouseSize("Medium") + return t.HasUserTaskManagedInitialWarehouseSize(sdk.WarehouseSize("Medium")) } func (t *TaskParametersAssert) HasDefaultUserTaskMinimumTriggerIntervalInSecondsValueExplicit() *TaskParametersAssert { diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go index d3670bbba6..3b71f952ec 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go @@ -49,4 +49,8 @@ var allResourceSchemaDefs = []ResourceSchemaDef{ name: "MaskingPolicy", schema: resources.MaskingPolicy().Schema, }, + { + name: "StreamOnTable", + schema: resources.StreamOnTable().Schema, + }, } diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/stream_on_table_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/stream_on_table_resource_gen.go new file mode 100644 index 0000000000..ad83af307c --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/stream_on_table_resource_gen.go @@ -0,0 +1,147 @@ +// Code generated by assertions generator; DO NOT EDIT. 
+ +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type StreamOnTableResourceAssert struct { + *assert.ResourceAssert +} + +func StreamOnTableResource(t *testing.T, name string) *StreamOnTableResourceAssert { + t.Helper() + + return &StreamOnTableResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedStreamOnTableResource(t *testing.T, id string) *StreamOnTableResourceAssert { + t.Helper() + + return &StreamOnTableResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (s *StreamOnTableResourceAssert) HasAppendOnlyString(expected string) *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueSet("append_only", expected)) + return s +} + +func (s *StreamOnTableResourceAssert) HasAtString(expected string) *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueSet("at", expected)) + return s +} + +func (s *StreamOnTableResourceAssert) HasBeforeString(expected string) *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueSet("before", expected)) + return s +} + +func (s *StreamOnTableResourceAssert) HasCommentString(expected string) *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueSet("comment", expected)) + return s +} + +func (s *StreamOnTableResourceAssert) HasCopyGrantsString(expected string) *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueSet("copy_grants", expected)) + return s +} + +func (s *StreamOnTableResourceAssert) HasDatabaseString(expected string) *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueSet("database", expected)) + return s +} + +func (s *StreamOnTableResourceAssert) HasFullyQualifiedNameString(expected string) *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueSet("fully_qualified_name", expected)) + return s +} + +func (s *StreamOnTableResourceAssert) HasNameString(expected string) *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueSet("name", expected)) + return s +} + +func (s *StreamOnTableResourceAssert) HasSchemaString(expected string) *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueSet("schema", expected)) + return s +} + +func (s *StreamOnTableResourceAssert) HasShowInitialRowsString(expected string) *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueSet("show_initial_rows", expected)) + return s +} + +func (s *StreamOnTableResourceAssert) HasTableString(expected string) *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueSet("table", expected)) + return s +} + +//////////////////////////// +// Attribute empty checks // +//////////////////////////// + +func (s *StreamOnTableResourceAssert) HasNoAppendOnly() *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueNotSet("append_only")) + return s +} + +func (s *StreamOnTableResourceAssert) HasNoAt() *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueNotSet("at")) + return s +} + +func (s *StreamOnTableResourceAssert) HasNoBefore() *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueNotSet("before")) + return s +} + +func (s *StreamOnTableResourceAssert) HasNoComment() *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueNotSet("comment")) + return s +} + +func (s *StreamOnTableResourceAssert) HasNoCopyGrants() *StreamOnTableResourceAssert { + 
s.AddAssertion(assert.ValueNotSet("copy_grants")) + return s +} + +func (s *StreamOnTableResourceAssert) HasNoDatabase() *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueNotSet("database")) + return s +} + +func (s *StreamOnTableResourceAssert) HasNoFullyQualifiedName() *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueNotSet("fully_qualified_name")) + return s +} + +func (s *StreamOnTableResourceAssert) HasNoName() *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueNotSet("name")) + return s +} + +func (s *StreamOnTableResourceAssert) HasNoSchema() *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueNotSet("schema")) + return s +} + +func (s *StreamOnTableResourceAssert) HasNoShowInitialRows() *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueNotSet("show_initial_rows")) + return s +} + +func (s *StreamOnTableResourceAssert) HasNoTable() *StreamOnTableResourceAssert { + s.AddAssertion(assert.ValueNotSet("table")) + return s +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/authentication_policy_show_output_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/authentication_policy_show_output_gen.go new file mode 100644 index 0000000000..ea399f6fc4 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/authentication_policy_show_output_gen.go @@ -0,0 +1,81 @@ +// Code generated by assertions generator; DO NOT EDIT. + +package resourceshowoutputassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +// to ensure sdk package is used +var _ = sdk.Object{} + +type AuthenticationPolicyShowOutputAssert struct { + *assert.ResourceAssert +} + +func AuthenticationPolicyShowOutput(t *testing.T, name string) *AuthenticationPolicyShowOutputAssert { + t.Helper() + + a := AuthenticationPolicyShowOutputAssert{ + ResourceAssert: assert.NewResourceAssert(name, "show_output"), + } + a.AddAssertion(assert.ValueSet("show_output.#", "1")) + return &a +} + +func ImportedAuthenticationPolicyShowOutput(t *testing.T, id string) *AuthenticationPolicyShowOutputAssert { + t.Helper() + + a := AuthenticationPolicyShowOutputAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "show_output"), + } + a.AddAssertion(assert.ValueSet("show_output.#", "1")) + return &a +} + +//////////////////////////// +// Attribute value checks // +//////////////////////////// + +func (a *AuthenticationPolicyShowOutputAssert) HasCreatedOn(expected string) *AuthenticationPolicyShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("created_on", expected)) + return a +} + +func (a *AuthenticationPolicyShowOutputAssert) HasName(expected string) *AuthenticationPolicyShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("name", expected)) + return a +} + +func (a *AuthenticationPolicyShowOutputAssert) HasComment(expected string) *AuthenticationPolicyShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("comment", expected)) + return a +} + +func (a *AuthenticationPolicyShowOutputAssert) HasDatabaseName(expected string) *AuthenticationPolicyShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("database_name", expected)) + return a +} + +func (a *AuthenticationPolicyShowOutputAssert) HasSchemaName(expected string) *AuthenticationPolicyShowOutputAssert { + 
a.AddAssertion(assert.ResourceShowOutputValueSet("schema_name", expected))
	return a
}

func (a *AuthenticationPolicyShowOutputAssert) HasOwner(expected string) *AuthenticationPolicyShowOutputAssert {
	a.AddAssertion(assert.ResourceShowOutputValueSet("owner", expected))
	return a
}

func (a *AuthenticationPolicyShowOutputAssert) HasOwnerRoleType(expected string) *AuthenticationPolicyShowOutputAssert {
	a.AddAssertion(assert.ResourceShowOutputValueSet("owner_role_type", expected))
	return a
}

func (a *AuthenticationPolicyShowOutputAssert) HasOptions(expected string) *AuthenticationPolicyShowOutputAssert {
	a.AddAssertion(assert.ResourceShowOutputValueSet("options", expected))
	return a
}
diff --git a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go
new file mode 100644
index 0000000000..f13f830192
--- /dev/null
+++ b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go
@@ -0,0 +1,30 @@
+// Code generated by assertions generator; DO NOT EDIT.
+
+package resourceshowoutputassert
+
+import (
+	"fmt"
+	"strconv"
+
+	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert"
+	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
+)
+
+func (s *StreamShowOutputAssert) HasCreatedOnNotEmpty() *StreamShowOutputAssert {
+	s.AddAssertion(assert.ResourceShowOutputValuePresent("created_on"))
+	return s
+}
+
+func (s *StreamShowOutputAssert) HasStaleAfterNotEmpty() *StreamShowOutputAssert {
+	s.AddAssertion(assert.ResourceShowOutputValuePresent("stale_after"))
+	return s
+}
+
+func (s *StreamShowOutputAssert) HasBaseTables(ids []sdk.SchemaObjectIdentifier) *StreamShowOutputAssert {
+	s.AddAssertion(assert.ResourceShowOutputValueSet("base_tables.#", strconv.FormatInt(int64(len(ids)), 10)))
+	for i := range ids {
+		s.AddAssertion(assert.ResourceShowOutputValueSet(fmt.Sprintf("base_tables.%d", i), ids[i].FullyQualifiedName()))
+	}
+	return s
+}
diff --git a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_gen.go
index 32394c1dce..79c93d2beb 100644
--- a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_gen.go
+++ b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_gen.go
@@ -61,11 +61,6 @@ func (s *StreamShowOutputAssert) HasSchemaName(expected string) *StreamShowOutpu
 	return s
 }
 
-func (s *StreamShowOutputAssert) HasTableOn(expected string) *StreamShowOutputAssert {
-	s.AddAssertion(assert.ResourceShowOutputValueSet("table_on", expected))
-	return s
-}
-
 func (s *StreamShowOutputAssert) HasOwner(expected string) *StreamShowOutputAssert {
 	s.AddAssertion(assert.ResourceShowOutputValueSet("owner", expected))
 	return s
 }
@@ -81,13 +76,8 @@ func (s *StreamShowOutputAssert) HasTableName(expected string) *StreamShowOutput
 	return s
 }
 
-func (s *StreamShowOutputAssert) HasSourceType(expected string) *StreamShowOutputAssert {
-	s.AddAssertion(assert.ResourceShowOutputValueSet("source_type", expected))
-	return s
-}
-
-func (s *StreamShowOutputAssert) HasBaseTables(expected string) *StreamShowOutputAssert {
-	s.AddAssertion(assert.ResourceShowOutputValueSet("base_tables", expected))
+func (s *StreamShowOutputAssert) HasSourceType(expected sdk.StreamSourceType) *StreamShowOutputAssert {
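	// source_type is an enum with a string underlying type, so the assertion compares the attribute against the enum's underlying string value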
s.AddAssertion(assert.ResourceShowOutputStringUnderlyingValueSet("source_type", expected)) return s } @@ -101,8 +91,8 @@ func (s *StreamShowOutputAssert) HasStale(expected string) *StreamShowOutputAsse return s } -func (s *StreamShowOutputAssert) HasMode(expected string) *StreamShowOutputAssert { - s.AddAssertion(assert.ResourceShowOutputValueSet("mode", expected)) +func (s *StreamShowOutputAssert) HasMode(expected sdk.StreamMode) *StreamShowOutputAssert { + s.AddAssertion(assert.ResourceShowOutputStringUnderlyingValueSet("mode", expected)) return s } diff --git a/pkg/acceptance/bettertestspoc/config/model/stream_on_table_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/stream_on_table_model_gen.go new file mode 100644 index 0000000000..05cbdb51e9 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/stream_on_table_model_gen.go @@ -0,0 +1,171 @@ +// Code generated by config model builder generator; DO NOT EDIT. + +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type StreamOnTableModel struct { + AppendOnly tfconfig.Variable `json:"append_only,omitempty"` + At tfconfig.Variable `json:"at,omitempty"` + Before tfconfig.Variable `json:"before,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + CopyGrants tfconfig.Variable `json:"copy_grants,omitempty"` + Database tfconfig.Variable `json:"database,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + Schema tfconfig.Variable `json:"schema,omitempty"` + ShowInitialRows tfconfig.Variable `json:"show_initial_rows,omitempty"` + Table tfconfig.Variable `json:"table,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// +// Basic builders (resource name and required) // +///////////////////////////////////////////////// + +func StreamOnTable( + resourceName string, + database string, + name string, + schema string, + table string, +) *StreamOnTableModel { + s := &StreamOnTableModel{ResourceModelMeta: config.Meta(resourceName, resources.StreamOnTable)} + s.WithDatabase(database) + s.WithName(name) + s.WithSchema(schema) + s.WithTable(table) + return s +} + +func StreamOnTableWithDefaultMeta( + database string, + name string, + schema string, + table string, +) *StreamOnTableModel { + s := &StreamOnTableModel{ResourceModelMeta: config.DefaultMeta(resources.StreamOnTable)} + s.WithDatabase(database) + s.WithName(name) + s.WithSchema(schema) + s.WithTable(table) + return s +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +func (s *StreamOnTableModel) WithAppendOnly(appendOnly string) *StreamOnTableModel { + s.AppendOnly = tfconfig.StringVariable(appendOnly) + return s +} + +// at attribute type is not yet supported, so WithAt can't be generated + +// before attribute type is not yet supported, so WithBefore can't be generated + +func (s *StreamOnTableModel) WithComment(comment string) *StreamOnTableModel { + s.Comment = tfconfig.StringVariable(comment) + return s +} + +func (s *StreamOnTableModel) WithCopyGrants(copyGrants bool) *StreamOnTableModel { + s.CopyGrants = tfconfig.BoolVariable(copyGrants) + return s +} + +func (s *StreamOnTableModel) WithDatabase(database string) 
*StreamOnTableModel { + s.Database = tfconfig.StringVariable(database) + return s +} + +func (s *StreamOnTableModel) WithFullyQualifiedName(fullyQualifiedName string) *StreamOnTableModel { + s.FullyQualifiedName = tfconfig.StringVariable(fullyQualifiedName) + return s +} + +func (s *StreamOnTableModel) WithName(name string) *StreamOnTableModel { + s.Name = tfconfig.StringVariable(name) + return s +} + +func (s *StreamOnTableModel) WithSchema(schema string) *StreamOnTableModel { + s.Schema = tfconfig.StringVariable(schema) + return s +} + +func (s *StreamOnTableModel) WithShowInitialRows(showInitialRows string) *StreamOnTableModel { + s.ShowInitialRows = tfconfig.StringVariable(showInitialRows) + return s +} + +func (s *StreamOnTableModel) WithTable(table string) *StreamOnTableModel { + s.Table = tfconfig.StringVariable(table) + return s +} + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (s *StreamOnTableModel) WithAppendOnlyValue(value tfconfig.Variable) *StreamOnTableModel { + s.AppendOnly = value + return s +} + +func (s *StreamOnTableModel) WithAtValue(value tfconfig.Variable) *StreamOnTableModel { + s.At = value + return s +} + +func (s *StreamOnTableModel) WithBeforeValue(value tfconfig.Variable) *StreamOnTableModel { + s.Before = value + return s +} + +func (s *StreamOnTableModel) WithCommentValue(value tfconfig.Variable) *StreamOnTableModel { + s.Comment = value + return s +} + +func (s *StreamOnTableModel) WithCopyGrantsValue(value tfconfig.Variable) *StreamOnTableModel { + s.CopyGrants = value + return s +} + +func (s *StreamOnTableModel) WithDatabaseValue(value tfconfig.Variable) *StreamOnTableModel { + s.Database = value + return s +} + +func (s *StreamOnTableModel) WithFullyQualifiedNameValue(value tfconfig.Variable) *StreamOnTableModel { + s.FullyQualifiedName = value + return s +} + +func (s *StreamOnTableModel) WithNameValue(value tfconfig.Variable) *StreamOnTableModel { + s.Name = value + return s +} + +func (s *StreamOnTableModel) WithSchemaValue(value tfconfig.Variable) *StreamOnTableModel { + s.Schema = value + return s +} + +func (s *StreamOnTableModel) WithShowInitialRowsValue(value tfconfig.Variable) *StreamOnTableModel { + s.ShowInitialRows = value + return s +} + +func (s *StreamOnTableModel) WithTableValue(value tfconfig.Variable) *StreamOnTableModel { + s.Table = value + return s +} diff --git a/pkg/acceptance/check_destroy.go b/pkg/acceptance/check_destroy.go index 0001f75795..a05da630c2 100644 --- a/pkg/acceptance/check_destroy.go +++ b/pkg/acceptance/check_destroy.go @@ -194,6 +194,9 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.Stream: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Streams.ShowByID) }, + resources.StreamOnTable: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Streams.ShowByID) + }, resources.Streamlit: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Streamlits.ShowByID) }, diff --git a/pkg/acceptance/helpers/context_client.go b/pkg/acceptance/helpers/context_client.go index c3c2f2358b..e2186026e9 100644 --- a/pkg/acceptance/helpers/context_client.go +++ b/pkg/acceptance/helpers/context_client.go @@ -84,3 +84,13 @@ func (c *ContextClient) IssuerURL(t *testing.T) string { t.Helper() return 
fmt.Sprintf("https://%s.snowflakecomputing.com", c.CurrentAccount(t)) } + +func (c *ContextClient) LastQueryId(t *testing.T) string { + t.Helper() + ctx := context.Background() + + id, err := c.client().LastQueryId(ctx) + require.NoError(t, err) + + return id +} diff --git a/pkg/acceptance/helpers/stream_client.go b/pkg/acceptance/helpers/stream_client.go index 7a32a464b5..9403063c9c 100644 --- a/pkg/acceptance/helpers/stream_client.go +++ b/pkg/acceptance/helpers/stream_client.go @@ -67,3 +67,11 @@ func (c *StreamClient) Show(t *testing.T, id sdk.SchemaObjectIdentifier) (*sdk.S return c.client().ShowByID(ctx, id) } + +func (c *StreamClient) Alter(t *testing.T, req *sdk.AlterStreamRequest) { + t.Helper() + ctx := context.Background() + + err := c.client().Alter(ctx, req) + require.NoError(t, err) +} diff --git a/pkg/acceptance/helpers/table_client.go b/pkg/acceptance/helpers/table_client.go index 273fbe2e57..3b716d4fd9 100644 --- a/pkg/acceptance/helpers/table_client.go +++ b/pkg/acceptance/helpers/table_client.go @@ -129,6 +129,14 @@ func (c *TableClient) GetTableColumnsFor(t *testing.T, tableId sdk.SchemaObjectI return columns } +func (c *TableClient) InsertInt(t *testing.T, tableId sdk.SchemaObjectIdentifier) { + t.Helper() + ctx := context.Background() + + _, err := c.context.client.ExecForTests(ctx, fmt.Sprintf("INSERT INTO %s VALUES(1);", tableId.FullyQualifiedName())) + require.NoError(t, err) +} + type InformationSchemaColumns struct { TableCatalog string `db:"TABLE_CATALOG"` TableSchema string `db:"TABLE_SCHEMA"` diff --git a/pkg/provider/provider.go b/pkg/provider/provider.go index edb760e412..b42f1abbf3 100644 --- a/pkg/provider/provider.go +++ b/pkg/provider/provider.go @@ -478,6 +478,7 @@ func getResources() map[string]*schema.Resource { "snowflake_stage": resources.Stage(), "snowflake_storage_integration": resources.StorageIntegration(), "snowflake_stream": resources.Stream(), + "snowflake_stream_on_table": resources.StreamOnTable(), "snowflake_streamlit": resources.Streamlit(), "snowflake_table": resources.Table(), "snowflake_table_column_masking_policy_application": resources.TableColumnMaskingPolicyApplication(), diff --git a/pkg/provider/resources/resources.go b/pkg/provider/resources/resources.go index 219617d4e6..9ebc3e8070 100644 --- a/pkg/provider/resources/resources.go +++ b/pkg/provider/resources/resources.go @@ -46,6 +46,7 @@ const ( Stage resource = "snowflake_stage" StorageIntegration resource = "snowflake_storage_integration" Stream resource = "snowflake_stream" + StreamOnTable resource = "snowflake_stream_on_table" Streamlit resource = "snowflake_streamlit" Table resource = "snowflake_table" Tag resource = "snowflake_tag" diff --git a/pkg/resources/api_authentication_integration_common.go b/pkg/resources/api_authentication_integration_common.go index 8791a5d0df..89493d3afe 100644 --- a/pkg/resources/api_authentication_integration_common.go +++ b/pkg/resources/api_authentication_integration_common.go @@ -31,8 +31,9 @@ var apiAuthCommonSchema = map[string]*schema.Schema{ Description: "Specifies the client ID for the OAuth application in the external service.", }, "oauth_client_secret": { - Type: schema.TypeString, - Required: true, + Type: schema.TypeString, + Required: true, + // TODO: sensitive? Description: "Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. 
The connector uses this to request an access token from the ServiceNow instance.", }, "oauth_token_endpoint": { diff --git a/pkg/resources/common.go b/pkg/resources/common.go index b74b467930..8a5df06f11 100644 --- a/pkg/resources/common.go +++ b/pkg/resources/common.go @@ -11,7 +11,11 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) -const FullyQualifiedNameAttributeName = "fully_qualified_name" +const ( + FullyQualifiedNameAttributeName = "fully_qualified_name" + AtAttributeName = "at" + BeforeAttributeName = "before" +) // DiffSuppressStatement will suppress diffs between statements if they differ in only case or in // runs of whitespace (\s+ = \s). This is needed because the snowflake api does not faithfully diff --git a/pkg/resources/masking_policy.go b/pkg/resources/masking_policy.go index 27600154be..36739fde0f 100644 --- a/pkg/resources/masking_policy.go +++ b/pkg/resources/masking_policy.go @@ -95,7 +95,7 @@ var maskingPolicySchema = map[string]*schema.Schema{ ShowOutputAttributeName: { Type: schema.TypeList, Computed: true, - Description: "Outputs the result of `SHOW MASKING POLICY` for the given masking policy.", + Description: "Outputs the result of `SHOW MASKING POLICIES` for the given masking policy.", Elem: &schema.Resource{ Schema: schemas.ShowMaskingPolicySchema, }, @@ -221,7 +221,6 @@ func CreateMaskingPolicy(ctx context.Context, d *schema.ResourceData, meta any) if err != nil { return diag.FromErr(err) } - opts.ExemptOtherPolicies = sdk.Pointer(parsed) } diff --git a/pkg/resources/row_access_policy.go b/pkg/resources/row_access_policy.go index 4ccc0558e2..08949fbbcb 100644 --- a/pkg/resources/row_access_policy.go +++ b/pkg/resources/row_access_policy.go @@ -77,7 +77,7 @@ var rowAccessPolicySchema = map[string]*schema.Schema{ ShowOutputAttributeName: { Type: schema.TypeList, Computed: true, - Description: "Outputs the result of `SHOW ROW ACCESS POLICY` for the given row access policy.", + Description: "Outputs the result of `SHOW ROW ACCESS POLICIES` for the given row access policy.", Elem: &schema.Resource{ Schema: schemas.ShowRowAccessPolicySchema, }, diff --git a/pkg/resources/stream_common.go b/pkg/resources/stream_common.go new file mode 100644 index 0000000000..63b9e85262 --- /dev/null +++ b/pkg/resources/stream_common.go @@ -0,0 +1,59 @@ +package resources + +import ( + "context" + "fmt" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func handleStreamTimeTravel(d *schema.ResourceData, req *sdk.CreateOnTableStreamRequest) { + if v := d.Get(AtAttributeName).([]any); len(v) > 0 { + req.WithOn(*sdk.NewOnStreamRequest().WithAt(true).WithStatement(handleStreamTimeTravelStatement(v[0].(map[string]any)))) + } + if v := d.Get(BeforeAttributeName).([]any); len(v) > 0 { + req.WithOn(*sdk.NewOnStreamRequest().WithBefore(true).WithStatement(handleStreamTimeTravelStatement(v[0].(map[string]any)))) + } +} + +func handleStreamTimeTravelStatement(timeTravelConfig map[string]any) sdk.OnStreamStatementRequest { + statement := sdk.OnStreamStatementRequest{} + if v := timeTravelConfig["timestamp"].(string); len(v) > 0 { + statement.WithTimestamp(v) + } + if v := timeTravelConfig["offset"].(string); len(v) > 0 { + statement.WithOffset(v) + } + if v := timeTravelConfig["statement"].(string); len(v) > 
0 {
		statement.WithStatement(v)
	}
	if v := timeTravelConfig["stream"].(string); len(v) > 0 {
		statement.WithStream(v)
	}
	return statement
}

func DeleteStreamContext(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
	client := meta.(*provider.Context).Client
	id, err := sdk.ParseSchemaObjectIdentifier(d.Id())
	if err != nil {
		return diag.FromErr(err)
	}

	err = client.Streams.Drop(ctx, sdk.NewDropStreamRequest(id).WithIfExists(true))
	if err != nil {
		return diag.Diagnostics{
			diag.Diagnostic{
				Severity: diag.Error,
				Summary:  "Error deleting stream",
				Detail:   fmt.Sprintf("id %v err = %v", id.Name(), err),
			},
		}
	}

	d.SetId("")
	return nil
}
diff --git a/pkg/resources/stream_on_table.go b/pkg/resources/stream_on_table.go
new file mode 100644
index 0000000000..1c58d0b4a4
--- /dev/null
+++ b/pkg/resources/stream_on_table.go
@@ -0,0 +1,296 @@
+package resources
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"log"
+
+	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider"
+	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas"
+
+	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers"
+	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+var streamOnTableSchema = map[string]*schema.Schema{
+	"name": {
+		Type:        schema.TypeString,
+		Required:    true,
+		ForceNew:    true,
+		Description: blocklistedCharactersFieldDescription("Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created."),
+	},
+	"schema": {
+		Type:        schema.TypeString,
+		Required:    true,
+		ForceNew:    true,
+		Description: blocklistedCharactersFieldDescription("The schema in which to create the stream."),
+	},
+	"database": {
+		Type:        schema.TypeString,
+		Required:    true,
+		ForceNew:    true,
+		Description: blocklistedCharactersFieldDescription("The database in which to create the stream."),
+	},
+	"copy_grants": {
+		Type:        schema.TypeBool,
+		Optional:    true,
+		Default:     false,
+		Description: "Retains the access permissions from the original stream when a new stream is created using the OR REPLACE clause.",
+		DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool {
+			return oldValue != "" && oldValue != newValue
+		},
+	},
+	"table": {
+		Type:             schema.TypeString,
+		Required:         true,
+		Description:      "Specifies an identifier for the table the stream will monitor.",
+		DiffSuppressFunc: suppressIdentifierQuoting,
+		ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](),
+	},
+	"append_only": {
+		Type:             schema.TypeString,
+		Optional:         true,
+		Default:          BooleanDefault,
+		ValidateDiagFunc: validateBooleanString,
+		Description:      booleanStringFieldDescription("Specifies whether this is an append-only stream."),
+	},
+	"show_initial_rows": {
+		Type:             schema.TypeString,
+		Optional:         true,
+		Default:          BooleanDefault,
+		ValidateDiagFunc: validateBooleanString,
+		DiffSuppressFunc: IgnoreAfterCreation,
+		Description:      booleanStringFieldDescription("Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed.
This field is used only during stream creation."), + }, + "comment": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a comment for the stream.", + }, + AtAttributeName: schemas.AtSchema, + BeforeAttributeName: schemas.BeforeSchema, + ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `SHOW STREAMS` for the given stream.", + Elem: &schema.Resource{ + Schema: schemas.ShowStreamSchema, + }, + }, + DescribeOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `DESCRIBE STREAM` for the given stream.", + Elem: &schema.Resource{ + Schema: schemas.DescribeStreamSchema, + }, + }, + FullyQualifiedNameAttributeName: schemas.FullyQualifiedNameSchema, +} + +func StreamOnTable() *schema.Resource { + return &schema.Resource{ + CreateContext: CreateStreamOnTable(false), + ReadContext: ReadStreamOnTable(true), + UpdateContext: UpdateStreamOnTable, + DeleteContext: DeleteStreamContext, + Description: "Resource used to manage streams on tables. For more information, check [stream documentation](https://docs.snowflake.com/en/sql-reference/sql/create-stream).", + + CustomizeDiff: customdiff.All( + ComputedIfAnyAttributeChanged(streamOnTableSchema, ShowOutputAttributeName, "table", "append_only", "comment"), + ComputedIfAnyAttributeChanged(streamOnTableSchema, DescribeOutputAttributeName, "table", "append_only", "comment"), + ComputedIfAnyAttributeChanged(streamOnTableSchema, FullyQualifiedNameAttributeName, "name"), + ), + + Schema: streamOnTableSchema, + + Importer: &schema.ResourceImporter{ + StateContext: ImportStreamOnTable, + }, + } +} + +func ImportStreamOnTable(ctx context.Context, d *schema.ResourceData, meta any) ([]*schema.ResourceData, error) { + log.Printf("[DEBUG] Starting stream import") + client := meta.(*provider.Context).Client + id, err := sdk.ParseSchemaObjectIdentifier(d.Id()) + if err != nil { + return nil, err + } + + v, err := client.Streams.ShowByID(ctx, id) + if err != nil { + return nil, err + } + if err := d.Set("name", id.Name()); err != nil { + return nil, err + } + if err := d.Set("database", id.DatabaseName()); err != nil { + return nil, err + } + if err := d.Set("schema", id.SchemaName()); err != nil { + return nil, err + } + if err := d.Set("append_only", booleanStringFromBool(*v.Mode == sdk.StreamModeAppendOnly)); err != nil { + return nil, err + } + return []*schema.ResourceData{d}, nil +} + +func CreateStreamOnTable(orReplace bool) schema.CreateContextFunc { + return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + databaseName := d.Get("database").(string) + schemaName := d.Get("schema").(string) + name := d.Get("name").(string) + id := sdk.NewSchemaObjectIdentifier(databaseName, schemaName, name) + + tableIdRaw := d.Get("table").(string) + tableId, err := sdk.ParseSchemaObjectIdentifier(tableIdRaw) + if err != nil { + return diag.FromErr(err) + } + + req := sdk.NewCreateOnTableStreamRequest(id, tableId) + if orReplace { + req.WithOrReplace(true) + } + + if v := d.Get("copy_grants"); v.(bool) { + req.WithCopyGrants(true).WithOrReplace(true) + } + + if v := d.Get("append_only").(string); v != BooleanDefault { + parsed, err := booleanStringToBool(v) + if err != nil { + return diag.FromErr(err) + } + req.WithAppendOnly(parsed) + } + + if v := d.Get("show_initial_rows").(string); v != BooleanDefault { + parsed, err := booleanStringToBool(v) + if err != nil { + return 
diag.FromErr(err)
			}
			req.WithShowInitialRows(parsed)
		}

		handleStreamTimeTravel(d, req)

		if v, ok := d.GetOk("comment"); ok {
			req.WithComment(v.(string))
		}

		err = client.Streams.CreateOnTable(ctx, req)
		if err != nil {
			return diag.FromErr(err)
		}
		d.SetId(helpers.EncodeResourceIdentifier(id))

		return ReadStreamOnTable(false)(ctx, d, meta)
	}
}

func ReadStreamOnTable(withExternalChangesMarking bool) schema.ReadContextFunc {
	return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
		client := meta.(*provider.Context).Client
		id, err := sdk.ParseSchemaObjectIdentifier(d.Id())
		if err != nil {
			return diag.FromErr(err)
		}
		stream, err := client.Streams.ShowByID(ctx, id)
		if err != nil {
			if errors.Is(err, sdk.ErrObjectNotFound) {
				d.SetId("")
				return diag.Diagnostics{
					diag.Diagnostic{
						Severity: diag.Warning,
						Summary:  "Failed to query stream. Marking the resource as removed.",
						Detail:   fmt.Sprintf("stream name: %s, Err: %s", id.FullyQualifiedName(), err),
					},
				}
			}
			return diag.FromErr(err)
		}
		if err := d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()); err != nil {
			return diag.FromErr(err)
		}
		tableId, err := sdk.ParseSchemaObjectIdentifier(*stream.TableName)
		if err != nil {
			return diag.Diagnostics{
				diag.Diagnostic{
					Severity: diag.Error,
					Summary:  "Failed to parse table ID in Read.",
					Detail:   fmt.Sprintf("stream name: %s, Err: %s", id.FullyQualifiedName(), err),
				},
			}
		}
		if err := d.Set("table", tableId.FullyQualifiedName()); err != nil {
			return diag.FromErr(err)
		}
		if err := d.Set("comment", *stream.Comment); err != nil {
			return diag.FromErr(err)
		}
		streamDescription, err := client.Streams.Describe(ctx, id)
		if err != nil {
			return diag.FromErr(err)
		}
		if withExternalChangesMarking {
			if err = handleExternalChangesToObjectInShow(d,
				showMapping{"mode", "append_only", string(*stream.Mode), booleanStringFromBool(*stream.Mode == sdk.StreamModeAppendOnly), nil},
			); err != nil {
				return diag.FromErr(err)
			}
		}

		if err = setStateToValuesFromConfig(d, streamOnTableSchema, []string{
			"append_only",
		}); err != nil {
			return diag.FromErr(err)
		}

		if err = d.Set(ShowOutputAttributeName, []map[string]any{schemas.StreamToSchema(stream)}); err != nil {
			return diag.FromErr(err)
		}
		if err = d.Set(DescribeOutputAttributeName, []map[string]any{schemas.StreamDescriptionToSchema(*streamDescription)}); err != nil {
			return diag.FromErr(err)
		}
		return nil
	}
}

func UpdateStreamOnTable(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics {
	client := meta.(*provider.Context).Client
	id, err := sdk.ParseSchemaObjectIdentifier(d.Id())
	if err != nil {
		return diag.FromErr(err)
	}

	// changes on these fields cannot be handled with ForceNew, because then the object would be dropped and recreated explicitly, and COPY GRANTS would have no effect
	if keys := changedKeys(d, "table", "append_only", "at", "before"); len(keys) > 0 {
		log.Printf("[DEBUG] Detected change on %q, recreating...", keys)
		return CreateStreamOnTable(true)(ctx, d, meta)
	}

	if d.HasChange("comment") {
		comment := d.Get("comment").(string)
		if comment == "" {
			err := client.Streams.Alter(ctx, sdk.NewAlterStreamRequest(id).WithUnsetComment(true))
			if err != nil {
				return diag.FromErr(err)
			}
		} else {
			err := client.Streams.Alter(ctx, sdk.NewAlterStreamRequest(id).WithSetComment(comment))
			if err != nil {
				return diag.FromErr(err)
			}
		}
	}

	return ReadStreamOnTable(false)(ctx, d,
meta) +} diff --git a/pkg/resources/stream_on_table_acceptance_test.go b/pkg/resources/stream_on_table_acceptance_test.go new file mode 100644 index 0000000000..f16f81ee21 --- /dev/null +++ b/pkg/resources/stream_on_table_acceptance_test.go @@ -0,0 +1,518 @@ +package resources_test + +import ( + "fmt" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + tfconfig "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeroles" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + pluginconfig "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/tfversion" +) + +func TestAcc_StreamOnTable_Basic(t *testing.T) { + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + resourceId := helpers.EncodeResourceIdentifier(id) + resourceName := "snowflake_stream_on_table.test" + + table, cleanupTable := acc.TestClient().Table.CreateWithChangeTracking(t) + t.Cleanup(cleanupTable) + model := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.StreamOnTable), + Steps: []resource.TestStep{ + // without optionals + { + Config: config.FromModel(t, model), + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasAppendOnlyString(r.BooleanDefault). + HasTableString(table.ID().FullyQualifiedName()), + resourceshowoutputassert.StreamShowOutput(t, resourceName). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasTableName(table.ID().FullyQualifiedName()). + HasSourceType(sdk.StreamSourceTypeTable). + HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeDefault). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A"). 
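				// owner_role_type closes the show_output assertion chain; describe_output is verified below field by field with raw attribute checks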
+ HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.created_on")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.name", id.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.database_name", id.DatabaseName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.schema_name", id.SchemaName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner", snowflakeroles.Accountadmin.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.comment", "")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.table_name", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", "Table")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.#", "1")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.type", "DELTA")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.stale", "false")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.mode", string(sdk.StreamModeDefault))), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.stale_after")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner_role_type", "ROLE")), + ), + }, + // import without optionals + { + Config: config.FromModel(t, model), + ResourceName: resourceName, + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedStreamOnTableResource(t, resourceId). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasAppendOnlyString(r.BooleanFalse). + HasTableString(table.ID().FullyQualifiedName()), + ), + }, + // set all fields + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithCopyGrants(true).WithComment("foo").WithAppendOnly(r.BooleanTrue).WithShowInitialRows(r.BooleanTrue).WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "offset": pluginconfig.StringVariable("0"), + }))), + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasAppendOnlyString(r.BooleanTrue). + HasTableString(table.ID().FullyQualifiedName()), + resourceshowoutputassert.StreamShowOutput(t, resourceName). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasTableName(table.ID().FullyQualifiedName()). + HasSourceType(sdk.StreamSourceTypeTable). + HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeAppendOnly). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A"). + HasComment("foo"). 
+ HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.created_on")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.name", id.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.database_name", id.DatabaseName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.schema_name", id.SchemaName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner", snowflakeroles.Accountadmin.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.comment", "foo")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.table_name", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", "Table")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.#", "1")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.type", "DELTA")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.stale", "false")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.mode", string(sdk.StreamModeAppendOnly))), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.stale_after")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner_role_type", "ROLE")), + ), + }, + // external change + { + PreConfig: func() { + acc.TestClient().Stream.Alter(t, sdk.NewAlterStreamRequest(id).WithSetComment("bar")) + }, + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionUpdate), + }, + }, + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasAppendOnlyString(r.BooleanTrue). + HasTableString(table.ID().FullyQualifiedName()), + resourceshowoutputassert.StreamShowOutput(t, resourceName). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasTableName(table.ID().FullyQualifiedName()). + HasSourceType(sdk.StreamSourceTypeTable). + HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeAppendOnly). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A"). + HasComment("foo"). 
+ HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.created_on")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.name", id.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.database_name", id.DatabaseName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.schema_name", id.SchemaName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner", snowflakeroles.Accountadmin.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.comment", "foo")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.table_name", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", string(sdk.StreamSourceTypeTable))), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.#", "1")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.type", "DELTA")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.stale", "false")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.mode", string(sdk.StreamModeAppendOnly))), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.stale_after")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner_role_type", "ROLE")), + ), + }, + // update fields that recreate the object + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithAppendOnly(r.BooleanFalse)), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionUpdate), + }, + }, + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasAppendOnlyString(r.BooleanFalse). + HasTableString(table.ID().FullyQualifiedName()), + resourceshowoutputassert.StreamShowOutput(t, resourceName). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasTableName(table.ID().FullyQualifiedName()). + HasSourceType(sdk.StreamSourceTypeTable). + HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeDefault). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A"). + HasComment("foo"). 
+ HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.created_on")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.name", id.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.database_name", id.DatabaseName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.schema_name", id.SchemaName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner", snowflakeroles.Accountadmin.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.comment", "foo")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.table_name", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", string(sdk.StreamSourceTypeTable))), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.#", "1")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.type", "DELTA")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.stale", "false")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.mode", string(sdk.StreamModeDefault))), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.stale_after")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner_role_type", "ROLE")), + ), + }, + // import + { + Config: config.FromModel(t, model), + ResourceName: resourceName, + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedStreamOnTableResource(t, resourceId). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasAppendOnlyString(r.BooleanFalse). + HasTableString(table.ID().FullyQualifiedName()), + ), + }, + }, + }) +} + +func TestAcc_StreamOnTable_CopyGrants(t *testing.T) { + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + resourceName := "snowflake_stream_on_table.test" + + var createdOn string + + table, cleanupTable := acc.TestClient().Table.CreateWithChangeTracking(t) + t.Cleanup(cleanupTable) + model := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()) + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.StreamOnTable), + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, model.WithCopyGrants(true)), + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). + HasNameString(id.Name()), + assert.Check(resource.TestCheckResourceAttrWith(resourceName, "show_output.0.created_on", func(value string) error { + createdOn = value + return nil + })), + ), + }, + { + Config: config.FromModel(t, model.WithCopyGrants(false)), + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). 
+ HasNameString(id.Name()), + assert.Check(resource.TestCheckResourceAttrWith(resourceName, "show_output.0.created_on", func(value string) error { + if value != createdOn { + return fmt.Errorf("stream was recreated") + } + return nil + })), + ), + }, + { + Config: config.FromModel(t, model.WithCopyGrants(true)), + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). + HasNameString(id.Name()), + assert.Check(resource.TestCheckResourceAttrWith(resourceName, "show_output.0.created_on", func(value string) error { + if value != createdOn { + return fmt.Errorf("stream was recreated") + } + return nil + })), + ), + }, + }, + }) +} + +// There is no way to check at/before fields in show and describe. That's why we only try creating with these values, but do not assert them. +func TestAcc_StreamOnTable_At(t *testing.T) { + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + resourceId := helpers.EncodeResourceIdentifier(id) + resourceName := "snowflake_stream_on_table.test" + + table, cleanupTable := acc.TestClient().Table.CreateWithChangeTracking(t) + t.Cleanup(cleanupTable) + acc.TestClient().Table.InsertInt(t, table.ID()) + + lastQueryId := acc.TestClient().Context.LastQueryId(t) + + model := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()). + WithComment("foo").WithAppendOnly(r.BooleanTrue).WithShowInitialRows(r.BooleanTrue).WithCopyGrants(true) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.StreamOnTable), + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "offset": pluginconfig.StringVariable("0"), + }))), + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasTableString(table.ID().FullyQualifiedName()). + HasAppendOnlyString(r.BooleanTrue). + HasShowInitialRowsString(r.BooleanTrue). + HasCommentString("foo"), + resourceshowoutputassert.StreamShowOutput(t, resourceName). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasComment("foo"). + HasTableName(table.ID().FullyQualifiedName()). + HasSourceType(sdk.StreamSourceTypeTable). + HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeAppendOnly). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A").
+ HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.created_on")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.name", id.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.database_name", id.DatabaseName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.schema_name", id.SchemaName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner", snowflakeroles.Accountadmin.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.comment", "foo")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.table_name", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", "Table")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.#", "1")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.type", "DELTA")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.stale", "false")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.mode", "APPEND_ONLY")), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.stale_after")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner_role_type", "ROLE")), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "stream": pluginconfig.StringVariable(id.FullyQualifiedName()), + }))), + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). + HasNameString(id.Name()), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "statement": pluginconfig.StringVariable(lastQueryId), + }))), + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). + HasNameString(id.Name()), + ), + }, + // TODO(SNOW-1689111): test timestamps + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model), + ResourceName: resourceName, + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedStreamOnTableResource(t, resourceId). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasAppendOnlyString(r.BooleanTrue). + HasTableString(table.ID().FullyQualifiedName()), + ), + }, + }, + }) +} + +// There is no way to check at/before fields in show and describe. That's why in we try creating with these values, but do not assert them. 
+func TestAcc_StreamOnTable_Before(t *testing.T) { + acc.TestAccPreCheck(t) + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + resourceName := "snowflake_stream_on_table.test" + + table, cleanupTable := acc.TestClient().Table.CreateWithChangeTracking(t) + t.Cleanup(cleanupTable) + acc.TestClient().Table.InsertInt(t, table.ID()) + + lastQueryId := acc.TestClient().Context.LastQueryId(t) + + model := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()). + WithComment("foo").WithAppendOnly(r.BooleanTrue).WithShowInitialRows(r.BooleanTrue).WithCopyGrants(true) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.StreamOnTable), + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/before"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "offset": pluginconfig.StringVariable("0"), + }))), + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). + HasNameString(id.Name()). + HasDatabaseString(id.DatabaseName()). + HasSchemaString(id.SchemaName()). + HasFullyQualifiedNameString(id.FullyQualifiedName()). + HasTableString(table.ID().FullyQualifiedName()). + HasAppendOnlyString(r.BooleanTrue). + HasShowInitialRowsString(r.BooleanTrue). + HasCommentString("foo"), + resourceshowoutputassert.StreamShowOutput(t, resourceName). + HasCreatedOnNotEmpty(). + HasName(id.Name()). + HasDatabaseName(id.DatabaseName()). + HasSchemaName(id.SchemaName()). + HasOwner(snowflakeroles.Accountadmin.Name()). + HasComment("foo"). + HasTableName(table.ID().FullyQualifiedName()). + HasSourceType(sdk.StreamSourceTypeTable). + HasBaseTables([]sdk.SchemaObjectIdentifier{table.ID()}). + HasType("DELTA"). + HasStale("false"). + HasMode(sdk.StreamModeAppendOnly). + HasStaleAfterNotEmpty(). + HasInvalidReason("N/A"). 
+ HasOwnerRoleType("ROLE"), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.created_on")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.name", id.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.database_name", id.DatabaseName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.schema_name", id.SchemaName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner", snowflakeroles.Accountadmin.Name())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.comment", "foo")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.table_name", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", "Table")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.#", "1")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.type", "DELTA")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.stale", "false")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.mode", "APPEND_ONLY")), + assert.Check(resource.TestCheckResourceAttrSet(resourceName, "describe_output.0.stale_after")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner_role_type", "ROLE")), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/before"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "stream": pluginconfig.StringVariable(id.FullyQualifiedName()), + }))), + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). + HasNameString(id.Name()), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/before"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "statement": pluginconfig.StringVariable(lastQueryId), + }))), + Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). 
+ HasNameString(id.Name()), + ), + }, + // TODO(SNOW-1689111): test timestamps + }, + }) +} diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/at/test.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/at/test.tf new file mode 100644 index 0000000000..1b59b8225e --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StreamOnTable/at/test.tf @@ -0,0 +1,19 @@ +resource "snowflake_stream_on_table" "test" { + name = var.name + database = var.database + schema = var.schema + + copy_grants = true + table = var.table + append_only = var.append_only + show_initial_rows = "true" + + at { + timestamp = try(var.at["timestamp"], null) + offset = try(var.at["offset"], null) + stream = try(var.at["stream"], null) + statement = try(var.at["statement"], null) + } + + comment = var.comment +} diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/at/variables.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/at/variables.tf new file mode 100644 index 0000000000..122583b5bb --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StreamOnTable/at/variables.tf @@ -0,0 +1,31 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "schema" { + type = string +} + +variable "table" { + type = string +} + +variable "copy_grants" { + type = bool +} + +variable "append_only" { + type = bool +} + +variable "at" { + type = map(string) +} + +variable "comment" { + type = string +} diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/basic/test.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/basic/test.tf new file mode 100644 index 0000000000..e61457cfac --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StreamOnTable/basic/test.tf @@ -0,0 +1,13 @@ +resource "snowflake_view" "test" { + name = var.name + database = var.database + schema = var.schema + statement = var.statement + + dynamic "column" { + for_each = var.column + content { + column_name = column.value["column_name"] + } + } +} diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/basic/variables.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/basic/variables.tf new file mode 100644 index 0000000000..dc7d599d87 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StreamOnTable/basic/variables.tf @@ -0,0 +1,19 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + +variable "schema" { + type = string +} + +variable "statement" { + type = string +} + +variable "column" { + type = set(map(string)) +} diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf new file mode 100644 index 0000000000..84758aaf72 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf @@ -0,0 +1,19 @@ +resource "snowflake_stream_on_table" "test" { + name = var.name + database = var.database + schema = var.schema + + copy_grants = true + table = var.table + append_only = "true" + show_initial_rows = "true" + + at { + timestamp = try(var.before["timestamp"], null) + offset = try(var.before["offset"], null) + stream = try(var.before["stream"], null) + statement = try(var.before["statement"], null) + } + + comment = var.comment +} diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/before/variables.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/before/variables.tf new file mode 100644 index 0000000000..6322e980ff --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StreamOnTable/before/variables.tf @@ -0,0 +1,27 @@ +variable "name" { + type = string +} + +variable "database" { + type = string +} + 
+variable "schema" { + type = string +} + +variable "table" { + type = string +} + +variable "copy_grants" { + type = bool +} + +variable "before" { + type = map(string) +} + +variable "comment" { + type = string +} diff --git a/pkg/resources/view.go b/pkg/resources/view.go index d9f2f6bb51..a5fb1dbcb8 100644 --- a/pkg/resources/view.go +++ b/pkg/resources/view.go @@ -124,7 +124,7 @@ var viewSchema = map[string]*schema.Schema{ "minutes": { Type: schema.TypeInt, Optional: true, - Description: fmt.Sprintf("Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron`. Valid values are: %s. Due to Snowflake limitations, changes in this field is not managed by the provider. Please consider using [taint](https://developer.hashicorp.com/terraform/cli/commands/taint) command, `using_cron` field, or [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) metadata argument.", possibleValuesListed(sdk.AllViewDataMetricScheduleMinutes)), + Description: fmt.Sprintf("Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron`. Valid values are: %s. Due to Snowflake limitations, changes in this field are not managed by the provider. Please consider using [taint](https://developer.hashicorp.com/terraform/cli/commands/taint) command, `using_cron` field, or [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) metadata argument.", possibleValuesListed(sdk.AllViewDataMetricScheduleMinutes)), ValidateDiagFunc: IntInSlice(sdk.AllViewDataMetricScheduleMinutes), ConflictsWith: []string{"data_metric_schedule.using_cron"}, }, @@ -844,7 +844,7 @@ func extractDataMetricFunctions(v any) (dmfs []ViewDataMetricFunctionConfig, err return } -func changedKeys(d *schema.ResourceData, keys []string) []string { +func changedKeys(d *schema.ResourceData, keys ...string) []string { changed := make([]string, 0, len(keys)) for _, key := range keys { if d.HasChange(key) { @@ -862,8 +862,8 @@ func UpdateView(ctx context.Context, d *schema.ResourceData, meta any) diag.Diag } // change on these fields can not be ForceNew because then view is dropped explicitly and copying grants does not have effect - if d.HasChange("statement") || d.HasChange("is_temporary") || d.HasChange("is_recursive") || d.HasChange("copy_grant") || d.HasChange("column") { - log.Printf("[DEBUG] Detected change on %q, recreating...", changedKeys(d, []string{"statement", "is_temporary", "is_recursive", "copy_grant", "column"})) + if keys := changedKeys(d, "statement", "is_temporary", "is_recursive", "copy_grant", "column"); len(keys) > 0 { + log.Printf("[DEBUG] Detected change on %q, recreating...", keys) return CreateView(true)(ctx, d, meta) } diff --git a/pkg/schemas/common_types.go b/pkg/schemas/common_types.go index ac4f7f9e0f..841609c516 100644 --- a/pkg/schemas/common_types.go +++ b/pkg/schemas/common_types.go @@ -27,3 +27,75 @@ var FullyQualifiedNameSchema = &schema.Schema{ Computed: true, Description: "Fully qualified name of the resource. 
For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution).", } + +var AtSchema = &schema.Schema{ + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "timestamp": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies an exact date and time to use for Time Travel. The value must be explicitly cast to a TIMESTAMP, TIMESTAMP_LTZ, TIMESTAMP_NTZ, or TIMESTAMP_TZ data type.", + ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, + }, + "offset": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the difference in seconds from the current time to use for Time Travel, in the form -N where N can be an integer or arithmetic expression (e.g. -120 is 120 seconds, -30*60 is 1800 seconds or 30 minutes).", + ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, + }, + "statement": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the query ID of a statement to use as the reference point for Time Travel. This parameter supports any statement of one of the following types: DML (e.g. INSERT, UPDATE, DELETE), TCL (BEGIN, COMMIT transaction), SELECT.", + ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, + }, + "stream": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the identifier (i.e. name) for an existing stream on the queried table or view. The current offset in the stream is used as the AT point in time for returning change data for the source object.", + ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, + }, + }, + }, + ConflictsWith: []string{"before"}, +} + +var BeforeSchema = &schema.Schema{ + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "timestamp": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies an exact date and time to use for Time Travel. The value must be explicitly cast to a TIMESTAMP, TIMESTAMP_LTZ, TIMESTAMP_NTZ, or TIMESTAMP_TZ data type.", + ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, + }, + "offset": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the difference in seconds from the current time to use for Time Travel, in the form -N where N can be an integer or arithmetic expression (e.g. -120 is 120 seconds, -30*60 is 1800 seconds or 30 minutes).", + ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, + }, + "statement": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the query ID of a statement to use as the reference point for Time Travel. 
This parameter supports any statement of one of the following types: DML (e.g. INSERT, UPDATE, DELETE), TCL (BEGIN, COMMIT transaction), SELECT.", + ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, + }, + "stream": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the identifier (i.e. name) for an existing stream on the queried table or view. The current offset in the stream is used as the AT point in time for returning change data for the source object.", + ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, + }, + }, + }, + ConflictsWith: []string{"at"}, +} diff --git a/pkg/schemas/stream.go b/pkg/schemas/stream.go new file mode 100644 index 0000000000..c4f9f22c80 --- /dev/null +++ b/pkg/schemas/stream.go @@ -0,0 +1,132 @@ +// Code generated by sdk-to-schema generator; DO NOT EDIT. + +package schemas + +import ( + "log" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +// DescribeStreamSchema represents output of DESCRIBE query for the single Stream. +var DescribeStreamSchema = map[string]*schema.Schema{ + "created_on": { + Type: schema.TypeString, + Computed: true, + }, + "name": { + Type: schema.TypeString, + Computed: true, + }, + "database_name": { + Type: schema.TypeString, + Computed: true, + }, + "schema_name": { + Type: schema.TypeString, + Computed: true, + }, + "owner": { + Type: schema.TypeString, + Computed: true, + }, + "comment": { + Type: schema.TypeString, + Computed: true, + }, + "table_name": { + Type: schema.TypeString, + Computed: true, + }, + "source_type": { + Type: schema.TypeString, + Computed: true, + }, + "base_tables": { + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Computed: true, + }, + "type": { + Type: schema.TypeString, + Computed: true, + }, + "stale": { + Type: schema.TypeString, + Computed: true, + }, + "mode": { + Type: schema.TypeString, + Computed: true, + }, + "stale_after": { + Type: schema.TypeString, + Computed: true, + }, + "invalid_reason": { + Type: schema.TypeString, + Computed: true, + }, + "owner_role_type": { + Type: schema.TypeString, + Computed: true, + }, +} + +var _ = ShowStreamSchema + +func StreamDescriptionToSchema(stream sdk.Stream) map[string]any { + streamSchema := make(map[string]any) + streamSchema["created_on"] = stream.CreatedOn.String() + streamSchema["name"] = stream.Name + streamSchema["database_name"] = stream.DatabaseName + streamSchema["schema_name"] = stream.SchemaName + if stream.Owner != nil { + streamSchema["owner"] = stream.Owner + } + if stream.Comment != nil { + streamSchema["comment"] = stream.Comment + } + if stream.TableName != nil { + tableId, err := sdk.ParseSchemaObjectIdentifier(*stream.TableName) + if err != nil { + log.Printf("[DEBUG] could not parse table ID: %v", err) + } else { + streamSchema["table_name"] = tableId.FullyQualifiedName() + } + } + if stream.SourceType != nil { + streamSchema["source_type"] = stream.SourceType + } + if stream.BaseTables != nil { + idsFQN := make([]string, len(stream.BaseTables)) + for i := range idsFQN { + idsFQN[i] = stream.BaseTables[i].FullyQualifiedName() + } + streamSchema["base_tables"] = idsFQN + } + if stream.Type != nil { + streamSchema["type"] = stream.Type + } + if stream.Stale != nil { + streamSchema["stale"] = stream.Stale + } + if stream.Mode != nil { + streamSchema["mode"] = stream.Mode +
} + if stream.StaleAfter != nil { + streamSchema["stale_after"] = stream.StaleAfter.String() + } + if stream.InvalidReason != nil { + streamSchema["invalid_reason"] = stream.InvalidReason + } + if stream.OwnerRoleType != nil { + streamSchema["owner_role_type"] = stream.OwnerRoleType + } + return streamSchema +} + +var _ = StreamToSchema diff --git a/pkg/schemas/stream_gen.go b/pkg/schemas/stream_gen.go index 841f6fb79b..7a116777ab 100644 --- a/pkg/schemas/stream_gen.go +++ b/pkg/schemas/stream_gen.go @@ -3,11 +3,14 @@ package schemas import ( + "log" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) // ShowStreamSchema represents output of SHOW query for the single Stream. +// TODO: merge with desc schema var ShowStreamSchema = map[string]*schema.Schema{ "created_on": { Type: schema.TypeString, @@ -42,7 +45,10 @@ var ShowStreamSchema = map[string]*schema.Schema{ Computed: true, }, "base_tables": { - Type: schema.TypeString, + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, Computed: true, }, "type": { @@ -86,13 +92,22 @@ func StreamToSchema(stream *sdk.Stream) map[string]any { streamSchema["comment"] = stream.Comment } if stream.TableName != nil { - streamSchema["table_name"] = stream.TableName + tableId, err := sdk.ParseSchemaObjectIdentifier(*stream.TableName) + if err != nil { + log.Printf("[DEBUG] could not parse table ID: %v", err) + } else { + streamSchema["table_name"] = tableId.FullyQualifiedName() + } } if stream.SourceType != nil { streamSchema["source_type"] = stream.SourceType } if stream.BaseTables != nil { - streamSchema["base_tables"] = stream.BaseTables + idsFQN := make([]string, len(stream.BaseTables)) + for i := range idsFQN { + idsFQN[i] = stream.BaseTables[i].FullyQualifiedName() + } + streamSchema["base_tables"] = idsFQN } if stream.Type != nil { streamSchema["type"] = stream.Type diff --git a/pkg/sdk/parsers.go b/pkg/sdk/parsers.go index da3d53fc85..e980e75d01 100644 --- a/pkg/sdk/parsers.go +++ b/pkg/sdk/parsers.go @@ -37,3 +37,17 @@ func ParseCommaSeparatedStringArray(value string, trimQuotes bool) []string { } return trimmedListItems } + +// TODO: add description and unit tests +func ParseCommaSeparatedSchemaObjectIdentifierArray(value string) ([]SchemaObjectIdentifier, error) { + idsRaw := ParseCommaSeparatedStringArray(value, false) + ids := make([]SchemaObjectIdentifier, len(idsRaw)) + for i := range idsRaw { + id, err := ParseSchemaObjectIdentifier(idsRaw[i]) + if err != nil { + return nil, err + } + ids[i] = id + } + return ids, nil +} diff --git a/pkg/sdk/streams_def.go b/pkg/sdk/streams_def.go index 535a4ac5db..fae2264c89 100644 --- a/pkg/sdk/streams_def.go +++ b/pkg/sdk/streams_def.go @@ -1,9 +1,54 @@ package sdk -import g "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/poc/generator" +import ( + "fmt" + "strings" + + g "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/poc/generator" +) //go:generate go run ./poc/main.go +type StreamSourceType string + +const ( + StreamSourceTypeTable StreamSourceType = "Table" + StreamSourceTypeExternalTable StreamSourceType = "External Table" + StreamSourceTypeView StreamSourceType = "View" + StreamSourceTypeStage StreamSourceType = "Stage" +) + +func ToStreamSourceType(s string) (StreamSourceType, error) { + switch streamSourceType := StreamSourceType(s); streamSourceType { + case StreamSourceTypeTable, + StreamSourceTypeExternalTable, + 
StreamSourceTypeView, + StreamSourceTypeStage: + return streamSourceType, nil + default: + return "", fmt.Errorf("invalid stream source type: %s", s) + } +} + +type StreamMode string + +const ( + StreamModeDefault StreamMode = "DEFAULT" + StreamModeAppendOnly StreamMode = "APPEND_ONLY" + StreamModeInsertOnly StreamMode = "INSERT_ONLY" +) + +func ToStreamMode(s string) (StreamMode, error) { + switch streamMode := StreamMode(strings.ToUpper(s)); streamMode { + case StreamModeDefault, + StreamModeAppendOnly, + StreamModeInsertOnly: + return streamMode, nil + default: + return "", fmt.Errorf("invalid stream mode: %s", s) + } +} + var ( onStreamDef = g.NewQueryStruct("OnStream"). OptionalSQL("AT"). @@ -45,11 +90,11 @@ var ( Field("Owner", "*string"). Field("Comment", "*string"). Field("TableName", "*string"). - Field("SourceType", "*string"). - Field("BaseTables", "*string"). + Field("SourceType", "*StreamSourceType"). + Field("BaseTables", "[]SchemaObjectIdentifier"). Field("Type", "*string"). Field("Stale", "*string"). - Field("Mode", "*string"). + Field("Mode", "*StreamMode"). Field("StaleAfter", "*time.Time"). Field("InvalidReason", "*string"). Field("OwnerRoleType", "*string") diff --git a/pkg/sdk/streams_gen.go b/pkg/sdk/streams_gen.go index 0471d47595..da44c90a07 100644 --- a/pkg/sdk/streams_gen.go +++ b/pkg/sdk/streams_gen.go @@ -163,11 +163,11 @@ type Stream struct { Owner *string Comment *string TableName *string - SourceType *string - BaseTables *string + SourceType *StreamSourceType + BaseTables []SchemaObjectIdentifier Type *string Stale *string - Mode *string + Mode *StreamMode StaleAfter *time.Time InvalidReason *string OwnerRoleType *string diff --git a/pkg/sdk/streams_impl_gen.go b/pkg/sdk/streams_impl_gen.go index 64b1b7e81a..4e6a7c02b2 100644 --- a/pkg/sdk/streams_impl_gen.go +++ b/pkg/sdk/streams_impl_gen.go @@ -2,6 +2,7 @@ package sdk import ( "context" + "log" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" ) @@ -124,6 +125,7 @@ func (r *CreateOnExternalTableStreamRequest) toOpts() *CreateOnExternalTableStre Comment: r.Comment, } if r.On != nil { + opts.On = &OnStream{ At: r.On.At, Before: r.On.Before, @@ -242,10 +244,20 @@ func (r showStreamsDbRow) convert() *Stream { s.TableName = &r.TableName.String } if r.SourceType.Valid { - s.SourceType = &r.SourceType.String + sourceType, err := ToStreamSourceType(r.SourceType.String) + if err != nil { + log.Printf("[DEBUG] error converting show stream: %v", err) + } else { + s.SourceType = &sourceType + } } if r.BaseTables.Valid { - s.BaseTables = &r.BaseTables.String + baseTables, err := ParseCommaSeparatedSchemaObjectIdentifierArray(r.BaseTables.String) + if err != nil { + log.Printf("[DEBUG] error converting show stream: %v", err) + } else { + s.BaseTables = baseTables + } } if r.Type.Valid { s.Type = &r.Type.String @@ -254,7 +266,12 @@ func (r showStreamsDbRow) convert() *Stream { s.Stale = &r.Stale.String } if r.Mode.Valid { - s.Mode = &r.Mode.String + mode, err := ToStreamMode(r.Mode.String) + if err != nil { + log.Printf("[DEBUG] error converting show stream: %v", err) + } else { + s.Mode = &mode + } } if r.InvalidReason.Valid { s.InvalidReason = &r.InvalidReason.String diff --git a/pkg/sdk/streams_validations_gen.go b/pkg/sdk/streams_validations_gen.go index 14fb378c7a..d5e9eda232 100644 --- a/pkg/sdk/streams_validations_gen.go +++ b/pkg/sdk/streams_validations_gen.go @@ -32,7 +32,7 @@ func (opts *CreateOnTableStreamOptions) validate() error { } if valueSet(opts.On.Statement) { if 
!exactlyOneValueSet(opts.On.Statement.Timestamp, opts.On.Statement.Offset, opts.On.Statement.Statement, opts.On.Statement.Stream) { - errs = append(errs, errExactlyOneOf("CreateOnTableStreamOptions.On.Statement", "Timestamp", "Offset", "Statement", "Stream")) + errs = append(errs, errExactlyOneOf("CreateOnViewStreamOptions.On.Statement", "Timestamp", "Offset", "Statement", "Stream")) } } } @@ -59,7 +59,7 @@ func (opts *CreateOnExternalTableStreamOptions) validate() error { } if valueSet(opts.On.Statement) { if !exactlyOneValueSet(opts.On.Statement.Timestamp, opts.On.Statement.Offset, opts.On.Statement.Statement, opts.On.Statement.Stream) { - errs = append(errs, errExactlyOneOf("CreateOnExternalTableStreamOptions.On.Statement", "Timestamp", "Offset", "Statement", "Stream")) + errs = append(errs, errExactlyOneOf("CreateOnViewStreamOptions.On.Statement", "Timestamp", "Offset", "Statement", "Stream")) } } } diff --git a/pkg/sdk/testint/streams_gen_integration_test.go b/pkg/sdk/testint/streams_gen_integration_test.go index 3c53684ca1..e79508ab8f 100644 --- a/pkg/sdk/testint/streams_gen_integration_test.go +++ b/pkg/sdk/testint/streams_gen_integration_test.go @@ -53,8 +53,8 @@ func TestInt_Streams(t *testing.T) { HasDatabaseName(id.DatabaseName()). HasSchemaName(id.SchemaName()). HasComment("some comment"). - HasSourceType("Table"). - HasMode("APPEND_ONLY"). + HasSourceType(sdk.StreamSourceTypeTable). + HasMode(sdk.StreamModeAppendOnly). HasTableId(tableId.FullyQualifiedName()), ) @@ -130,8 +130,8 @@ func TestInt_Streams(t *testing.T) { HasDatabaseName(id.DatabaseName()). HasSchemaName(id.SchemaName()). HasComment("some comment"). - HasSourceType("External Table"). - HasMode("INSERT_ONLY"). + HasSourceType(sdk.StreamSourceTypeExternalTable). + HasMode(sdk.StreamModeInsertOnly). HasTableId(externalTableId.FullyQualifiedName()), ) }) @@ -151,8 +151,8 @@ func TestInt_Streams(t *testing.T) { HasDatabaseName(id.DatabaseName()). HasSchemaName(id.SchemaName()). HasComment("some comment"). - HasSourceType("Stage"). - HasMode("DEFAULT"). + HasSourceType(sdk.StreamSourceTypeStage). + HasMode(sdk.StreamModeDefault). HasStageName(stage.ID().Name()), ) }) @@ -178,8 +178,8 @@ func TestInt_Streams(t *testing.T) { HasDatabaseName(id.DatabaseName()). HasSchemaName(id.SchemaName()). HasComment("some comment"). - HasSourceType("View"). - HasMode("APPEND_ONLY"). + HasSourceType(sdk.StreamSourceTypeView). + HasMode(sdk.StreamModeAppendOnly). HasTableId(view.ID().FullyQualifiedName()), ) }) @@ -204,8 +204,8 @@ func TestInt_Streams(t *testing.T) { HasDatabaseName(id.DatabaseName()). HasSchemaName(id.SchemaName()). HasComment("some comment"). - HasSourceType("Table"). - HasMode("DEFAULT"). + HasSourceType(sdk.StreamSourceTypeTable). + HasMode(sdk.StreamModeDefault). HasTableId(table.ID().FullyQualifiedName()), ) }) @@ -359,8 +359,8 @@ func TestInt_Streams(t *testing.T) { HasDatabaseName(id.DatabaseName()). HasSchemaName(id.SchemaName()). HasComment("some comment"). - HasSourceType("Table"). - HasMode("DEFAULT"). + HasSourceType(sdk.StreamSourceTypeTable). + HasMode(sdk.StreamModeDefault). HasTableId(table.ID().FullyQualifiedName()), ) }) @@ -450,8 +450,8 @@ func TestInt_Streams(t *testing.T) { HasDatabaseName(id.DatabaseName()). HasSchemaName(id.SchemaName()). HasComment("some comment"). - HasSourceType("Table"). - HasMode("DEFAULT"). + HasSourceType(sdk.StreamSourceTypeTable). + HasMode(sdk.StreamModeDefault). 
HasTableId(table.ID().FullyQualifiedName()), + ) + }) } diff --git a/templates/resources/stream.md.tmpl b/templates/resources/stream.md.tmpl new file mode 100644 index 0000000000..d1045d341c --- /dev/null +++ b/templates/resources/stream.md.tmpl @@ -0,0 +1,35 @@ +--- +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ if gt (len (split .Description "<deprecation>")) 1 -}} +{{ index (split .Description "<deprecation>") 1 | plainmarkdown | trimspace | prefixlines " " }} +{{- else -}} +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +{{- end }} +--- + +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} diff --git a/v1-preparations/CHANGES_BEFORE_V1.md b/v1-preparations/CHANGES_BEFORE_V1.md index b42c021e3a..ce0f16100c 100644 --- a/v1-preparations/CHANGES_BEFORE_V1.md +++ b/v1-preparations/CHANGES_BEFORE_V1.md @@ -24,7 +24,7 @@ Refer to [this issue](https://github.com/Snowflake-Labs/terraform-provider-snowf This point connects with the one on about the [default values](#default-values). First of all, we want to reduce the coupling between Snowflake and the provider. Secondly, some of the value limits are soft (consult issues [#2948](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2948) and [#1919](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1919)) which makes it difficult to align provider validations with the custom setups. Lastly, some values depend on the Snowflake edition used. -Because of all that, we plan to reduce the number of validations (mostly numeric) on the provider side. We won't get rid of them entirely, so that successful plans but apply failures can be limited, but please be aware that you may encounter them. +Because of all that, we plan to reduce the number of validations (mostly numeric) on the provider side. We won't get rid of them entirely, so that successful plans but apply failures can be limited, but please be aware that you may encounter them. ## "Empty" values The [Terraform SDK v2](https://github.com/hashicorp/terraform-plugin-sdk) that is currently used in our provider detects the presence of the attribute based on its non-zero Golang value. This means, that it is not possible to distinguish the removal of the value inside a config from setting it explicitely to a zero value, e.g.
`0` for the numeric value (check [this thread](https://discuss.hashicorp.com/t/is-it-possible-to-differentiate-between-a-zero-value-and-a-removed-property-in-the-terraform-provider-sdk/43131)). Before we migrate to the new recommended [Terraform Plugin Framework](https://github.com/hashicorp/terraform-plugin-framework) we want to handle such cases the same way inside the provider. It means that: @@ -73,4 +73,7 @@ may act in an unexpected way. An alternative solution is to use plain SQL with [ ## Identifier design decisions The summary of design decisions taken during the [identifiers rework](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md#identifiers-rework) -was put into a separate document ([here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md)). \ No newline at end of file +was put into a separate document ([here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md)). + +## Supporting COPY GRANTS +Some Snowflake objects, like [views](https://docs.snowflake.com/en/sql-reference/sql/create-view#examples), support the `COPY GRANTS` property, which retains the access permissions from the original object when a new object is created using the `OR REPLACE` clause. We have decided to support this property in the relevant resources. By default, the resources are not created with `COPY GRANTS`, but you can enable it with `copy_grants = true`. However, a detected diff in some of the fields requires recreating the resource with a new field set during `CREATE`. Such fields cannot be marked as ForceNew, because Terraform would then call `DROP` and `CREATE`, and the grants would not be copied. To mitigate this, when Terraform detects a change in such a field, the provider runs `CREATE OR REPLACE` in the `UPDATE` function instead. During such changes, the objects are recreated, but in the plan they appear as updates. This should not cause problems for objects that are stateless. We will revisit this topic with the migration to [Terraform Plugin Framework](https://github.com/hashicorp/terraform-plugin-framework).
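+
+For illustration, below is a minimal sketch of a configuration relying on this behavior. It uses the `snowflake_stream_on_table` resource introduced in this change; the referenced `snowflake_table.table` resource is assumed to exist, as in the resource example above:
+
+```terraform
+resource "snowflake_stream_on_table" "example" {
+  name     = "stream"
+  database = "database"
+  schema   = "schema"
+  table    = snowflake_table.table.fully_qualified_name
+
+  # With copy_grants = true, changing a recreation-triggering field
+  # (e.g. append_only) is applied with CREATE OR REPLACE inside the
+  # provider's UPDATE function, so existing grants are retained.
+  copy_grants = true
+  append_only = "true"
+}
+```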
From c67968c20a4f56b0c6e02df2909021f83c8e5041 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Tue, 1 Oct 2024 14:41:43 +0200 Subject: [PATCH 02/11] Cleanup --- MIGRATION_GUIDE.md | 3 +- docs/resources/masking_policy.md | 2 +- docs/resources/row_access_policy.md | 2 +- docs/resources/stream_on_table.md | 147 ++++++++++++++++++ docs/resources/view.md | 2 +- .../snowflake_stream_on_table/resource.tf | 2 +- pkg/acceptance/bettertestspoc/README.md | 14 +- .../objectassert/stream_snowflake_gen.go | 2 - .../task_parameters_snowflake_gen.go | 2 +- .../stream_show_output_ext.go | 5 +- .../api_authentication_integration_common.go | 5 +- pkg/resources/stream_common.go | 7 +- pkg/resources/stream_on_table.go | 53 ++++--- .../stream_on_table_acceptance_test.go | 46 ++++++ .../TestAcc_StreamOnTable/basic/test.tf | 13 -- .../TestAcc_StreamOnTable/basic/variables.tf | 19 --- .../TestAcc_StreamOnTable/before/test.tf | 2 +- pkg/sdk/parsers.go | 3 +- pkg/sdk/parsers_test.go | 94 +++++++++++ pkg/sdk/streams_impl_gen.go | 1 - pkg/sdk/streams_validations_gen.go | 4 +- ...stream.md.tmpl => stream_on_table.md.tmpl} | 2 +- 22 files changed, 346 insertions(+), 84 deletions(-) create mode 100644 docs/resources/stream_on_table.md delete mode 100644 pkg/resources/testdata/TestAcc_StreamOnTable/basic/test.tf delete mode 100644 pkg/resources/testdata/TestAcc_StreamOnTable/basic/variables.tf rename templates/resources/{stream.md.tmpl => stream_on_table.md.tmpl} (97%) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 45313f6b9f..73368c9815 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -8,8 +8,7 @@ across different versions. ### *(new feature)* snowflake_stream_on_table resource -To enhance clarity and functionality, the new resource `snowflake_stream_on_table` has been introduced -to replace the previous `snowflake_stream`. Recognizing that the old resource carried multiple responsibilities within a single entity, we opted to divide it into more specialized resources. +To enhance clarity and functionality, the new resource `snowflake_stream_on_table` has been introduced to replace the previous `snowflake_stream`. Recognizing that the old resource carried multiple responsibilities within a single entity, we opted to divide it into more specialized resources. The newly introduced resources are aligned with the latest Snowflake documentation at the time of implementation, and adhere to our [new conventions](#general-changes). This segregation was based on the object on which the stream is created. The mapping between SQL statements and the resources is the following: - `ON TABLE ` -> `snowflake_stream_on_table` diff --git a/docs/resources/masking_policy.md b/docs/resources/masking_policy.md index 0c119173f0..513083aaf3 100644 --- a/docs/resources/masking_policy.md +++ b/docs/resources/masking_policy.md @@ -100,7 +100,7 @@ EOF - `describe_output` (List of Object) Outputs the result of `DESCRIBE MASKING POLICY` for the given masking policy. (see [below for nested schema](#nestedatt--describe_output)) - `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). - `id` (String) The ID of this resource. -- `show_output` (List of Object) Outputs the result of `SHOW MASKING POLICY` for the given masking policy. (see [below for nested schema](#nestedatt--show_output)) +- `show_output` (List of Object) Outputs the result of `SHOW MASKING POLICIES` for the given masking policy. 
(see [below for nested schema](#nestedatt--show_output)) ### Nested Schema for `argument` diff --git a/docs/resources/row_access_policy.md b/docs/resources/row_access_policy.md index 5e0c651176..3bd3afa7d3 100644 --- a/docs/resources/row_access_policy.md +++ b/docs/resources/row_access_policy.md @@ -57,7 +57,7 @@ resource "snowflake_row_access_policy" "example_row_access_policy" { - `describe_output` (List of Object) Outputs the result of `DESCRIBE ROW ACCESS POLICY` for the given row access policy. (see [below for nested schema](#nestedatt--describe_output)) - `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). - `id` (String) The ID of this resource. -- `show_output` (List of Object) Outputs the result of `SHOW ROW ACCESS POLICY` for the given row access policy. (see [below for nested schema](#nestedatt--show_output)) +- `show_output` (List of Object) Outputs the result of `SHOW ROW ACCESS POLICIES` for the given row access policy. (see [below for nested schema](#nestedatt--show_output)) ### Nested Schema for `argument` diff --git a/docs/resources/stream_on_table.md b/docs/resources/stream_on_table.md new file mode 100644 index 0000000000..238fb6a841 --- /dev/null +++ b/docs/resources/stream_on_table.md @@ -0,0 +1,147 @@ +--- +page_title: "snowflake_stream_on_table Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + Resource used to manage streams on tables. For more information, check stream documentation https://docs.snowflake.com/en/sql-reference/sql/create-stream. +--- + +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. + +# snowflake_stream_on_table (Resource) + +Resource used to manage streams on tables. For more information, check [stream documentation](https://docs.snowflake.com/en/sql-reference/sql/create-stream). + +## Example Usage + +```terraform +resource "snowflake_table" "table" { + database = "database" + schema = "schema" + name = "name" + + column { + type = "NUMBER(38,0)" + name = "id" + } +} + + +# resource with more fields set +resource "snowflake_stream_on_table" "stream" { + name = "stream" + schema = "schema" + database = "database" + + copy_grants = true + table = snowflake_table.table.fully_qualified_name + append_only = "true" + show_initial_rows = "true" + + at { + statement = "8e5d0ca9-005e-44e6-b858-a8f5b37c5726" + } + + comment = "A stream." +} +``` +-> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). + + + +## Schema + +### Required + +- `database` (String) The database in which to create the stream. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `name` (String) Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `schema` (String) The schema in which to create the stream. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` +- `table` (String) Specifies an identifier for the table the stream will monitor. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `(`, `)`, `"` + +### Optional + +- `append_only` (String) Specifies whether this is an append-only stream. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `at` (Block List, Max: 1) This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field. (see [below for nested schema](#nestedblock--at)) +- `before` (Block List, Max: 1) This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field. (see [below for nested schema](#nestedblock--before)) +- `comment` (String) Specifies a comment for the stream. +- `copy_grants` (Boolean) Retains the access permissions from the original stream when a new stream is created using the OR REPLACE clause. +- `show_initial_rows` (String) Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. + +### Read-Only + +- `describe_output` (List of Object) Outputs the result of `DESCRIBE STREAM` for the given stream. (see [below for nested schema](#nestedatt--describe_output)) +- `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). +- `id` (String) The ID of this resource. +- `show_output` (List of Object) Outputs the result of `SHOW STREAMS` for the given stream. 
(see [below for nested schema](#nestedatt--show_output)) + + +### Nested Schema for `at` + +Optional: + +- `offset` (String) Specifies the difference in seconds from the current time to use for Time Travel, in the form -N where N can be an integer or arithmetic expression (e.g. -120 is 120 seconds, -30*60 is 1800 seconds or 30 minutes). +- `statement` (String) Specifies the query ID of a statement to use as the reference point for Time Travel. This parameter supports any statement of one of the following types: DML (e.g. INSERT, UPDATE, DELETE), TCL (BEGIN, COMMIT transaction), SELECT. +- `stream` (String) Specifies the identifier (i.e. name) for an existing stream on the queried table or view. The current offset in the stream is used as the AT point in time for returning change data for the source object. +- `timestamp` (String) Specifies an exact date and time to use for Time Travel. The value must be explicitly cast to a TIMESTAMP, TIMESTAMP_LTZ, TIMESTAMP_NTZ, or TIMESTAMP_TZ data type. + + + +### Nested Schema for `before` + +Optional: + +- `offset` (String) Specifies the difference in seconds from the current time to use for Time Travel, in the form -N where N can be an integer or arithmetic expression (e.g. -120 is 120 seconds, -30*60 is 1800 seconds or 30 minutes). +- `statement` (String) Specifies the query ID of a statement to use as the reference point for Time Travel. This parameter supports any statement of one of the following types: DML (e.g. INSERT, UPDATE, DELETE), TCL (BEGIN, COMMIT transaction), SELECT. +- `stream` (String) Specifies the identifier (i.e. name) for an existing stream on the queried table or view. The current offset in the stream is used as the AT point in time for returning change data for the source object. +- `timestamp` (String) Specifies an exact date and time to use for Time Travel. The value must be explicitly cast to a TIMESTAMP, TIMESTAMP_LTZ, TIMESTAMP_NTZ, or TIMESTAMP_TZ data type. + + + +### Nested Schema for `describe_output` + +Read-Only: + +- `base_tables` (List of String) +- `comment` (String) +- `created_on` (String) +- `database_name` (String) +- `invalid_reason` (String) +- `mode` (String) +- `name` (String) +- `owner` (String) +- `owner_role_type` (String) +- `schema_name` (String) +- `source_type` (String) +- `stale` (String) +- `stale_after` (String) +- `table_name` (String) +- `type` (String) + + + +### Nested Schema for `show_output` + +Read-Only: + +- `base_tables` (List of String) +- `comment` (String) +- `created_on` (String) +- `database_name` (String) +- `invalid_reason` (String) +- `mode` (String) +- `name` (String) +- `owner` (String) +- `owner_role_type` (String) +- `schema_name` (String) +- `source_type` (String) +- `stale` (String) +- `stale_after` (String) +- `table_name` (String) +- `type` (String) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_stream_on_table.example '""."".""' +``` diff --git a/docs/resources/view.md b/docs/resources/view.md index ba07139c62..018d352b41 100644 --- a/docs/resources/view.md +++ b/docs/resources/view.md @@ -176,7 +176,7 @@ Required: Optional: -- `minutes` (Number) Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron`. Valid values are: `5` | `15` | `30` | `60` | `720` | `1440`. Due to Snowflake limitations, changes in this field is not managed by the provider. 
Please consider using [taint](https://developer.hashicorp.com/terraform/cli/commands/taint) command, `using_cron` field, or [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) metadata argument. +- `minutes` (Number) Specifies an interval (in minutes) of wait time inserted between runs of the data metric function. Conflicts with `using_cron`. Valid values are: `5` | `15` | `30` | `60` | `720` | `1440`. Due to Snowflake limitations, changes in this field are not managed by the provider. Please consider using [taint](https://developer.hashicorp.com/terraform/cli/commands/taint) command, `using_cron` field, or [replace_triggered_by](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#replace_triggered_by) metadata argument. - `using_cron` (String) Specifies a cron expression and time zone for periodically running the data metric function. Supports a subset of standard cron utility syntax. Conflicts with `minutes`. diff --git a/examples/resources/snowflake_stream_on_table/resource.tf b/examples/resources/snowflake_stream_on_table/resource.tf index 576472fa8a..c3bf45a71e 100644 --- a/examples/resources/snowflake_stream_on_table/resource.tf +++ b/examples/resources/snowflake_stream_on_table/resource.tf @@ -10,7 +10,7 @@ resource "snowflake_table" "table" { } -# resource with all fields set (excluding time travel with BEFORE) +# resource with more fields set resource "snowflake_stream_on_table" "stream" { name = "stream" schema = "schema" diff --git a/pkg/acceptance/bettertestspoc/README.md b/pkg/acceptance/bettertestspoc/README.md index ddc4405fe7..82b203f4d3 100644 --- a/pkg/acceptance/bettertestspoc/README.md +++ b/pkg/acceptance/bettertestspoc/README.md @@ -177,7 +177,7 @@ it will result in: object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [7/13]: failed with error: expected scaling policy: ECONOMY; got: STANDARD object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [8/13]: failed with error: expected auto suspend: 123; got: 600 object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [9/13]: failed with error: expected auto resume: false; got: true - object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [10/13]: failed with error: expected resource monitor: some-id; got: + object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [10/13]: failed with error: expected resource monitor: some-id; got: object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [11/13]: failed with error: expected comment: bad comment; got: Who does encouraging eagerly annoying dream several their scold straightaway. 
object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [12/13]: failed with error: expected enable query acceleration: true; got: false object WAREHOUSE["XHZJCKAT_35D0BCC1_7797_974E_ACAF_C622C56FA2D2"] assertion [13/13]: failed with error: expected query acceleration max scale factor: 12; got: 8 @@ -241,17 +241,17 @@ it will result in: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [6/12]: failed with error: expected: ECONOMY, got: STANDARD WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [7/12]: failed with error: expected: 123, got: 600 WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [8/12]: failed with error: expected: false, got: true - WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [9/12]: failed with error: expected: abc, got: + WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [9/12]: failed with error: expected: abc, got: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [10/12]: failed with error: expected: bad comment, got: Promise my huh off certain you bravery dynasty with Roman. WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [11/12]: failed with error: expected: true, got: false WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported resource assertion [12/12]: failed with error: expected: 16, got: 8 check 9/11 error: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [2/7]: failed with error: expected: 1, got: 8 - WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [3/7]: failed with error: expected: WAREHOUSE, got: + WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [3/7]: failed with error: expected: WAREHOUSE, got: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [4/7]: failed with error: expected: 23, got: 0 - WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [5/7]: failed with error: expected: WAREHOUSE, got: + WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [5/7]: failed with error: expected: WAREHOUSE, got: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [6/7]: failed with error: expected: 1232, got: 172800 - WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [7/7]: failed with error: expected: WAREHOUSE, got: + WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 imported parameters assertion [7/7]: failed with error: expected: WAREHOUSE, got: check 10/11 error: object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [1/13]: failed with error: expected name: bad name; got: WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65 object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [2/13]: failed with error: expected state: SUSPENDED; got: STARTED @@ -262,7 +262,7 @@ it will result in: object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [7/13]: failed with error: expected scaling policy: ECONOMY; got: STANDARD object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [8/13]: failed with error: expected auto suspend: 123; got: 600 object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [9/13]: failed with error: expected auto resume: false; got: true - object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [10/13]: failed with error: expected resource monitor: 
some-id; got: + object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [10/13]: failed with error: expected resource monitor: some-id; got: object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [11/13]: failed with error: expected comment: bad comment; got: Promise my huh off certain you bravery dynasty with Roman. object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [12/13]: failed with error: expected enable query acceleration: true; got: false object WAREHOUSE["WBJKHLAT_2E52D1E6_D23D_33A0_F568_4EBDBE083B65"] assertion [13/13]: failed with error: expected query acceleration max scale factor: 12; got: 8 @@ -291,7 +291,7 @@ it will result in: ``` it will result in: ``` - commons.go:101: + commons.go:101: Error Trace: /Users/asawicki/Projects/terraform-provider-snowflake/pkg/sdk/testint/warehouses_integration_test.go:149 Error: Received unexpected error: object WAREHOUSE["VKSENEIT_535F314F_6549_348F_370E_AB430EE4BC7B"] assertion [1/13]: failed with error: expected name: bad name; got: VKSENEIT_535F314F_6549_348F_370E_AB430EE4BC7B diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_gen.go b/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_gen.go index 5874d1fed9..460b5355a7 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_gen.go @@ -117,7 +117,6 @@ func (s *StreamAssert) HasTableName(expected string) *StreamAssert { return s } - func (s *StreamAssert) HasType(expected string) *StreamAssert { s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { t.Helper() @@ -146,7 +145,6 @@ func (s *StreamAssert) HasStale(expected string) *StreamAssert { return s } - func (s *StreamAssert) HasStaleAfter(expected time.Time) *StreamAssert { s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { t.Helper() diff --git a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go index b5c571149d..24ac1f78bd 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/objectparametersassert/task_parameters_snowflake_gen.go @@ -1027,7 +1027,7 @@ func (t *TaskParametersAssert) HasDefaultTaskAutoRetryAttemptsValueExplicit() *T } func (t *TaskParametersAssert) HasDefaultUserTaskManagedInitialWarehouseSizeValueExplicit() *TaskParametersAssert { - return t.HasUserTaskManagedInitialWarehouseSize(sdk.WarehouseSize("Medium")) + return t.HasUserTaskManagedInitialWarehouseSize("Medium") } func (t *TaskParametersAssert) HasDefaultUserTaskMinimumTriggerIntervalInSecondsValueExplicit() *TaskParametersAssert { diff --git a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go index f13f830192..9c350e1809 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/stream_show_output_ext.go @@ -10,21 +10,20 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" ) - func (s *StreamShowOutputAssert) HasCreatedOnNotEmpty() *StreamShowOutputAssert { s.AddAssertion(assert.ResourceShowOutputValuePresent("created_on")) return s } func (s 
*StreamShowOutputAssert) HasStaleAfterNotEmpty() *StreamShowOutputAssert { - s.AddAssertion(assert.ResourceShowOutputValuePresent("created_on")) + s.AddAssertion(assert.ResourceShowOutputValuePresent("stale_after")) return s } func (s *StreamShowOutputAssert) HasBaseTables(ids []sdk.SchemaObjectIdentifier) *StreamShowOutputAssert { s.AddAssertion(assert.ResourceShowOutputValueSet("base_tables.#", strconv.FormatInt(int64(len(ids)), 10))) for i := range ids { - s.AddAssertion(assert.ResourceShowOutputValueSet(fmt.Sprintf("base_tables.%d", i),ids[i].FullyQualifiedName())) + s.AddAssertion(assert.ResourceShowOutputValueSet(fmt.Sprintf("base_tables.%d", i), ids[i].FullyQualifiedName())) } return s } diff --git a/pkg/resources/api_authentication_integration_common.go b/pkg/resources/api_authentication_integration_common.go index 89493d3afe..8791a5d0df 100644 --- a/pkg/resources/api_authentication_integration_common.go +++ b/pkg/resources/api_authentication_integration_common.go @@ -31,9 +31,8 @@ var apiAuthCommonSchema = map[string]*schema.Schema{ Description: "Specifies the client ID for the OAuth application in the external service.", }, "oauth_client_secret": { - Type: schema.TypeString, - Required: true, - // TODO: sensitive? + Type: schema.TypeString, + Required: true, Description: "Specifies the client secret for the OAuth application in the ServiceNow instance from the previous step. The connector uses this to request an access token from the ServiceNow instance.", }, "oauth_token_endpoint": { diff --git a/pkg/resources/stream_common.go b/pkg/resources/stream_common.go index 63b9e85262..843e2879e5 100644 --- a/pkg/resources/stream_common.go +++ b/pkg/resources/stream_common.go @@ -10,13 +10,14 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) -func handleStreamTimeTravel(d *schema.ResourceData, req *sdk.CreateOnTableStreamRequest) { +func handleStreamTimeTravel(d *schema.ResourceData) *sdk.OnStreamRequest { if v := d.Get(AtAttributeName).([]any); len(v) > 0 { - req.WithOn(*sdk.NewOnStreamRequest().WithAt(true).WithStatement(handleStreamTimeTravelStatement(v[0].(map[string]any)))) + return sdk.NewOnStreamRequest().WithAt(true).WithStatement(handleStreamTimeTravelStatement(v[0].(map[string]any))) } if v := d.Get(BeforeAttributeName).([]any); len(v) > 0 { - req.WithOn(*sdk.NewOnStreamRequest().WithBefore(true).WithStatement(handleStreamTimeTravelStatement(v[0].(map[string]any)))) + return sdk.NewOnStreamRequest().WithBefore(true).WithStatement(handleStreamTimeTravelStatement(v[0].(map[string]any))) } + return nil } func handleStreamTimeTravelStatement(timeTravelConfig map[string]any) sdk.OnStreamStatementRequest { diff --git a/pkg/resources/stream_on_table.go b/pkg/resources/stream_on_table.go index 1c58d0b4a4..203823c0e9 100644 --- a/pkg/resources/stream_on_table.go +++ b/pkg/resources/stream_on_table.go @@ -18,22 +18,25 @@ import ( var streamOnTableSchema = map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: blocklistedCharactersFieldDescription("Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created."), + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: blocklistedCharactersFieldDescription("Specifies the identifier for the stream; must be unique for the database and schema in which the stream is created."), + DiffSuppressFunc: suppressIdentifierQuoting, }, "schema": { - Type: schema.TypeString, - Required: 
true, - ForceNew: true, - Description: blocklistedCharactersFieldDescription("The schema in which to create the stream."), + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: blocklistedCharactersFieldDescription("The schema in which to create the stream."), + DiffSuppressFunc: suppressIdentifierQuoting, }, "database": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: blocklistedCharactersFieldDescription("The database in which to create the stream."), + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: blocklistedCharactersFieldDescription("The database in which to create the stream."), + DiffSuppressFunc: suppressIdentifierQuoting, }, "copy_grants": { Type: schema.TypeBool, @@ -47,8 +50,8 @@ var streamOnTableSchema = map[string]*schema.Schema{ "table": { Type: schema.TypeString, Required: true, - Description: "Specifies an identifier for the table the stream will monitor.", - DiffSuppressFunc: suppressIdentifierQuoting, + Description: blocklistedCharactersFieldDescription("Specifies an identifier for the table the stream will monitor."), + DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuoting, IgnoreChangeToCurrentSnowflakeValueInShow("table_name")), ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), }, "append_only": { @@ -56,15 +59,17 @@ var streamOnTableSchema = map[string]*schema.Schema{ Optional: true, Default: BooleanDefault, ValidateDiagFunc: validateBooleanString, - Description: booleanStringFieldDescription("Type of the stream that will be created."), + DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValueInShowWithMapping("mode", func(x any) any { + return x.(string) == string(sdk.StreamModeAppendOnly) + }), + Description: booleanStringFieldDescription("Specifies whether this is an append-only stream."), }, "show_initial_rows": { Type: schema.TypeString, Optional: true, Default: BooleanDefault, ValidateDiagFunc: validateBooleanString, - DiffSuppressFunc: IgnoreAfterCreation, - Description: booleanStringFieldDescription("Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed. 
This field is used only during stream creation."), + Description: booleanStringFieldDescription("Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed."), }, "comment": { Type: schema.TypeString, @@ -103,7 +108,6 @@ func StreamOnTable() *schema.Resource { CustomizeDiff: customdiff.All( ComputedIfAnyAttributeChanged(streamOnTableSchema, ShowOutputAttributeName, "table", "append_only", "comment"), ComputedIfAnyAttributeChanged(streamOnTableSchema, DescribeOutputAttributeName, "table", "append_only", "comment"), - ComputedIfAnyAttributeChanged(streamOnTableSchema, FullyQualifiedNameAttributeName, "name"), ), Schema: streamOnTableSchema, @@ -180,7 +184,10 @@ func CreateStreamOnTable(orReplace bool) schema.CreateContextFunc { req.WithShowInitialRows(parsed) } - handleStreamTimeTravel(d, req) + streamTimeTravelReq := handleStreamTimeTravel(d) + if streamTimeTravelReq != nil { + req.WithOn(*streamTimeTravelReq) + } if v, ok := d.GetOk("comment"); ok { req.WithComment(v.(string)) @@ -241,8 +248,12 @@ func ReadStreamOnTable(withExternalChangesMarking bool) schema.ReadContextFunc { return diag.FromErr(err) } if withExternalChangesMarking { + var mode sdk.StreamMode + if stream.Mode != nil { + mode = *stream.Mode + } if err = handleExternalChangesToObjectInShow(d, - showMapping{"mode", "append_only", string(*stream.Mode), booleanStringFromBool(*stream.Mode == sdk.StreamModeAppendOnly), nil}, + showMapping{"mode", "append_only", string(mode), booleanStringFromBool(mode == sdk.StreamModeAppendOnly), nil}, ); err != nil { return diag.FromErr(err) } @@ -272,7 +283,7 @@ func UpdateStreamOnTable(ctx context.Context, d *schema.ResourceData, meta any) } // change on these fields can not be ForceNew because then the object is dropped explicitly and copying grants does not have effect - if keys := changedKeys(d, "table", "append_only", "at", "before"); len(keys) > 0 { + if keys := changedKeys(d, "table", "append_only", "at", "before", "show_initial_rows"); len(keys) > 0 { log.Printf("[DEBUG] Detected change on %q, recreating...", keys) return CreateStreamOnTable(true)(ctx, d, meta) } diff --git a/pkg/resources/stream_on_table_acceptance_test.go b/pkg/resources/stream_on_table_acceptance_test.go index f16f81ee21..8e4d054fef 100644 --- a/pkg/resources/stream_on_table_acceptance_test.go +++ b/pkg/resources/stream_on_table_acceptance_test.go @@ -2,6 +2,7 @@ package resources_test import ( "fmt" + "regexp" "testing" acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" @@ -516,3 +517,48 @@ func TestAcc_StreamOnTable_Before(t *testing.T) { }, }) } + +func TestAcc_StreamOnTable_InvalidConfiguration(t *testing.T) { + id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + + model := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), "foo.bar.hoge"). 
+ WithComment("foo").WithAppendOnly(r.BooleanTrue).WithShowInitialRows(r.BooleanTrue).WithCopyGrants(true) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Saml2SecurityIntegration), + Steps: []resource.TestStep{ + // multiple excluding options - before + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/before"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "offset": pluginconfig.StringVariable("0"), + "timestamp": pluginconfig.StringVariable("0"), + "statement": pluginconfig.StringVariable("0"), + "stream": pluginconfig.StringVariable("0"), + }))), + ExpectError: regexp.MustCompile("Error: Invalid combination of arguments"), + }, + // multiple excluding options - at + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "offset": pluginconfig.StringVariable("0"), + "timestamp": pluginconfig.StringVariable("0"), + "statement": pluginconfig.StringVariable("0"), + "stream": pluginconfig.StringVariable("0"), + }))), + ExpectError: regexp.MustCompile("Error: Invalid combination of arguments"), + }, + // invalid table id + { + Config: config.FromModel(t, model.WithBeforeValue(nil).WithAtValue(nil).WithTable("invalid")), + ExpectError: regexp.MustCompile("Error: Invalid identifier type"), + }, + }, + }) +} diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/basic/test.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/basic/test.tf deleted file mode 100644 index e61457cfac..0000000000 --- a/pkg/resources/testdata/TestAcc_StreamOnTable/basic/test.tf +++ /dev/null @@ -1,13 +0,0 @@ -resource "snowflake_view" "test" { - name = var.name - database = var.database - schema = var.schema - statement = var.statement - - dynamic "column" { - for_each = var.column - content { - column_name = column.value["column_name"] - } - } -} diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/basic/variables.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/basic/variables.tf deleted file mode 100644 index dc7d599d87..0000000000 --- a/pkg/resources/testdata/TestAcc_StreamOnTable/basic/variables.tf +++ /dev/null @@ -1,19 +0,0 @@ -variable "name" { - type = string -} - -variable "database" { - type = string -} - -variable "schema" { - type = string -} - -variable "statement" { - type = string -} - -variable "column" { - type = set(map(string)) -} diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf index 84758aaf72..035a4863af 100644 --- a/pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf +++ b/pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf @@ -8,7 +8,7 @@ resource "snowflake_stream_on_table" "test" { append_only = "true" show_initial_rows = "true" - at { + before { timestamp = try(var.before["timestamp"], null) offset = try(var.before["offset"], null) stream = try(var.before["stream"], null) diff --git a/pkg/sdk/parsers.go b/pkg/sdk/parsers.go index e980e75d01..09952d9381 100644 --- a/pkg/sdk/parsers.go +++ b/pkg/sdk/parsers.go @@ -38,7 +38,8 @@ func 
ParseCommaSeparatedStringArray(value string, trimQuotes bool) []string { return trimmedListItems } -// TODO: add description and unit tests +// ParseCommaSeparatedSchemaObjectIdentifierArray can be used to parse Snowflake output containing a list of schema-level object identifiers +// in the format of ["db".SCHEMA."name", "db"."schema2"."name2", ...], func ParseCommaSeparatedSchemaObjectIdentifierArray(value string) ([]SchemaObjectIdentifier, error) { idsRaw := ParseCommaSeparatedStringArray(value, false) ids := make([]SchemaObjectIdentifier, len(idsRaw)) diff --git a/pkg/sdk/parsers_test.go b/pkg/sdk/parsers_test.go index d31fa0063d..0865f7e5bc 100644 --- a/pkg/sdk/parsers_test.go +++ b/pkg/sdk/parsers_test.go @@ -4,6 +4,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestParseCommaSeparatedStringArray(t *testing.T) { @@ -104,3 +105,96 @@ func TestParseCommaSeparatedStringArray(t *testing.T) { }) } } + +func TestParseCommaSeparatedSchemaObjectIdentifierArray(t *testing.T) { + testCases := []struct { + Name string + Value string + Result []SchemaObjectIdentifier + }{ + { + Name: "empty list", + Value: "[]", + Result: []SchemaObjectIdentifier{}, + }, + { + Name: "empty string", + Value: "", + Result: []SchemaObjectIdentifier{}, + }, + { + Name: "one element in list", + Value: "[A.B.C]", + Result: []SchemaObjectIdentifier{NewSchemaObjectIdentifier("A", "B", "C")}, + }, + { + Name: "one element in list - with mixed cases", + Value: `[A."b".C]`, + Result: []SchemaObjectIdentifier{NewSchemaObjectIdentifier("A", "b", "C")}, + }, + { + Name: "multiple elements in list", + Value: "[A.B.C, D.E.F]", + Result: []SchemaObjectIdentifier{NewSchemaObjectIdentifier("A", "B", "C"), NewSchemaObjectIdentifier("D", "E", "F")}, + }, + { + Name: "multiple elements in list - with mixed cases", + Value: `[A."b".C, "d"."e"."f"]`, + Result: []SchemaObjectIdentifier{NewSchemaObjectIdentifier("A", "b", "C"), NewSchemaObjectIdentifier("d", "e", "f")}, + }, + { + Name: "multiple elements in list - packed", + Value: "[A.B.C,D.E.F]", + Result: []SchemaObjectIdentifier{NewSchemaObjectIdentifier("A", "B", "C"), NewSchemaObjectIdentifier("D", "E", "F")}, + }, + { + Name: "multiple elements in list - additional spaces", + Value: "[A.B.C, D.E.F]", + Result: []SchemaObjectIdentifier{NewSchemaObjectIdentifier("A", "B", "C"), NewSchemaObjectIdentifier("D", "E", "F")}, + }, + { + Name: "list without brackets", + Value: "A.B.C, D.E.F", + Result: []SchemaObjectIdentifier{NewSchemaObjectIdentifier("A", "B", "C"), NewSchemaObjectIdentifier("D", "E", "F")}, + }, + } + + for _, tc := range testCases { + t.Run(tc.Name, func(t *testing.T) { + ids, err := ParseCommaSeparatedSchemaObjectIdentifierArray(tc.Value) + require.NoError(t, err) + require.Equal(t, tc.Result, ids) + }) + } +} + +func TestParseCommaSeparatedSchemaObjectIdentifierArray_Invalid(t *testing.T) { + testCases := []struct { + Name string + Value string + Error string + }{ + { + Name: "bad quotes", + Value: `["a]`, + Error: "unable to read identifier: \"a, err = parse error on line 1, column 3: extraneous or missing \" in quoted-field", + }, + { + Name: "missing parts", + Value: "[a.b.c, a.b]", + Error: "unexpected number of parts 2 in identifier a.b, expected 3 in a form of \"..\"", + }, + { + Name: "missing parts - empty id", + Value: "[a.b.c, ]", + Error: "incompatible identifier", + }, + } + + for _, tc := range testCases { + t.Run(tc.Name, func(t *testing.T) { + _, err := 
ParseCommaSeparatedSchemaObjectIdentifierArray(tc.Value) + require.ErrorContains(t, err, tc.Error) + }) + } +} diff --git a/pkg/sdk/streams_impl_gen.go b/pkg/sdk/streams_impl_gen.go index 4e6a7c02b2..2299666164 100644 --- a/pkg/sdk/streams_impl_gen.go +++ b/pkg/sdk/streams_impl_gen.go @@ -125,7 +125,6 @@ func (r *CreateOnExternalTableStreamRequest) toOpts() *CreateOnExternalTableStre Comment: r.Comment, } if r.On != nil { - opts.On = &OnStream{ At: r.On.At, Before: r.On.Before, diff --git a/pkg/sdk/streams_validations_gen.go b/pkg/sdk/streams_validations_gen.go index d5e9eda232..14fb378c7a 100644 --- a/pkg/sdk/streams_validations_gen.go +++ b/pkg/sdk/streams_validations_gen.go @@ -32,7 +32,7 @@ func (opts *CreateOnTableStreamOptions) validate() error { } if valueSet(opts.On.Statement) { if !exactlyOneValueSet(opts.On.Statement.Timestamp, opts.On.Statement.Offset, opts.On.Statement.Statement, opts.On.Statement.Stream) { - errs = append(errs, errExactlyOneOf("CreateOnViewStreamOptions.On.Statement", "Timestamp", "Offset", "Statement", "Stream")) + errs = append(errs, errExactlyOneOf("CreateOnTableStreamOptions.On.Statement", "Timestamp", "Offset", "Statement", "Stream")) } } } @@ -59,7 +59,7 @@ func (opts *CreateOnExternalTableStreamOptions) validate() error { } if valueSet(opts.On.Statement) { if !exactlyOneValueSet(opts.On.Statement.Timestamp, opts.On.Statement.Offset, opts.On.Statement.Statement, opts.On.Statement.Stream) { - errs = append(errs, errExactlyOneOf("CreateOnViewStreamOptions.On.Statement", "Timestamp", "Offset", "Statement", "Stream")) + errs = append(errs, errExactlyOneOf("CreateOnExternalTableStreamOptions.On.Statement", "Timestamp", "Offset", "Statement", "Stream")) } } } diff --git a/templates/resources/stream.md.tmpl b/templates/resources/stream_on_table.md.tmpl similarity index 97% rename from templates/resources/stream.md.tmpl rename to templates/resources/stream_on_table.md.tmpl index d1045d341c..20eda79f70 100644 --- a/templates/resources/stream.md.tmpl +++ b/templates/resources/stream_on_table.md.tmpl @@ -9,7 +9,7 @@ description: |- {{- end }} --- -!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0930--v0940) to use it. +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0960--v0970) to use it. 
# {{.Name}} ({{.Type}}) From d61356336d2253d924befc0e47db4006421c81b4 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 2 Oct 2024 11:52:33 +0200 Subject: [PATCH 03/11] Review --- .../objectassert/stream_snowflake_ext.go | 22 +-- pkg/resources/stream_common.go | 76 +++++++++ pkg/resources/stream_on_table.go | 8 +- .../stream_on_table_acceptance_test.go | 155 ++++++++++++------ pkg/schemas/common_types.go | 72 -------- pkg/schemas/stream.go | 9 +- pkg/schemas/stream_gen.go | 1 - pkg/sdk/parsers_test.go | 5 + pkg/sdk/streams_def.go | 10 +- pkg/sdk/streams_gen.go | 4 + .../testint/streams_gen_integration_test.go | 12 +- 11 files changed, 220 insertions(+), 154 deletions(-) diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_ext.go b/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_ext.go index 560dfc889f..400b8497c6 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_ext.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/stream_snowflake_ext.go @@ -1,13 +1,15 @@ package objectassert import ( + "errors" "fmt" + "slices" "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" ) -func (s *StreamAssert) HasTableId(expected string) *StreamAssert { +func (s *StreamAssert) HasTableId(expected sdk.SchemaObjectIdentifier) *StreamAssert { s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { t.Helper() if o.TableName == nil { @@ -17,7 +19,7 @@ func (s *StreamAssert) HasTableId(expected string) *StreamAssert { if err != nil { return err } - if gotTableId.FullyQualifiedName() != expected { + if gotTableId.FullyQualifiedName() != expected.FullyQualifiedName() { return fmt.Errorf("expected table name: %v; got: %v", expected, *o.TableName) } return nil @@ -53,21 +55,21 @@ func (s *StreamAssert) HasSourceType(expected sdk.StreamSourceType) *StreamAsser return s } -func (s *StreamAssert) HasBaseTables(expected []sdk.SchemaObjectIdentifier) *StreamAssert { +func (s *StreamAssert) HasBaseTables(expected ...sdk.SchemaObjectIdentifier) *StreamAssert { s.AddAssertion(func(t *testing.T, o *sdk.Stream) error { t.Helper() - if o.BaseTables == nil { - return fmt.Errorf("expected base tables to have value; got: nil") - } if len(o.BaseTables) != len(expected) { return fmt.Errorf("expected base tables length: %v; got: %v", len(expected), len(o.BaseTables)) } - for i := range o.BaseTables { - if o.BaseTables[i].FullyQualifiedName() != expected[i].FullyQualifiedName() { - return fmt.Errorf("expected base table id: %v; got: %v", expected[i], o.BaseTables[i]) + var errs []error + for _, wantId := range expected { + if !slices.ContainsFunc(o.BaseTables, func(gotId sdk.SchemaObjectIdentifier) bool { + return wantId.FullyQualifiedName() == gotId.FullyQualifiedName() + }) { + errs = append(errs, fmt.Errorf("expected id: %s, to be in the list ids: %v", wantId.FullyQualifiedName(), o.BaseTables)) } } - return nil + return errors.Join(errs...) 
}) return s } diff --git a/pkg/resources/stream_common.go b/pkg/resources/stream_common.go index 843e2879e5..1d1fd3b085 100644 --- a/pkg/resources/stream_common.go +++ b/pkg/resources/stream_common.go @@ -58,3 +58,79 @@ func DeleteStreamContext(ctx context.Context, d *schema.ResourceData, meta any) d.SetId("") return nil } + +var atSchema = &schema.Schema{ + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "timestamp": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies an exact date and time to use for Time Travel. The value must be explicitly cast to a TIMESTAMP, TIMESTAMP_LTZ, TIMESTAMP_NTZ, or TIMESTAMP_TZ data type.", + ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, + }, + "offset": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the difference in seconds from the current time to use for Time Travel, in the form -N where N can be an integer or arithmetic expression (e.g. -120 is 120 seconds, -30*60 is 1800 seconds or 30 minutes).", + ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, + }, + "statement": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the query ID of a statement to use as the reference point for Time Travel. This parameter supports any statement of one of the following types: DML (e.g. INSERT, UPDATE, DELETE), TCL (BEGIN, COMMIT transaction), SELECT.", + ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, + }, + "stream": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the identifier (i.e. name) for an existing stream on the queried table or view. The current offset in the stream is used as the AT point in time for returning change data for the source object.", + ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, + DiffSuppressFunc: suppressIdentifierQuoting, + ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), + }, + }, + }, + ConflictsWith: []string{"before"}, +} + +var beforeSchema = &schema.Schema{ + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: "This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "timestamp": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies an exact date and time to use for Time Travel. The value must be explicitly cast to a TIMESTAMP, TIMESTAMP_LTZ, TIMESTAMP_NTZ, or TIMESTAMP_TZ data type.", + ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, + }, + "offset": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the difference in seconds from the current time to use for Time Travel, in the form -N where N can be an integer or arithmetic expression (e.g. 
-120 is 120 seconds, -30*60 is 1800 seconds or 30 minutes).", + ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, + }, + "statement": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the query ID of a statement to use as the reference point for Time Travel. This parameter supports any statement of one of the following types: DML (e.g. INSERT, UPDATE, DELETE), TCL (BEGIN, COMMIT transaction), SELECT.", + ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, + }, + "stream": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the identifier (i.e. name) for an existing stream on the queried table or view. The current offset in the stream is used as the AT point in time for returning change data for the source object.", + ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, + DiffSuppressFunc: suppressIdentifierQuoting, + ValidateDiagFunc: IsValidIdentifier[sdk.SchemaObjectIdentifier](), + }, + }, + }, + ConflictsWith: []string{"at"}, +} diff --git a/pkg/resources/stream_on_table.go b/pkg/resources/stream_on_table.go index 203823c0e9..494b09b0b9 100644 --- a/pkg/resources/stream_on_table.go +++ b/pkg/resources/stream_on_table.go @@ -76,8 +76,8 @@ var streamOnTableSchema = map[string]*schema.Schema{ Optional: true, Description: "Specifies a comment for the stream.", }, - AtAttributeName: schemas.AtSchema, - BeforeAttributeName: schemas.BeforeSchema, + AtAttributeName: atSchema, + BeforeAttributeName: beforeSchema, ShowOutputAttributeName: { Type: schema.TypeList, Computed: true, @@ -139,7 +139,7 @@ func ImportStreamOnTable(ctx context.Context, d *schema.ResourceData, meta any) if err := d.Set("schema", id.SchemaName()); err != nil { return nil, err } - if err := d.Set("append_only", booleanStringFromBool(*v.Mode == sdk.StreamModeAppendOnly)); err != nil { + if err := d.Set("append_only", booleanStringFromBool(v.IsAppendOnly())); err != nil { return nil, err } return []*schema.ResourceData{d}, nil @@ -253,7 +253,7 @@ func ReadStreamOnTable(withExternalChangesMarking bool) schema.ReadContextFunc { mode = *stream.Mode } if err = handleExternalChangesToObjectInShow(d, - showMapping{"mode", "append_only", string(mode), booleanStringFromBool(mode == sdk.StreamModeAppendOnly), nil}, + showMapping{"mode", "append_only", string(mode), booleanStringFromBool(stream.IsAppendOnly()), nil}, ); err != nil { return diag.FromErr(err) } diff --git a/pkg/resources/stream_on_table_acceptance_test.go b/pkg/resources/stream_on_table_acceptance_test.go index 8e4d054fef..51611b7b79 100644 --- a/pkg/resources/stream_on_table_acceptance_test.go +++ b/pkg/resources/stream_on_table_acceptance_test.go @@ -12,6 +12,7 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" tfconfig "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeroles" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" @@ -24,6 +25,7 @@ import ( ) func 
TestAcc_StreamOnTable_Basic(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resourceId := helpers.EncodeResourceIdentifier(id) @@ -31,7 +33,28 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { table, cleanupTable := acc.TestClient().Table.CreateWithChangeTracking(t) t.Cleanup(cleanupTable) - model := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()) + + baseModel := func() *model.StreamOnTableModel { + return model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()) + } + + modelWithExtraFields := baseModel(). + WithCopyGrants(true). + WithComment("foo"). + WithAppendOnly(r.BooleanTrue). + WithShowInitialRows(r.BooleanTrue). + WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "offset": pluginconfig.StringVariable("0"), + })) + + modelWithExtraFieldsDefaultMode := baseModel(). + WithCopyGrants(true). + WithComment("foo"). + WithAppendOnly(r.BooleanFalse). + WithShowInitialRows(r.BooleanTrue). + WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "offset": pluginconfig.StringVariable("0"), + })) resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -42,7 +65,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { Steps: []resource.TestStep{ // without optionals { - Config: config.FromModel(t, model), + Config: config.FromModel(t, baseModel()), Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). HasNameString(id.Name()). HasDatabaseString(id.DatabaseName()). @@ -84,7 +107,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { }, // import without optionals { - Config: config.FromModel(t, model), + Config: config.FromModel(t, baseModel()), ResourceName: resourceName, ImportState: true, ImportStateCheck: assert.AssertThatImport(t, @@ -100,9 +123,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { // set all fields { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithCopyGrants(true).WithComment("foo").WithAppendOnly(r.BooleanTrue).WithShowInitialRows(r.BooleanTrue).WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ - "offset": pluginconfig.StringVariable("0"), - }))), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithExtraFields), Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). HasNameString(id.Name()). HasDatabaseString(id.DatabaseName()). 
@@ -149,7 +170,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { acc.TestClient().Stream.Alter(t, sdk.NewAlterStreamRequest(id).WithSetComment("bar")) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithExtraFields), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionUpdate), @@ -198,7 +219,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { // update fields that recreate the object { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithAppendOnly(r.BooleanFalse)), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithExtraFieldsDefaultMode), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionUpdate), @@ -246,7 +267,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { }, // import { - Config: config.FromModel(t, model), + Config: config.FromModel(t, modelWithExtraFieldsDefaultMode), ResourceName: resourceName, ImportState: true, ImportStateCheck: assert.AssertThatImport(t, @@ -264,6 +285,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { } func TestAcc_StreamOnTable_CopyGrants(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resourceName := "snowflake_stream_on_table.test" @@ -318,8 +340,9 @@ func TestAcc_StreamOnTable_CopyGrants(t *testing.T) { }) } -// There is no way to check at/before fields in show and describe. That's why in we try creating with these values, but do not assert them. +// There is no way to check at/before fields in show and describe. That's why we try creating with these values, but do not assert them. func TestAcc_StreamOnTable_At(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resourceId := helpers.EncodeResourceIdentifier(id) @@ -331,8 +354,23 @@ func TestAcc_StreamOnTable_At(t *testing.T) { lastQueryId := acc.TestClient().Context.LastQueryId(t) - model := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()). - WithComment("foo").WithAppendOnly(r.BooleanTrue).WithShowInitialRows(r.BooleanTrue).WithCopyGrants(true) + baseModel := func() *model.StreamOnTableModel { + return model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()). + WithComment("foo"). + WithAppendOnly(r.BooleanTrue). + WithShowInitialRows(r.BooleanTrue). 
+ WithCopyGrants(true) + } + + modelWithOffset := baseModel().WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "offset": pluginconfig.StringVariable("0"), + })) + modelWithStream := baseModel().WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "stream": pluginconfig.StringVariable(id.FullyQualifiedName()), + })) + modelWithStatement := baseModel().WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "statement": pluginconfig.StringVariable(lastQueryId), + })) resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -343,9 +381,7 @@ func TestAcc_StreamOnTable_At(t *testing.T) { Steps: []resource.TestStep{ { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ - "offset": pluginconfig.StringVariable("0"), - }))), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithOffset), Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). HasNameString(id.Name()). HasDatabaseString(id.DatabaseName()). @@ -390,18 +426,14 @@ func TestAcc_StreamOnTable_At(t *testing.T) { }, { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ - "stream": pluginconfig.StringVariable(id.FullyQualifiedName()), - }))), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithStream), Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). HasNameString(id.Name()), ), }, { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ - "statement": pluginconfig.StringVariable(lastQueryId), - }))), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithStatement), Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). HasNameString(id.Name()), ), @@ -409,7 +441,7 @@ func TestAcc_StreamOnTable_At(t *testing.T) { // TODO(SNOW-1689111): test timestamps { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithOffset), ResourceName: resourceName, ImportState: true, ImportStateCheck: assert.AssertThatImport(t, @@ -426,8 +458,9 @@ func TestAcc_StreamOnTable_At(t *testing.T) { }) } -// There is no way to check at/before fields in show and describe. That's why in we try creating with these values, but do not assert them. +// There is no way to check at/before fields in show and describe. That's why we try creating with these values, but do not assert them. func TestAcc_StreamOnTable_Before(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) acc.TestAccPreCheck(t) id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resourceName := "snowflake_stream_on_table.test" @@ -438,8 +471,23 @@ func TestAcc_StreamOnTable_Before(t *testing.T) { lastQueryId := acc.TestClient().Context.LastQueryId(t) - model := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()). 
- WithComment("foo").WithAppendOnly(r.BooleanTrue).WithShowInitialRows(r.BooleanTrue).WithCopyGrants(true) + baseModel := func() *model.StreamOnTableModel { + return model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), table.ID().FullyQualifiedName()). + WithComment("foo"). + WithAppendOnly(r.BooleanTrue). + WithShowInitialRows(r.BooleanTrue). + WithCopyGrants(true) + } + + modelWithOffset := baseModel().WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "offset": pluginconfig.StringVariable("0"), + })) + modelWithStream := baseModel().WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "stream": pluginconfig.StringVariable(id.FullyQualifiedName()), + })) + modelWithStatement := baseModel().WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "statement": pluginconfig.StringVariable(lastQueryId), + })) resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -450,9 +498,7 @@ func TestAcc_StreamOnTable_Before(t *testing.T) { Steps: []resource.TestStep{ { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/before"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ - "offset": pluginconfig.StringVariable("0"), - }))), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithOffset), Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). HasNameString(id.Name()). HasDatabaseString(id.DatabaseName()). @@ -497,18 +543,14 @@ func TestAcc_StreamOnTable_Before(t *testing.T) { }, { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/before"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ - "stream": pluginconfig.StringVariable(id.FullyQualifiedName()), - }))), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithStream), Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). HasNameString(id.Name()), ), }, { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/before"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ - "statement": pluginconfig.StringVariable(lastQueryId), - }))), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithStatement), Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). HasNameString(id.Name()), ), @@ -521,8 +563,29 @@ func TestAcc_StreamOnTable_Before(t *testing.T) { func TestAcc_StreamOnTable_InvalidConfiguration(t *testing.T) { id := acc.TestClient().Ids.RandomSchemaObjectIdentifier() - model := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), "foo.bar.hoge"). - WithComment("foo").WithAppendOnly(r.BooleanTrue).WithShowInitialRows(r.BooleanTrue).WithCopyGrants(true) + modelWithInvalidTableId := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), "invalid") + + modelWithBefore := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), "foo.bar.hoge"). + WithComment("foo"). + WithCopyGrants(false). + WithAppendOnly(r.BooleanFalse). 
+ WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "offset": pluginconfig.StringVariable("0"), + "timestamp": pluginconfig.StringVariable("0"), + "statement": pluginconfig.StringVariable("0"), + "stream": pluginconfig.StringVariable("0"), + })) + + modelWithAt := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), "foo.bar.hoge"). + WithComment("foo"). + WithCopyGrants(false). + WithAppendOnly(r.BooleanFalse). + WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ + "offset": pluginconfig.StringVariable("0"), + "timestamp": pluginconfig.StringVariable("0"), + "statement": pluginconfig.StringVariable("0"), + "stream": pluginconfig.StringVariable("0"), + })) resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -535,28 +598,18 @@ func TestAcc_StreamOnTable_InvalidConfiguration(t *testing.T) { // multiple excluding options - before { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/before"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ - "offset": pluginconfig.StringVariable("0"), - "timestamp": pluginconfig.StringVariable("0"), - "statement": pluginconfig.StringVariable("0"), - "stream": pluginconfig.StringVariable("0"), - }))), - ExpectError: regexp.MustCompile("Error: Invalid combination of arguments"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithBefore), + ExpectError: regexp.MustCompile("Error: Invalid combination of arguments"), }, // multiple excluding options - at { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StreamOnTable/at"), - ConfigVariables: tfconfig.ConfigVariablesFromModel(t, model.WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ - "offset": pluginconfig.StringVariable("0"), - "timestamp": pluginconfig.StringVariable("0"), - "statement": pluginconfig.StringVariable("0"), - "stream": pluginconfig.StringVariable("0"), - }))), - ExpectError: regexp.MustCompile("Error: Invalid combination of arguments"), + ConfigVariables: tfconfig.ConfigVariablesFromModel(t, modelWithAt), + ExpectError: regexp.MustCompile("Error: Invalid combination of arguments"), }, // invalid table id { - Config: config.FromModel(t, model.WithBeforeValue(nil).WithAtValue(nil).WithTable("invalid")), + Config: config.FromModel(t, modelWithInvalidTableId), ExpectError: regexp.MustCompile("Error: Invalid identifier type"), }, }, diff --git a/pkg/schemas/common_types.go b/pkg/schemas/common_types.go index 841609c516..ac4f7f9e0f 100644 --- a/pkg/schemas/common_types.go +++ b/pkg/schemas/common_types.go @@ -27,75 +27,3 @@ var FullyQualifiedNameSchema = &schema.Schema{ Computed: true, Description: "Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution).", } - -var AtSchema = &schema.Schema{ - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Description: "This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field.", - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "timestamp": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies an exact date and time to use for Time Travel. 
The value must be explicitly cast to a TIMESTAMP, TIMESTAMP_LTZ, TIMESTAMP_NTZ, or TIMESTAMP_TZ data type.", - ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, - }, - "offset": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies the difference in seconds from the current time to use for Time Travel, in the form -N where N can be an integer or arithmetic expression (e.g. -120 is 120 seconds, -30*60 is 1800 seconds or 30 minutes).", - ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, - }, - "statement": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies the query ID of a statement to use as the reference point for Time Travel. This parameter supports any statement of one of the following types: DML (e.g. INSERT, UPDATE, DELETE), TCL (BEGIN, COMMIT transaction), SELECT.", - ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, - }, - "stream": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies the identifier (i.e. name) for an existing stream on the queried table or view. The current offset in the stream is used as the AT point in time for returning change data for the source object.", - ExactlyOneOf: []string{"at.0.timestamp", "at.0.offset", "at.0.statement", "at.0.stream"}, - }, - }, - }, - ConflictsWith: []string{"before"}, -} - -var BeforeSchema = &schema.Schema{ - Type: schema.TypeList, - Optional: true, - MaxItems: 1, - Description: "This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field.", - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "timestamp": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies an exact date and time to use for Time Travel. The value must be explicitly cast to a TIMESTAMP, TIMESTAMP_LTZ, TIMESTAMP_NTZ, or TIMESTAMP_TZ data type.", - ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, - }, - "offset": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies the difference in seconds from the current time to use for Time Travel, in the form -N where N can be an integer or arithmetic expression (e.g. -120 is 120 seconds, -30*60 is 1800 seconds or 30 minutes).", - ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, - }, - "statement": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies the query ID of a statement to use as the reference point for Time Travel. This parameter supports any statement of one of the following types: DML (e.g. INSERT, UPDATE, DELETE), TCL (BEGIN, COMMIT transaction), SELECT.", - ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, - }, - "stream": { - Type: schema.TypeString, - Optional: true, - Description: "Specifies the identifier (i.e. name) for an existing stream on the queried table or view. 
The current offset in the stream is used as the AT point in time for returning change data for the source object.", - ExactlyOneOf: []string{"before.0.timestamp", "before.0.offset", "before.0.statement", "before.0.stream"}, - }, - }, - }, - ConflictsWith: []string{"at"}, -} diff --git a/pkg/schemas/stream.go b/pkg/schemas/stream.go index c4f9f22c80..bc64f5e05e 100644 --- a/pkg/schemas/stream.go +++ b/pkg/schemas/stream.go @@ -5,6 +5,7 @@ package schemas import ( "log" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -102,11 +103,9 @@ func StreamDescriptionToSchema(stream sdk.Stream) map[string]any { streamSchema["source_type"] = stream.SourceType } if stream.BaseTables != nil { - idsFQN := make([]string, len(stream.BaseTables)) - for i := range idsFQN { - idsFQN[i] = stream.BaseTables[i].FullyQualifiedName() - } - streamSchema["base_tables"] = idsFQN + streamSchema["base_tables"] = collections.Map(stream.BaseTables, func(id sdk.SchemaObjectIdentifier) string { + return id.FullyQualifiedName() + }) } if stream.Type != nil { streamSchema["type"] = stream.Type diff --git a/pkg/schemas/stream_gen.go b/pkg/schemas/stream_gen.go index 7a116777ab..2a35b1ec79 100644 --- a/pkg/schemas/stream_gen.go +++ b/pkg/schemas/stream_gen.go @@ -10,7 +10,6 @@ import ( ) // ShowStreamSchema represents output of SHOW query for the single Stream. -// TODO: merge with desc schema var ShowStreamSchema = map[string]*schema.Schema{ "created_on": { Type: schema.TypeString, diff --git a/pkg/sdk/parsers_test.go b/pkg/sdk/parsers_test.go index 0865f7e5bc..314ef9892b 100644 --- a/pkg/sdk/parsers_test.go +++ b/pkg/sdk/parsers_test.go @@ -184,6 +184,11 @@ func TestParseCommaSeparatedSchemaObjectIdentifierArray_Invalid(t *testing.T) { Value: "[a.b.c, a.b]", Error: "unexpected number of parts 2 in identifier a.b, expected 3 in a form of \"..\"", }, + { + Name: "too many parts", + Value: "[a.b.c, a.b.c.d]", + Error: "unexpected number of parts 4 in identifier a.b.c.d, expected 3 in a form of \"..\"", + }, { Name: "missing parts - empty id", Value: "[a.b.c, ]", diff --git a/pkg/sdk/streams_def.go b/pkg/sdk/streams_def.go index fae2264c89..53a612d624 100644 --- a/pkg/sdk/streams_def.go +++ b/pkg/sdk/streams_def.go @@ -12,14 +12,14 @@ import ( type StreamSourceType string const ( - StreamSourceTypeTable StreamSourceType = "Table" - StreamSourceTypeExternalTable StreamSourceType = "External Table" - StreamSourceTypeView StreamSourceType = "View" - StreamSourceTypeStage StreamSourceType = "Stage" + StreamSourceTypeTable StreamSourceType = "TABLE" + StreamSourceTypeExternalTable StreamSourceType = "EXTERNAL TABLE" + StreamSourceTypeView StreamSourceType = "VIEW" + StreamSourceTypeStage StreamSourceType = "STAGE" ) func ToStreamSourceType(s string) (StreamSourceType, error) { - switch streamSourceType := StreamSourceType(s); streamSourceType { + switch streamSourceType := StreamSourceType(strings.ToUpper(s)); streamSourceType { case StreamSourceTypeTable, StreamSourceTypeExternalTable, StreamSourceTypeView, diff --git a/pkg/sdk/streams_gen.go b/pkg/sdk/streams_gen.go index da44c90a07..7ca207b1e9 100644 --- a/pkg/sdk/streams_gen.go +++ b/pkg/sdk/streams_gen.go @@ -177,6 +177,10 @@ func (v *Stream) ID() SchemaObjectIdentifier { return NewSchemaObjectIdentifier(v.DatabaseName, v.SchemaName, v.Name) } +func (v *Stream) IsAppendOnly() bool { + return v 
!= nil && v.Mode != nil && *v.Mode == StreamModeAppendOnly +} + // DescribeStreamOptions is based on https://docs.snowflake.com/en/sql-reference/sql/desc-stream. type DescribeStreamOptions struct { describe bool `ddl:"static" sql:"DESCRIBE"` diff --git a/pkg/sdk/testint/streams_gen_integration_test.go b/pkg/sdk/testint/streams_gen_integration_test.go index e79508ab8f..f1900073c7 100644 --- a/pkg/sdk/testint/streams_gen_integration_test.go +++ b/pkg/sdk/testint/streams_gen_integration_test.go @@ -55,7 +55,7 @@ func TestInt_Streams(t *testing.T) { HasComment("some comment"). HasSourceType(sdk.StreamSourceTypeTable). HasMode(sdk.StreamModeAppendOnly). - HasTableId(tableId.FullyQualifiedName()), + HasTableId(tableId), ) // at stream @@ -132,7 +132,7 @@ func TestInt_Streams(t *testing.T) { HasComment("some comment"). HasSourceType(sdk.StreamSourceTypeExternalTable). HasMode(sdk.StreamModeInsertOnly). - HasTableId(externalTableId.FullyQualifiedName()), + HasTableId(externalTableId), ) }) @@ -180,7 +180,7 @@ func TestInt_Streams(t *testing.T) { HasComment("some comment"). HasSourceType(sdk.StreamSourceTypeView). HasMode(sdk.StreamModeAppendOnly). - HasTableId(view.ID().FullyQualifiedName()), + HasTableId(view.ID()), ) }) @@ -206,7 +206,7 @@ func TestInt_Streams(t *testing.T) { HasComment("some comment"). HasSourceType(sdk.StreamSourceTypeTable). HasMode(sdk.StreamModeDefault). - HasTableId(table.ID().FullyQualifiedName()), + HasTableId(table.ID()), ) }) @@ -361,7 +361,7 @@ func TestInt_Streams(t *testing.T) { HasComment("some comment"). HasSourceType(sdk.StreamSourceTypeTable). HasMode(sdk.StreamModeDefault). - HasTableId(table.ID().FullyQualifiedName()), + HasTableId(table.ID()), ) }) @@ -452,7 +452,7 @@ func TestInt_Streams(t *testing.T) { HasComment("some comment"). HasSourceType(sdk.StreamSourceTypeTable). HasMode(sdk.StreamModeDefault). 
- HasTableId(table.ID().FullyQualifiedName()), + HasTableId(table.ID()), ) }) From c1f7c234fcd8962531b2672c8f10191f471b0ae3 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Wed, 2 Oct 2024 11:56:16 +0200 Subject: [PATCH 04/11] Fix tests --- pkg/resources/stream_on_table_acceptance_test.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkg/resources/stream_on_table_acceptance_test.go b/pkg/resources/stream_on_table_acceptance_test.go index 51611b7b79..e6ed79bd6c 100644 --- a/pkg/resources/stream_on_table_acceptance_test.go +++ b/pkg/resources/stream_on_table_acceptance_test.go @@ -95,7 +95,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner", snowflakeroles.Accountadmin.Name())), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.comment", "")), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.table_name", table.ID().FullyQualifiedName())), - assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", "Table")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", string(sdk.StreamSourceTypeTable))), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.#", "1")), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.type", "DELTA")), @@ -154,7 +154,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner", snowflakeroles.Accountadmin.Name())), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.comment", "foo")), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.table_name", table.ID().FullyQualifiedName())), - assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", "Table")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", string(sdk.StreamSourceTypeTable))), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.#", "1")), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.type", "DELTA")), @@ -414,7 +414,7 @@ func TestAcc_StreamOnTable_At(t *testing.T) { assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner", snowflakeroles.Accountadmin.Name())), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.comment", "foo")), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.table_name", table.ID().FullyQualifiedName())), - assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", "Table")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", string(sdk.StreamSourceTypeTable))), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.#", "1")), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.type", "DELTA")), @@ -531,7 +531,7 @@ func 
TestAcc_StreamOnTable_Before(t *testing.T) { assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.owner", snowflakeroles.Accountadmin.Name())), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.comment", "foo")), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.table_name", table.ID().FullyQualifiedName())), - assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", "Table")), + assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.source_type", string(sdk.StreamSourceTypeTable))), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.#", "1")), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.base_tables.0", table.ID().FullyQualifiedName())), assert.Check(resource.TestCheckResourceAttr(resourceName, "describe_output.0.type", "DELTA")), From bb56ac99ba21c82519f5a5a17a498893f2acc4cf Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Thu, 3 Oct 2024 11:58:02 +0200 Subject: [PATCH 05/11] Fixes --- MIGRATION_GUIDE.md | 31 ++++++++ pkg/resources/stream_on_table.go | 18 ++--- pkg/schemas/stream_gen.go | 1 + pkg/sdk/streams_gen_test.go | 118 ++++++++++++++++++++++++++++++- 4 files changed, 155 insertions(+), 13 deletions(-) diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index fe25fb6916..4c461f6fa8 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -16,6 +16,37 @@ The newly introduced resources are aligned with the latest Snowflake documentati This segregation was based on the object on which the stream is created. The mapping between SQL statements and the resources is the following: - `ON TABLE ` -> `snowflake_stream_on_table` +To use the new `stream_on_table`, change the old `stream` from +```terraform +resource "snowflake_stream" "stream" { + name = "stream" + schema = "schema" + database = "database" + + on_table = snowflake_table.table.fully_qualified_name + append_only = true + + comment = "A stream." +} +``` + +to + +``` +resource "snowflake_stream_on_table" "stream" { + name = "stream" + schema = "schema" + database = "database" + + table = snowflake_table.table.fully_qualified_name + append_only = "true" + + comment = "A stream." +} +``` + +Then, follow our [Resource migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/resource_migration.md). 
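As a concrete illustration of the migration described above, the state move from the linked guide typically reduces to two CLI steps. This is a sketch only; the resource addresses and the quoted identifier are placeholders mirroring the snippets above, not commands shipped in this patch.

```shell
# Remove the old resource from state, then import its replacement.
# The import ID is the quoted fully qualified name: '"<database>"."<schema>"."<stream>"'.
terraform state rm snowflake_stream.stream
terraform import snowflake_stream_on_table.stream '"database"."schema"."stream"'
```

After the import, a `terraform plan` should report no changes for the new resource if the configuration matches the object in Snowflake.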
+ ## v0.95.0 ➞ v0.96.0 ### snowflake_masking_policies data source changes diff --git a/pkg/resources/stream_on_table.go b/pkg/resources/stream_on_table.go index 494b09b0b9..a31b484c43 100644 --- a/pkg/resources/stream_on_table.go +++ b/pkg/resources/stream_on_table.go @@ -168,20 +168,14 @@ func CreateStreamOnTable(orReplace bool) schema.CreateContextFunc { req.WithCopyGrants(true).WithOrReplace(true) } - if v := d.Get("append_only").(string); v != BooleanDefault { - parsed, err := booleanStringToBool(v) - if err != nil { - return diag.FromErr(err) - } - req.WithAppendOnly(parsed) + err = booleanStringAttributeCreate(d, "append_only", &req.AppendOnly) + if err != nil { + return diag.FromErr(err) } - if v := d.Get("show_initial_rows").(string); v != BooleanDefault { - parsed, err := booleanStringToBool(v) - if err != nil { - return diag.FromErr(err) - } - req.WithShowInitialRows(parsed) + err = booleanStringAttributeCreate(d, "show_initial_rows", &req.ShowInitialRows) + if err != nil { + return diag.FromErr(err) } streamTimeTravelReq := handleStreamTimeTravel(d) diff --git a/pkg/schemas/stream_gen.go b/pkg/schemas/stream_gen.go index 2a35b1ec79..de5383a0d6 100644 --- a/pkg/schemas/stream_gen.go +++ b/pkg/schemas/stream_gen.go @@ -78,6 +78,7 @@ var ShowStreamSchema = map[string]*schema.Schema{ var _ = ShowStreamSchema +// Adjusted manually. func StreamToSchema(stream *sdk.Stream) map[string]any { streamSchema := make(map[string]any) streamSchema["created_on"] = stream.CreatedOn.String() diff --git a/pkg/sdk/streams_gen_test.go b/pkg/sdk/streams_gen_test.go index 76963db251..295f3edfb8 100644 --- a/pkg/sdk/streams_gen_test.go +++ b/pkg/sdk/streams_gen_test.go @@ -1,6 +1,10 @@ package sdk -import "testing" +import ( + "testing" + + "github.com/stretchr/testify/require" +) func TestStreams_CreateOnTable(t *testing.T) { id := randomSchemaObjectIdentifier() @@ -540,3 +544,115 @@ func TestStreams_Describe(t *testing.T) { assertOptsValidAndSQLEquals(t, opts, `DESCRIBE STREAM %s`, id.FullyQualifiedName()) }) } + +func TestToStreamSourceType(t *testing.T) { + tests := []struct { + input string + want StreamSourceType + wantErr string + }{ + { + input: "TABLE", + want: StreamSourceTypeTable, + }, + { + input: "EXTERNAL TABLE", + want: StreamSourceTypeExternalTable, + }, + { + input: "VIEW", + want: StreamSourceTypeView, + }, + { + input: "STAGE", + want: StreamSourceTypeStage, + }, + { + input: "table", + want: StreamSourceTypeTable, + }, + { + input: "external table", + want: StreamSourceTypeExternalTable, + }, + { + input: "view", + want: StreamSourceTypeView, + }, + { + input: "stage", + want: StreamSourceTypeStage, + }, + { + input: "", + wantErr: "invalid stream source type", + }, + { + input: "foo", + wantErr: "invalid stream source type", + }, + } + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + got, err := ToStreamSourceType(tt.input) + if tt.wantErr != "" { + require.ErrorContains(t, err, tt.wantErr) + } else { + require.NoError(t, err) + require.Equal(t, tt.want, got) + } + }) + } +} + +func TestToStreamMode(t *testing.T) { + tests := []struct { + input string + want StreamMode + wantErr string + }{ + { + input: "DEFAULT", + want: StreamModeDefault, + }, + { + input: "APPEND_ONLY", + want: StreamModeAppendOnly, + }, + { + input: "INSERT_ONLY", + want: StreamModeInsertOnly, + }, + { + input: "default", + want: StreamModeDefault, + }, + { + input: "append_only", + want: StreamModeAppendOnly, + }, + { + input: "insert_only", + want: StreamModeInsertOnly, + }, + { + 
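+			// Empty and unknown inputs must be rejected with "invalid stream mode".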
input: "", + wantErr: "invalid stream mode", + }, + { + input: "foo", + wantErr: "invalid stream mode", + }, + } + for _, tt := range tests { + t.Run(tt.input, func(t *testing.T) { + got, err := ToStreamMode(tt.input) + if tt.wantErr != "" { + require.ErrorContains(t, err, tt.wantErr) + } else { + require.NoError(t, err) + require.Equal(t, tt.want, got) + } + }) + } +} From 5ba98dd58fa3be86e55c8e4bfcb4061d2eaded4a Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Thu, 3 Oct 2024 15:20:06 +0200 Subject: [PATCH 06/11] Review --- docs/resources/stream_on_table.md | 8 +++---- pkg/resources/stream_common.go | 4 ++-- pkg/resources/stream_on_table.go | 16 ++++++++----- .../stream_on_table_acceptance_test.go | 23 +++++++++++++------ .../testdata/TestAcc_StreamOnTable/at/test.tf | 4 ++-- .../TestAcc_StreamOnTable/at/variables.tf | 6 ++++- .../TestAcc_StreamOnTable/before/test.tf | 6 ++--- .../TestAcc_StreamOnTable/before/variables.tf | 8 +++++++ 8 files changed, 50 insertions(+), 25 deletions(-) diff --git a/docs/resources/stream_on_table.md b/docs/resources/stream_on_table.md index 238fb6a841..37d22ca1ea 100644 --- a/docs/resources/stream_on_table.md +++ b/docs/resources/stream_on_table.md @@ -60,11 +60,11 @@ resource "snowflake_stream_on_table" "stream" { ### Optional - `append_only` (String) Specifies whether this is an append-only stream. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. -- `at` (Block List, Max: 1) This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field. (see [below for nested schema](#nestedblock--at)) -- `before` (Block List, Max: 1) This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field. (see [below for nested schema](#nestedblock--before)) +- `at` (Block List, Max: 1) This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". (see [below for nested schema](#nestedblock--at)) +- `before` (Block List, Max: 1) This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". (see [below for nested schema](#nestedblock--before)) - `comment` (String) Specifies a comment for the stream. -- `copy_grants` (Boolean) Retains the access permissions from the original stream when a new stream is created using the OR REPLACE clause. 
-- `show_initial_rows` (String) Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `copy_grants` (Boolean) Retains the access permissions from the original stream when a new stream is created using the OR REPLACE clause. Use only if the resource is already managed by Terraform. +- `show_initial_rows` (String) Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". ### Read-Only diff --git a/pkg/resources/stream_common.go b/pkg/resources/stream_common.go index 1d1fd3b085..52dedf2a6b 100644 --- a/pkg/resources/stream_common.go +++ b/pkg/resources/stream_common.go @@ -63,7 +63,7 @@ var atSchema = &schema.Schema{ Type: schema.TypeList, Optional: true, MaxItems: 1, - Description: "This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field.", + Description: externalChangesNotDetectedFieldDescription("This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field."), Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "timestamp": { @@ -101,7 +101,7 @@ var beforeSchema = &schema.Schema{ Type: schema.TypeList, Optional: true, MaxItems: 1, - Description: "This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field.", + Description: externalChangesNotDetectedFieldDescription("This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field."), Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "timestamp": { diff --git a/pkg/resources/stream_on_table.go b/pkg/resources/stream_on_table.go index a31b484c43..31b254e1ad 100644 --- a/pkg/resources/stream_on_table.go +++ b/pkg/resources/stream_on_table.go @@ -42,7 +42,7 @@ var streamOnTableSchema = map[string]*schema.Schema{ Type: schema.TypeBool, Optional: true, Default: false, - Description: "Retains the access permissions from the original stream when a new stream is created using the OR REPLACE clause.", + Description: "Retains the access permissions from the original stream when a new stream is created using the OR REPLACE clause. 
Use only if the resource is already managed by Terraform.", DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { return oldValue != "" && oldValue != newValue }, @@ -69,7 +69,7 @@ var streamOnTableSchema = map[string]*schema.Schema{ Optional: true, Default: BooleanDefault, ValidateDiagFunc: validateBooleanString, - Description: booleanStringFieldDescription("Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed."), + Description: externalChangesNotDetectedFieldDescription(booleanStringFieldDescription("Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed.")), }, "comment": { Type: schema.TypeString, @@ -162,10 +162,14 @@ func CreateStreamOnTable(orReplace bool) schema.CreateContextFunc { req := sdk.NewCreateOnTableStreamRequest(id, tableId) if orReplace { req.WithOrReplace(true) - } - - if v := d.Get("copy_grants"); v.(bool) { - req.WithCopyGrants(true).WithOrReplace(true) + } else if d.Get("copy_grants").(bool) { + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Error, + Summary: "COPY GRANTS cannot be used without OR REPLACE. If you are creating this object, first import it, so that it's managed by the provider, and try again.", + Detail: fmt.Sprintf("stream name: %s", id.FullyQualifiedName()), + }, + } } err = booleanStringAttributeCreate(d, "append_only", &req.AppendOnly) diff --git a/pkg/resources/stream_on_table_acceptance_test.go b/pkg/resources/stream_on_table_acceptance_test.go index e6ed79bd6c..e3c0d5fabf 100644 --- a/pkg/resources/stream_on_table_acceptance_test.go +++ b/pkg/resources/stream_on_table_acceptance_test.go @@ -39,7 +39,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { } modelWithExtraFields := baseModel(). - WithCopyGrants(true). + WithCopyGrants(false). WithComment("foo"). WithAppendOnly(r.BooleanTrue). WithShowInitialRows(r.BooleanTrue). @@ -48,7 +48,7 @@ func TestAcc_StreamOnTable_Basic(t *testing.T) { })) modelWithExtraFieldsDefaultMode := baseModel(). - WithCopyGrants(true). + WithCopyGrants(false). WithComment("foo"). WithAppendOnly(r.BooleanFalse). WithShowInitialRows(r.BooleanTrue). @@ -303,7 +303,7 @@ func TestAcc_StreamOnTable_CopyGrants(t *testing.T) { CheckDestroy: acc.CheckDestroy(t, resources.StreamOnTable), Steps: []resource.TestStep{ { - Config: config.FromModel(t, model.WithCopyGrants(true)), + Config: config.FromModel(t, model.WithCopyGrants(false)), Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). HasNameString(id.Name()), assert.Check(resource.TestCheckResourceAttrWith(resourceName, "show_output.0.created_on", func(value string) error { @@ -313,7 +313,7 @@ func TestAcc_StreamOnTable_CopyGrants(t *testing.T) { ), }, { - Config: config.FromModel(t, model.WithCopyGrants(false)), + Config: config.FromModel(t, model.WithCopyGrants(true)), Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). HasNameString(id.Name()), assert.Check(resource.TestCheckResourceAttrWith(resourceName, "show_output.0.created_on", func(value string) error { @@ -325,7 +325,7 @@ func TestAcc_StreamOnTable_CopyGrants(t *testing.T) { ), }, { - Config: config.FromModel(t, model.WithCopyGrants(true)), + Config: config.FromModel(t, model.WithCopyGrants(false)), Check: assert.AssertThat(t, resourceassert.StreamOnTableResource(t, resourceName). 
HasNameString(id.Name()), assert.Check(resource.TestCheckResourceAttrWith(resourceName, "show_output.0.created_on", func(value string) error { @@ -359,7 +359,7 @@ func TestAcc_StreamOnTable_At(t *testing.T) { WithComment("foo"). WithAppendOnly(r.BooleanTrue). WithShowInitialRows(r.BooleanTrue). - WithCopyGrants(true) + WithCopyGrants(false) } modelWithOffset := baseModel().WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ @@ -476,7 +476,7 @@ func TestAcc_StreamOnTable_Before(t *testing.T) { WithComment("foo"). WithAppendOnly(r.BooleanTrue). WithShowInitialRows(r.BooleanTrue). - WithCopyGrants(true) + WithCopyGrants(false) } modelWithOffset := baseModel().WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ @@ -569,6 +569,7 @@ func TestAcc_StreamOnTable_InvalidConfiguration(t *testing.T) { WithComment("foo"). WithCopyGrants(false). WithAppendOnly(r.BooleanFalse). + WithShowInitialRows(r.BooleanFalse). WithBeforeValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ "offset": pluginconfig.StringVariable("0"), "timestamp": pluginconfig.StringVariable("0"), @@ -580,6 +581,7 @@ func TestAcc_StreamOnTable_InvalidConfiguration(t *testing.T) { WithComment("foo"). WithCopyGrants(false). WithAppendOnly(r.BooleanFalse). + WithShowInitialRows(r.BooleanFalse). WithAtValue(pluginconfig.MapVariable(map[string]pluginconfig.Variable{ "offset": pluginconfig.StringVariable("0"), "timestamp": pluginconfig.StringVariable("0"), @@ -587,6 +589,8 @@ func TestAcc_StreamOnTable_InvalidConfiguration(t *testing.T) { "stream": pluginconfig.StringVariable("0"), })) + modelWithCopyGrantsOnCreate := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), "foo.bar.hoge").WithCopyGrants(true) + resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -612,6 +616,11 @@ func TestAcc_StreamOnTable_InvalidConfiguration(t *testing.T) { Config: config.FromModel(t, modelWithInvalidTableId), ExpectError: regexp.MustCompile("Error: Invalid identifier type"), }, + // copy grants during create + { + Config: config.FromModel(t, modelWithCopyGrantsOnCreate), + ExpectError: regexp.MustCompile("Error: COPY GRANTS cannot be used without OR REPLACE. 
If you are creating this object, first import it, so that it's managed by the provider, and try again"), + }, }, }) } diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/at/test.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/at/test.tf index 1b59b8225e..4df5f695e8 100644 --- a/pkg/resources/testdata/TestAcc_StreamOnTable/at/test.tf +++ b/pkg/resources/testdata/TestAcc_StreamOnTable/at/test.tf @@ -3,10 +3,10 @@ resource "snowflake_stream_on_table" "test" { database = var.database schema = var.schema - copy_grants = true + copy_grants = var.copy_grants table = var.table append_only = var.append_only - show_initial_rows = "true" + show_initial_rows = var.show_initial_rows at { timestamp = try(var.at["timestamp"], null) diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/at/variables.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/at/variables.tf index 122583b5bb..2b6ad6130c 100644 --- a/pkg/resources/testdata/TestAcc_StreamOnTable/at/variables.tf +++ b/pkg/resources/testdata/TestAcc_StreamOnTable/at/variables.tf @@ -18,8 +18,12 @@ variable "copy_grants" { type = bool } +variable "show_initial_rows" { + type = string +} + variable "append_only" { - type = bool + type = string } variable "at" { diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf index 035a4863af..d740590110 100644 --- a/pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf +++ b/pkg/resources/testdata/TestAcc_StreamOnTable/before/test.tf @@ -3,10 +3,10 @@ resource "snowflake_stream_on_table" "test" { database = var.database schema = var.schema - copy_grants = true + copy_grants = var.copy_grants table = var.table - append_only = "true" - show_initial_rows = "true" + append_only = var.append_only + show_initial_rows = var.show_initial_rows before { timestamp = try(var.before["timestamp"], null) diff --git a/pkg/resources/testdata/TestAcc_StreamOnTable/before/variables.tf b/pkg/resources/testdata/TestAcc_StreamOnTable/before/variables.tf index 6322e980ff..c526973e14 100644 --- a/pkg/resources/testdata/TestAcc_StreamOnTable/before/variables.tf +++ b/pkg/resources/testdata/TestAcc_StreamOnTable/before/variables.tf @@ -18,6 +18,14 @@ variable "copy_grants" { type = bool } +variable "show_initial_rows" { + type = string +} + +variable "append_only" { + type = string +} + variable "before" { type = map(string) } From 72e29d3f49ebc019e54bac1384b9e203210b15d9 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Fri, 4 Oct 2024 09:29:58 +0200 Subject: [PATCH 07/11] Cleanup --- pkg/resources/stream.go | 4 ++-- pkg/schemas/stream.go | 4 +--- pkg/schemas/stream_gen.go | 7 ++----- 3 files changed, 5 insertions(+), 10 deletions(-) diff --git a/pkg/resources/stream.go b/pkg/resources/stream.go index 7a11058005..cf5713e933 100644 --- a/pkg/resources/stream.go +++ b/pkg/resources/stream.go @@ -247,11 +247,11 @@ func ReadStream(d *schema.ResourceData, meta interface{}) error { return err } switch *stream.SourceType { - case "Stage": + case sdk.StreamSourceTypeStage: if err := d.Set("on_stage", *stream.TableName); err != nil { return err } - case "View": + case sdk.StreamSourceTypeView: if err := d.Set("on_view", *stream.TableName); err != nil { return err } diff --git a/pkg/schemas/stream.go b/pkg/schemas/stream.go index bc64f5e05e..fc49e25a71 100644 --- a/pkg/schemas/stream.go +++ b/pkg/schemas/stream.go @@ -103,9 +103,7 @@ func StreamDescriptionToSchema(stream sdk.Stream) map[string]any { streamSchema["source_type"] = stream.SourceType } if 
stream.BaseTables != nil { - streamSchema["base_tables"] = collections.Map(stream.BaseTables, func(id sdk.SchemaObjectIdentifier) string { - return id.FullyQualifiedName() - }) + streamSchema["base_tables"] = collections.Map(stream.BaseTables, sdk.SchemaObjectIdentifier.FullyQualifiedName) } if stream.Type != nil { streamSchema["type"] = stream.Type diff --git a/pkg/schemas/stream_gen.go b/pkg/schemas/stream_gen.go index de5383a0d6..ae1b06a86b 100644 --- a/pkg/schemas/stream_gen.go +++ b/pkg/schemas/stream_gen.go @@ -7,6 +7,7 @@ import ( "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" ) // ShowStreamSchema represents output of SHOW query for the single Stream. @@ -103,11 +104,7 @@ func StreamToSchema(stream *sdk.Stream) map[string]any { streamSchema["source_type"] = stream.SourceType } if stream.BaseTables != nil { - idsFQN := make([]string, len(stream.BaseTables)) - for i := range idsFQN { - idsFQN[i] = stream.BaseTables[i].FullyQualifiedName() - } - streamSchema["base_tables"] = idsFQN + streamSchema["base_tables"] = collections.Map(stream.BaseTables, sdk.SchemaObjectIdentifier.FullyQualifiedName) } if stream.Type != nil { streamSchema["type"] = stream.Type From 7cb18a96e580feb3bd49e1734ade447c8f735ae3 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Fri, 4 Oct 2024 09:48:04 +0200 Subject: [PATCH 08/11] pre push --- pkg/schemas/stream_gen.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/schemas/stream_gen.go b/pkg/schemas/stream_gen.go index ae1b06a86b..bd9bed5c85 100644 --- a/pkg/schemas/stream_gen.go +++ b/pkg/schemas/stream_gen.go @@ -5,9 +5,9 @@ package schemas import ( "log" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" ) // ShowStreamSchema represents output of SHOW query for the single Stream. From 81dec99a87740624ce17a68afb3914dc5f43a9a3 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Fri, 4 Oct 2024 11:50:20 +0200 Subject: [PATCH 09/11] skip copy grants for empty state --- pkg/resources/stream_on_table.go | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/pkg/resources/stream_on_table.go b/pkg/resources/stream_on_table.go index 31b254e1ad..f80e20f6eb 100644 --- a/pkg/resources/stream_on_table.go +++ b/pkg/resources/stream_on_table.go @@ -42,7 +42,7 @@ var streamOnTableSchema = map[string]*schema.Schema{ Type: schema.TypeBool, Optional: true, Default: false, - Description: "Retains the access permissions from the original stream when a new stream is created using the OR REPLACE clause. Use only if the resource is already managed by Terraform.", + Description: "Retains the access permissions from the original stream when a new stream is created using the OR REPLACE clause. Use only if the resource is already managed by Terraform. 
Otherwise, this field is skipped.", DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { return oldValue != "" && oldValue != newValue }, @@ -162,13 +162,8 @@ func CreateStreamOnTable(orReplace bool) schema.CreateContextFunc { req := sdk.NewCreateOnTableStreamRequest(id, tableId) if orReplace { req.WithOrReplace(true) - } else if d.Get("copy_grants").(bool) { - return diag.Diagnostics{ - diag.Diagnostic{ - Severity: diag.Error, - Summary: "COPY GRANTS cannot be used without OR REPLACE. If you are creating this object, first import it, so that it's managed by the provider, and try again.", - Detail: fmt.Sprintf("stream name: %s", id.FullyQualifiedName()), - }, + if d.Get("copy_grants").(bool) { + req.WithCopyGrants(true) } } From 08c9c852fe81b9a76f4d76583ed4ea66cf195f10 Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Fri, 4 Oct 2024 11:50:54 +0200 Subject: [PATCH 10/11] pre push --- docs/resources/stream_on_table.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/resources/stream_on_table.md b/docs/resources/stream_on_table.md index 37d22ca1ea..6de9fe8563 100644 --- a/docs/resources/stream_on_table.md +++ b/docs/resources/stream_on_table.md @@ -63,7 +63,7 @@ resource "snowflake_stream_on_table" "stream" { - `at` (Block List, Max: 1) This field specifies that the request is inclusive of any changes made by a statement or transaction with a timestamp equal to the specified parameter. Due to Snowflake limitations, the provider does not detect external changes on this field. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". (see [below for nested schema](#nestedblock--at)) - `before` (Block List, Max: 1) This field specifies that the request refers to a point immediately preceding the specified parameter. This point in time is just before the statement, identified by its query ID, is completed. Due to Snowflake limitations, the provider does not detect external changes on this field. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". (see [below for nested schema](#nestedblock--before)) - `comment` (String) Specifies a comment for the stream. -- `copy_grants` (Boolean) Retains the access permissions from the original stream when a new stream is created using the OR REPLACE clause. Use only if the resource is already managed by Terraform. +- `copy_grants` (Boolean) Retains the access permissions from the original stream when a new stream is created using the OR REPLACE clause. Use only if the resource is already managed by Terraform. Otherwise, this field is skipped. - `show_initial_rows` (String) Specifies whether to return all existing rows in the source table as row inserts the first time the stream is consumed. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
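To make the `at`/`before` semantics listed above concrete, here is a minimal configuration sketch (editorial, not generated documentation). It assumes a `snowflake_table.example` resource; exactly one of `timestamp`, `offset`, `statement`, or `stream` may be set inside the chosen block, and `at` conflicts with `before`.

```terraform
resource "snowflake_stream_on_table" "example" {
  name     = "stream"
  database = "database"
  schema   = "schema"
  table    = snowflake_table.example.fully_qualified_name

  # Exactly one of timestamp/offset/statement/stream may be set here,
  # and this `at` block cannot be combined with a `before` block.
  at {
    offset = "-120" # 120 seconds back, using the -N offset form described above
  }
}
```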
### Read-Only From 5a16dd94cdd87b082fcb66aede700b9bed79e0da Mon Sep 17 00:00:00 2001 From: Jakub Michalak Date: Fri, 4 Oct 2024 16:56:16 +0200 Subject: [PATCH 11/11] Fix tests --- pkg/resources/stream_on_table_acceptance_test.go | 7 ------- 1 file changed, 7 deletions(-) diff --git a/pkg/resources/stream_on_table_acceptance_test.go b/pkg/resources/stream_on_table_acceptance_test.go index e3c0d5fabf..99604e330c 100644 --- a/pkg/resources/stream_on_table_acceptance_test.go +++ b/pkg/resources/stream_on_table_acceptance_test.go @@ -589,8 +589,6 @@ func TestAcc_StreamOnTable_InvalidConfiguration(t *testing.T) { "stream": pluginconfig.StringVariable("0"), })) - modelWithCopyGrantsOnCreate := model.StreamOnTable("test", id.DatabaseName(), id.Name(), id.SchemaName(), "foo.bar.hoge").WithCopyGrants(true) - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -616,11 +614,6 @@ func TestAcc_StreamOnTable_InvalidConfiguration(t *testing.T) { Config: config.FromModel(t, modelWithInvalidTableId), ExpectError: regexp.MustCompile("Error: Invalid identifier type"), }, - // copy grants during create - { - Config: config.FromModel(t, modelWithCopyGrantsOnCreate), - ExpectError: regexp.MustCompile("Error: COPY GRANTS cannot be used without OR REPLACE. If you are creating this object, first import it, so that it's managed by the provider, and try again"), - }, }, }) }
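For reference, a hedged sketch of how the SDK helpers introduced in this series, `sdk.ToStreamSourceType` and `(*sdk.Stream).IsAppendOnly`, might be consumed. The `main` wrapper and the partially populated `sdk.Stream` literal are illustrative assumptions; only the two helpers come from the patch.

```go
package main

import (
	"fmt"
	"log"

	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
)

func main() {
	// ToStreamSourceType is case-insensitive after this series: "table" maps to TABLE.
	sourceType, err := sdk.ToStreamSourceType("table")
	if err != nil {
		log.Fatal(err) // unknown inputs fail with "invalid stream source type"
	}

	// IsAppendOnly is nil-safe: it returns false for a nil receiver or a nil Mode.
	mode := sdk.StreamModeAppendOnly
	stream := &sdk.Stream{Mode: &mode} // illustrative, partially populated struct

	fmt.Printf("source=%s append_only=%t\n", sourceType, stream.IsAppendOnly())
}
```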