From fedb1df2a731d139d68d2284bf3be47fcc4d0115 Mon Sep 17 00:00:00 2001
From: Scott Winkler
Date: Tue, 23 Jan 2024 01:43:14 -0800
Subject: [PATCH 1/5] fix: app-pkg unset (#2399)

Follow-up PR from the discussion in the original PR:
https://github.com/Snowflake-Labs/terraform-provider-snowflake/pull/2323#discussion_r1444581864

Fixes Unset to render with commas, allowing multiple properties to be
unset in a single statement (a short usage sketch follows the
validations diff below).
---
 pkg/sdk/application_packages_def.go           |  4 +--
 pkg/sdk/application_packages_gen.go           |  2 +-
 pkg/sdk/application_packages_gen_test.go      | 36 ++++++-------------
 .../application_packages_validations_gen.go   |  4 +--
 .../application_packages_integration_test.go  | 13 ++-----
 5 files changed, 19 insertions(+), 40 deletions(-)

diff --git a/pkg/sdk/application_packages_def.go b/pkg/sdk/application_packages_def.go
index ca64a270b5..8f6bdbce65 100644
--- a/pkg/sdk/application_packages_def.go
+++ b/pkg/sdk/application_packages_def.go
@@ -48,7 +48,7 @@ var applicationPackageUnset = g.NewQueryStruct("ApplicationPackageUnset").
 	OptionalSQL("DEFAULT_DDL_COLLATION").
 	OptionalSQL("COMMENT").
 	OptionalSQL("DISTRIBUTION").
-	WithValidation(g.ExactlyOneValueSet, "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "DefaultDdlCollation", "Comment", "Distribution")
+	WithValidation(g.AtLeastOneValueSet, "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "DefaultDdlCollation", "Comment", "Distribution")
 
 var ApplicationPackagesDef = g.NewInterface(
 	"ApplicationPackages",
@@ -83,7 +83,7 @@ var ApplicationPackagesDef = g.NewInterface(
 		OptionalQueryStructField(
 			"Unset",
 			applicationPackageUnset,
-			g.KeywordOptions().SQL("UNSET"),
+			g.ListOptions().NoParentheses().SQL("UNSET"),
 		).
 		OptionalQueryStructField(
 			"ModifyReleaseDirective",
diff --git a/pkg/sdk/application_packages_gen.go b/pkg/sdk/application_packages_gen.go
index e660bdafcc..a7c730d9e2 100644
--- a/pkg/sdk/application_packages_gen.go
+++ b/pkg/sdk/application_packages_gen.go
@@ -34,7 +34,7 @@ type AlterApplicationPackageOptions struct {
 	IfExists                   *bool                       `ddl:"keyword" sql:"IF EXISTS"`
 	name                       AccountObjectIdentifier     `ddl:"identifier"`
 	Set                        *ApplicationPackageSet      `ddl:"keyword" sql:"SET"`
-	Unset                      *ApplicationPackageUnset    `ddl:"keyword" sql:"UNSET"`
+	Unset                      *ApplicationPackageUnset    `ddl:"list,no_parentheses" sql:"UNSET"`
 	ModifyReleaseDirective     *ModifyReleaseDirective     `ddl:"keyword" sql:"MODIFY RELEASE DIRECTIVE"`
 	SetDefaultReleaseDirective *SetDefaultReleaseDirective `ddl:"keyword" sql:"SET DEFAULT RELEASE DIRECTIVE"`
 	SetReleaseDirective        *SetReleaseDirective        `ddl:"keyword" sql:"SET RELEASE DIRECTIVE"`
diff --git a/pkg/sdk/application_packages_gen_test.go b/pkg/sdk/application_packages_gen_test.go
index 0bfc3204fc..339846bc84 100644
--- a/pkg/sdk/application_packages_gen_test.go
+++ b/pkg/sdk/application_packages_gen_test.go
@@ -79,6 +79,12 @@ func TestApplicationPackages_Alter(t *testing.T) {
 		assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterApplicationPackageOptions", "Set", "Unset", "ModifyReleaseDirective", "SetDefaultReleaseDirective", "SetReleaseDirective", "UnsetReleaseDirective", "AddVersion", "DropVersion", "AddPatchForVersion", "SetTags", "UnsetTags"))
 	})
 
+	t.Run("validation: unset options at least one field should be present", func(t *testing.T) {
+		opts := defaultOpts()
+		opts.Unset = &ApplicationPackageUnset{}
+		assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("AlterApplicationPackageOptions.Unset", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "DefaultDdlCollation", "Comment", "Distribution"))
+	})
+
 	t.Run("alter: set options", func(t *testing.T) {
opts := defaultOpts() opts.Set = &ApplicationPackageSet{ @@ -94,33 +100,13 @@ func TestApplicationPackages_Alter(t *testing.T) { t.Run("alter: unset options", func(t *testing.T) { opts := defaultOpts() opts.Unset = &ApplicationPackageUnset{ - Comment: Bool(true), - } - assertOptsValidAndSQLEquals(t, opts, `ALTER APPLICATION PACKAGE IF EXISTS %s UNSET COMMENT`, id.FullyQualifiedName()) - - opts = defaultOpts() - opts.Unset = &ApplicationPackageUnset{ - DataRetentionTimeInDays: Bool(true), - } - assertOptsValidAndSQLEquals(t, opts, `ALTER APPLICATION PACKAGE IF EXISTS %s UNSET DATA_RETENTION_TIME_IN_DAYS`, id.FullyQualifiedName()) - - opts = defaultOpts() - opts.Unset = &ApplicationPackageUnset{ + DataRetentionTimeInDays: Bool(true), MaxDataExtensionTimeInDays: Bool(true), + DefaultDdlCollation: Bool(true), + Comment: Bool(true), + Distribution: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, `ALTER APPLICATION PACKAGE IF EXISTS %s UNSET MAX_DATA_EXTENSION_TIME_IN_DAYS`, id.FullyQualifiedName()) - - opts = defaultOpts() - opts.Unset = &ApplicationPackageUnset{ - DefaultDdlCollation: Bool(true), - } - assertOptsValidAndSQLEquals(t, opts, `ALTER APPLICATION PACKAGE IF EXISTS %s UNSET DEFAULT_DDL_COLLATION`, id.FullyQualifiedName()) - - opts = defaultOpts() - opts.Unset = &ApplicationPackageUnset{ - Distribution: Bool(true), - } - assertOptsValidAndSQLEquals(t, opts, `ALTER APPLICATION PACKAGE IF EXISTS %s UNSET DISTRIBUTION`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER APPLICATION PACKAGE IF EXISTS %s UNSET DATA_RETENTION_TIME_IN_DAYS, MAX_DATA_EXTENSION_TIME_IN_DAYS, DEFAULT_DDL_COLLATION, COMMENT, DISTRIBUTION`, id.FullyQualifiedName()) }) t.Run("alter: set tags", func(t *testing.T) { diff --git a/pkg/sdk/application_packages_validations_gen.go b/pkg/sdk/application_packages_validations_gen.go index de5102bf76..14ddbd323c 100644 --- a/pkg/sdk/application_packages_validations_gen.go +++ b/pkg/sdk/application_packages_validations_gen.go @@ -30,8 +30,8 @@ func (opts *AlterApplicationPackageOptions) validate() error { errs = append(errs, errExactlyOneOf("AlterApplicationPackageOptions", "Set", "Unset", "ModifyReleaseDirective", "SetDefaultReleaseDirective", "SetReleaseDirective", "UnsetReleaseDirective", "AddVersion", "DropVersion", "AddPatchForVersion", "SetTags", "UnsetTags")) } if valueSet(opts.Unset) { - if !exactlyOneValueSet(opts.Unset.DataRetentionTimeInDays, opts.Unset.MaxDataExtensionTimeInDays, opts.Unset.DefaultDdlCollation, opts.Unset.Comment, opts.Unset.Distribution) { - errs = append(errs, errExactlyOneOf("AlterApplicationPackageOptions.Unset", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "DefaultDdlCollation", "Comment", "Distribution")) + if !anyValueSet(opts.Unset.DataRetentionTimeInDays, opts.Unset.MaxDataExtensionTimeInDays, opts.Unset.DefaultDdlCollation, opts.Unset.Comment, opts.Unset.Distribution) { + errs = append(errs, errAtLeastOneOf("AlterApplicationPackageOptions.Unset", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "DefaultDdlCollation", "Comment", "Distribution")) } } return JoinErrors(errs...) 
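For reference, a minimal sketch of what the fixed UNSET clause enables, mirroring the integration test in the next file (`client`, `ctx`, and `id` refer to an existing SDK client, context, and application package identifier; error handling elided). The statement now renders as a single `ALTER APPLICATION PACKAGE IF EXISTS <name> UNSET COMMENT, DISTRIBUTION` instead of one ALTER per property:

	// Unset two application package properties in a single ALTER statement.
	unset := sdk.NewApplicationPackageUnsetRequest().
		WithComment(sdk.Bool(true)).
		WithDistribution(sdk.Bool(true))
	err := client.ApplicationPackages.Alter(ctx, sdk.NewAlterApplicationPackageRequest(id).WithUnset(unset))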
diff --git a/pkg/sdk/testint/application_packages_integration_test.go b/pkg/sdk/testint/application_packages_integration_test.go index 52958249ab..6e232ca589 100644 --- a/pkg/sdk/testint/application_packages_integration_test.go +++ b/pkg/sdk/testint/application_packages_integration_test.go @@ -123,20 +123,13 @@ func TestInt_ApplicationPackages(t *testing.T) { e := createApplicationPackageHandle(t) id := sdk.NewAccountObjectIdentifier(e.Name) - // unset comment - unset := sdk.NewApplicationPackageUnsetRequest().WithComment(sdk.Bool(true)) + // unset comment and distribution + unset := sdk.NewApplicationPackageUnsetRequest().WithComment(sdk.Bool(true)).WithDistribution(sdk.Bool(true)) err := client.ApplicationPackages.Alter(ctx, sdk.NewAlterApplicationPackageRequest(id).WithUnset(unset)) require.NoError(t, err) o, err := client.ApplicationPackages.ShowByID(ctx, id) require.NoError(t, err) require.Empty(t, o.Comment) - - // unset distribution - unset = sdk.NewApplicationPackageUnsetRequest().WithDistribution(sdk.Bool(true)) - err = client.ApplicationPackages.Alter(ctx, sdk.NewAlterApplicationPackageRequest(id).WithUnset(unset)) - require.NoError(t, err) - o, err = client.ApplicationPackages.ShowByID(ctx, id) - require.NoError(t, err) require.Equal(t, sdk.DistributionInternal, sdk.Distribution(o.Distribution)) }) @@ -201,7 +194,7 @@ func TestInt_ApplicationPackagesVersionAndReleaseDirective(t *testing.T) { createApplicationPackageHandle := func(t *testing.T) *sdk.ApplicationPackage { t.Helper() - id := sdk.RandomAccountObjectIdentifier() + id := sdk.NewAccountObjectIdentifier("snowflake_package_test") request := sdk.NewCreateApplicationPackageRequest(id).WithDistribution(sdk.DistributionPointer(sdk.DistributionInternal)) err := client.ApplicationPackages.Create(ctx, request) require.NoError(t, err) From ce0741ce226be9464407b549e90cb179b0fe5880 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Tue, 23 Jan 2024 11:08:53 +0100 Subject: [PATCH 2/5] feat: Use SDK in the storage integration (#2380) --- pkg/datasources/storage_integrations.go | 50 ++- .../storage_integrations_acceptance_test.go | 82 +++-- .../TestAcc_StorageIntegrations_basic/test.tf | 12 + .../variables.tf | 15 + pkg/resources/helpers_test.go | 21 ++ pkg/resources/storage_integration.go | 316 ++++++++-------- .../storage_integration_acceptance_test.go | 339 ++++++++++++++++-- pkg/resources/storage_integration_test.go | 188 ---------- .../AWSObjectACL_Update/after/test.tf | 7 + .../AWSObjectACL_Update/after/variables.tf | 12 + .../AWSObjectACL_Update/before/test.tf | 6 + .../AWSObjectACL_Update/before/variables.tf | 7 + .../AWS_Update/set/test.tf | 10 + .../AWS_Update/set/variables.tf | 23 ++ .../AWS_Update/unset/test.tf | 7 + .../AWS_Update/unset/variables.tf | 11 + .../Azure_Update/set/test.tf | 9 + .../Azure_Update/set/variables.tf | 19 + .../Azure_Update/unset/test.tf | 7 + .../Azure_Update/unset/variables.tf | 11 + .../Empty_StorageAllowedLocations/test.tf | 5 + .../GCP_Update/set/test.tf | 8 + .../GCP_Update/set/variables.tf | 15 + .../GCP_Update/unset/test.tf | 6 + .../GCP_Update/unset/variables.tf | 7 + pkg/sdk/storage_integration_def.go | 7 +- .../storage_integration_dto_builders_gen.go | 15 +- pkg/sdk/storage_integration_dto_gen.go | 7 +- pkg/sdk/storage_integration_gen.go | 20 +- pkg/sdk/storage_integration_gen_test.go | 11 +- pkg/sdk/storage_integration_impl_gen.go | 22 +- ...torage_integration_gen_integration_test.go | 5 +- pkg/snowflake/storage_integration.go | 61 ---- 
 pkg/snowflake/storage_integration_test.go     |  27 --
 34 files changed, 812 insertions(+), 556 deletions(-)
 create mode 100644 pkg/datasources/testdata/TestAcc_StorageIntegrations_basic/test.tf
 create mode 100644 pkg/datasources/testdata/TestAcc_StorageIntegrations_basic/variables.tf
 delete mode 100644 pkg/resources/storage_integration_test.go
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/after/test.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/after/variables.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/before/test.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/before/variables.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/set/test.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/set/variables.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/unset/test.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/unset/variables.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/set/test.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/set/variables.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/unset/test.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/unset/variables.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/Empty_StorageAllowedLocations/test.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/set/test.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/set/variables.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/unset/test.tf
 create mode 100644 pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/unset/variables.tf
 delete mode 100644 pkg/snowflake/storage_integration.go
 delete mode 100644 pkg/snowflake/storage_integration_test.go

diff --git a/pkg/datasources/storage_integrations.go b/pkg/datasources/storage_integrations.go
index 6b0e216554..2bb7bdf7b9 100644
--- a/pkg/datasources/storage_integrations.go
+++ b/pkg/datasources/storage_integrations.go
@@ -1,12 +1,11 @@
 package datasources
 
 import (
+	"context"
 	"database/sql"
-	"errors"
 	"fmt"
-	"log"
 
-	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake"
+	"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
 )
 
@@ -49,40 +48,39 @@ func StorageIntegrations() *schema.Resource {
 
 func ReadStorageIntegrations(d *schema.ResourceData, meta interface{}) error {
 	db := meta.(*sql.DB)
+	ctx := context.Background()
+	client := sdk.NewClientFromDB(db)
 
-	account, err := snowflake.ReadCurrentAccount(db)
+	account, err := client.ContextFunctions.CurrentAccount(ctx)
 	if err != nil {
-		log.Print("[DEBUG] unable to retrieve current account")
 		d.SetId("")
-		return nil
+		return fmt.Errorf("unable to retrieve current account, err = %w", err)
 	}
 
-	d.SetId(fmt.Sprintf("%s.%s", account.Account, account.Region))
-
-	currentStorageIntegrations, err := snowflake.ListStorageIntegrations(db)
-	if errors.Is(err, sql.ErrNoRows) {
-		// If not found, mark resource to be removed from state file during apply or refresh
-		log.Printf("[DEBUG] no storage integrations found in account (%s)", d.Id())
-		d.SetId("")
-		return nil
-	} else if err != nil {
-		log.Printf("[DEBUG] unable to parse storage integrations in account (%s)", d.Id())
+	region, err := client.ContextFunctions.CurrentRegion(ctx)
+	if err != nil {
 		d.SetId("")
-		return nil
+		return fmt.Errorf("unable to retrieve current region, err = %w", err)
 	}
 
-	storageIntegrations := []map[string]interface{}{}
+	d.SetId(fmt.Sprintf("%s.%s", account, region))
 
-	for _, storageIntegration := range currentStorageIntegrations {
-		storageIntegrationMap := map[string]interface{}{}
+	storageIntegrations, err := client.StorageIntegrations.Show(ctx, sdk.NewShowStorageIntegrationRequest())
+	if err != nil {
+		d.SetId("")
+		return fmt.Errorf("unable to retrieve storage integrations in account (%s), err = %w", d.Id(), err)
+	}
 
-		storageIntegrationMap["name"] = storageIntegration.Name.String
-		storageIntegrationMap["type"] = storageIntegration.IntegrationType.String
-		storageIntegrationMap["comment"] = storageIntegration.Comment.String
-		storageIntegrationMap["enabled"] = storageIntegration.Enabled.Bool
+	storageIntegrationMaps := make([]map[string]any, len(storageIntegrations))
 
-		storageIntegrations = append(storageIntegrations, storageIntegrationMap)
+	for i, storageIntegration := range storageIntegrations {
+		storageIntegrationMaps[i] = map[string]any{
+			"name":    storageIntegration.Name,
+			"type":    storageIntegration.StorageType,
+			"enabled": storageIntegration.Enabled,
+			"comment": storageIntegration.Comment,
+		}
 	}
 
-	return d.Set("storage_integrations", storageIntegrations)
+	return d.Set("storage_integrations", storageIntegrationMaps)
 }
diff --git a/pkg/datasources/storage_integrations_acceptance_test.go b/pkg/datasources/storage_integrations_acceptance_test.go
index 16dbe6b699..4158cfe282 100644
--- a/pkg/datasources/storage_integrations_acceptance_test.go
+++ b/pkg/datasources/storage_integrations_acceptance_test.go
@@ -2,42 +2,84 @@ package datasources_test
 
 import (
 	"fmt"
-	"strings"
+	"strconv"
 	"testing"
 
+	acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance"
+	"github.com/hashicorp/terraform-plugin-testing/config"
+	"github.com/hashicorp/terraform-plugin-testing/terraform"
+	"github.com/hashicorp/terraform-plugin-testing/tfversion"
+
 	"github.com/hashicorp/terraform-plugin-testing/helper/acctest"
 	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
 )
 
-func TestAcc_StorageIntegrations(t *testing.T) {
-	storageIntegrationName := strings.ToUpper(acctest.RandStringFromCharSet(10, acctest.CharSetAlpha))
-	resource.ParallelTest(t, resource.TestCase{
-		Providers:    providers(),
-		CheckDestroy: nil,
+func TestAcc_StorageIntegrations_basic(t *testing.T) {
+	name := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha)
+
+	resource.Test(t, resource.TestCase{
+		ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
+		PreCheck:                 func() { acc.TestAccPreCheck(t) },
+		TerraformVersionChecks: []tfversion.TerraformVersionCheck{
+			tfversion.RequireAbove(tfversion.Version1_5_0),
+		},
 		Steps: []resource.TestStep{
 			{
-				Config: storageIntegrations(storageIntegrationName),
+				ConfigVariables: config.Variables{
+					"name": config.StringVariable(name),
+					"allowed_locations": config.SetVariable(
+						config.StringVariable("gcs://foo/"),
+						config.StringVariable("gcs://bar/"),
+					),
+					"blocked_locations": config.SetVariable(
+						config.StringVariable("gcs://foo/"),
+						config.StringVariable("gcs://bar/"),
+					),
+					"comment": config.StringVariable("some comment"),
+				},
+				ConfigDirectory: config.TestNameDirectory(),
 				Check: resource.ComposeTestCheckFunc(
-					resource.TestCheckResourceAttrSet("data.snowflake_storage_integrations.s", "storage_integrations.#"),
-					resource.TestCheckResourceAttrSet("data.snowflake_storage_integrations.s", "storage_integrations.0.name"),
+					resource.TestCheckResourceAttrSet("data.snowflake_storage_integrations.test", "storage_integrations.#"),
+					containsStorageIntegration(name, true, "some comment"),
 				),
 			},
 		},
 	})
 }
 
-func storageIntegrations(storageIntegrationName string) string {
-	return fmt.Sprintf(`
+func containsStorageIntegration(name string, enabled bool, comment string) resource.TestCheckFunc {
+	return func(state *terraform.State) error {
+		for _, rs := range state.RootModule().Resources {
+			if rs.Type != "snowflake_storage_integrations" {
+				continue
+			}
+			iter, err := strconv.ParseInt(rs.Primary.Attributes["storage_integrations.#"], 10, 32)
+			if err != nil {
+				return err
+			}
 
-resource snowflake_storage_integration i {
-	name = "%v"
-	storage_allowed_locations = ["s3://foo/"]
-	storage_provider = "S3"
-	storage_aws_role_arn = "arn:aws:iam::000000000001:/role/test"
-}
+			for i := 0; i < int(iter); i++ {
+				if rs.Primary.Attributes[fmt.Sprintf("storage_integrations.%d.name", i)] == name {
+					actualEnabled, err := strconv.ParseBool(rs.Primary.Attributes[fmt.Sprintf("storage_integrations.%d.enabled", i)])
+					if err != nil {
+						return err
+					}
+
+					if actualEnabled != enabled {
+						return fmt.Errorf("expected enabled: %v, but got: %v", enabled, actualEnabled)
+					}
+
+					actualComment := rs.Primary.Attributes[fmt.Sprintf("storage_integrations.%d.comment", i)]
+					if actualComment != comment {
+						return fmt.Errorf("expected comment: %s, but got: %s", comment, actualComment)
+					}
+
+					return nil
+				}
+			}
 
-data snowflake_storage_integrations "s" {
-	depends_on = [snowflake_storage_integration.i]
+			return fmt.Errorf("storage integration (%s) not found", name)
+		}
+		return nil
 	}
-	`, storageIntegrationName)
 }
diff --git a/pkg/datasources/testdata/TestAcc_StorageIntegrations_basic/test.tf b/pkg/datasources/testdata/TestAcc_StorageIntegrations_basic/test.tf
new file mode 100644
index 0000000000..93c562e0a0
--- /dev/null
+++ b/pkg/datasources/testdata/TestAcc_StorageIntegrations_basic/test.tf
@@ -0,0 +1,12 @@
+resource "snowflake_storage_integration" "test" {
+  name                      = var.name
+  enabled                   = true
+  storage_provider          = "GCS"
+  comment                   = var.comment
+  storage_allowed_locations = var.allowed_locations
+  storage_blocked_locations = var.blocked_locations
+}
+
+data "snowflake_storage_integrations" "test" {
+  depends_on = [snowflake_storage_integration.test]
+}
diff --git a/pkg/datasources/testdata/TestAcc_StorageIntegrations_basic/variables.tf b/pkg/datasources/testdata/TestAcc_StorageIntegrations_basic/variables.tf
new file mode 100644
index 0000000000..ad738d355d
--- /dev/null
+++ b/pkg/datasources/testdata/TestAcc_StorageIntegrations_basic/variables.tf
@@ -0,0 +1,15 @@
+variable "name" {
+  type = string
+}
+
+variable "comment" {
+  type = string
+}
+
+variable "allowed_locations" {
+  type = set(string)
+}
+
+variable "blocked_locations" {
+  type = set(string)
+}
diff --git a/pkg/resources/helpers_test.go b/pkg/resources/helpers_test.go
index 78c07827b6..cf65bfff25 100644
--- a/pkg/resources/helpers_test.go
+++ b/pkg/resources/helpers_test.go
@@ -1,6 +1,7 @@
 package resources_test
 
 import (
+	"os"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
@@ -21,6 +22,26 @@ const (
 	onAll
 )
 
+var (
+	awsBucketUrl, awsBucketUrlIsSet = os.LookupEnv("TEST_SF_TF_AWS_EXTERNAL_BUCKET_URL")
+	
awsKeyId, awsKeyIdIsSet = os.LookupEnv("TEST_SF_TF_AWS_EXTERNAL_KEY_ID") + awsSecretKey, awsSecretKeyIsSet = os.LookupEnv("TEST_SF_TF_AWS_EXTERNAL_SECRET_KEY") + awsRoleARN, awsRoleARNIsSet = os.LookupEnv("TEST_SF_TF_AWS_EXTERNAL_ROLE_ARN") + + gcsBucketUrl, gcsBucketUrlIsSet = os.LookupEnv("TEST_SF_TF_GCS_EXTERNAL_BUCKET_URL") + + azureBucketUrl, azureBucketUrlIsSet = os.LookupEnv("TEST_SF_TF_AZURE_EXTERNAL_BUCKET_URL") + azureTenantId, azureTenantIdIsSet = os.LookupEnv("TEST_SF_TF_AZURE_EXTERNAL_TENANT_ID") + + hasExternalEnvironmentVariablesSet = awsBucketUrlIsSet && + awsKeyIdIsSet && + awsSecretKeyIsSet && + awsRoleARNIsSet && + gcsBucketUrlIsSet && + azureBucketUrlIsSet && + azureTenantIdIsSet +) + func TestGetPropertyAsPointer(t *testing.T) { d := schema.TestResourceDataRaw(t, map[string]*schema.Schema{ "integer": { diff --git a/pkg/resources/storage_integration.go b/pkg/resources/storage_integration.go index 64486bb463..d78394a076 100644 --- a/pkg/resources/storage_integration.go +++ b/pkg/resources/storage_integration.go @@ -1,19 +1,20 @@ package resources import ( + "context" "database/sql" - "errors" "fmt" "log" "strings" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) var storageIntegrationSchema = map[string]*schema.Schema{ - // The first part of the schema is shared between all integration vendors "name": { Type: schema.TypeString, Required: true, @@ -49,10 +50,11 @@ var storageIntegrationSchema = map[string]*schema.Schema{ Optional: true, Description: "Explicitly prohibits external stages that use the integration from referencing one or more storage locations.", }, - // This part of the schema is the cloudProviderParams in the Snowflake documentation and differs between vendors + // TODO (SNOW-1015282): Remove S3gov option before going into V1 "storage_provider": { Type: schema.TypeString, Required: true, + ForceNew: true, ValidateFunc: validation.StringInSlice([]string{"S3", "S3gov", "GCS", "AZURE", "S3GOV"}, false), }, "storage_aws_external_id": { @@ -118,284 +120,262 @@ func StorageIntegration() *schema.Resource { } } -// CreateStorageIntegration implements schema.CreateFunc. 
-func CreateStorageIntegration(d *schema.ResourceData, meta interface{}) error {
+func CreateStorageIntegration(d *schema.ResourceData, meta any) error {
 	db := meta.(*sql.DB)
-	name := d.Get("name").(string)
-
-	stmt := snowflake.NewStorageIntegrationBuilder(name).Create()
-
-	// Set required fields
-	stmt.SetString(`TYPE`, d.Get("type").(string))
-	stmt.SetBool(`ENABLED`, d.Get("enabled").(bool))
+	ctx := context.Background()
+	client := sdk.NewClientFromDB(db)
+
+	name := sdk.NewAccountObjectIdentifierFromFullyQualifiedName(d.Get("name").(string))
+	enabled := d.Get("enabled").(bool)
+	stringStorageAllowedLocations := expandStringList(d.Get("storage_allowed_locations").([]any))
+	storageAllowedLocations := make([]sdk.StorageLocation, len(stringStorageAllowedLocations))
+	for i, loc := range stringStorageAllowedLocations {
+		storageAllowedLocations[i] = sdk.StorageLocation{
+			Path: loc,
+		}
+	}
 
-	stmt.SetStringList("STORAGE_ALLOWED_LOCATIONS", expandStringList(d.Get("storage_allowed_locations").([]interface{})))
+	req := sdk.NewCreateStorageIntegrationRequest(name, enabled, storageAllowedLocations)
 
-	// Set optional fields
 	if v, ok := d.GetOk("comment"); ok {
-		stmt.SetString(`COMMENT`, v.(string))
+		req.WithComment(sdk.String(v.(string)))
 	}
 
 	if _, ok := d.GetOk("storage_blocked_locations"); ok {
-		stmt.SetStringList("STORAGE_BLOCKED_LOCATIONS", expandStringList(d.Get("storage_blocked_locations").([]interface{})))
+		stringStorageBlockedLocations := expandStringList(d.Get("storage_blocked_locations").([]any))
+		storageBlockedLocations := make([]sdk.StorageLocation, len(stringStorageBlockedLocations))
+		for i, loc := range stringStorageBlockedLocations {
+			storageBlockedLocations[i] = sdk.StorageLocation{
+				Path: loc,
+			}
+		}
+		// attach the blocked locations to the create request so they are not silently dropped
+		req.WithStorageBlockedLocations(storageBlockedLocations)
 	}
 
-	if _, ok := d.GetOk("storage_aws_object_acl"); ok {
-		stmt.SetString("STORAGE_AWS_OBJECT_ACL", d.Get("storage_aws_object_acl").(string))
-	}
+	storageProvider := d.Get("storage_provider").(string)
 
-	// Now, set the storage provider
-	if err := setStorageProviderSettings(d, stmt); err != nil {
-		return err
+	switch storageProvider {
+	case "S3", "S3GOV", "S3gov":
+		v, ok := d.GetOk("storage_aws_role_arn")
+		if !ok {
+			return fmt.Errorf("if you use the S3 storage provider you must specify a storage_aws_role_arn")
+		}
+
+		s3Params := sdk.NewS3StorageParamsRequest(v.(string))
+		if _, ok := d.GetOk("storage_aws_object_acl"); ok {
+			s3Params.WithStorageAwsObjectAcl(sdk.String(d.Get("storage_aws_object_acl").(string)))
+		}
+		req.WithS3StorageProviderParams(s3Params)
+	case "AZURE":
+		v, ok := d.GetOk("azure_tenant_id")
+		if !ok {
+			return fmt.Errorf("if you use the Azure storage provider you must specify an azure_tenant_id")
+		}
+		req.WithAzureStorageProviderParams(sdk.NewAzureStorageParamsRequest(sdk.String(v.(string))))
+	case "GCS":
+		req.WithGCSStorageProviderParams(sdk.NewGCSStorageParamsRequest())
+	default:
+		return fmt.Errorf("unexpected provider %v", storageProvider)
 	}
-	if err := snowflake.Exec(db, stmt.Statement()); err != nil {
+
+	if err := client.StorageIntegrations.Create(ctx, req); err != nil {
 		return fmt.Errorf("error creating storage integration: %w", err)
 	}
 
-	d.SetId(name)
-
+	d.SetId(helpers.EncodeSnowflakeID(name))
 	return ReadStorageIntegration(d, meta)
 }
 
-// ReadStorageIntegration implements schema.ReadFunc.
-func ReadStorageIntegration(d *schema.ResourceData, meta interface{}) error { +func ReadStorageIntegration(d *schema.ResourceData, meta any) error { db := meta.(*sql.DB) - id := d.Id() - - stmt := snowflake.NewStorageIntegrationBuilder(d.Id()).Show() - row := snowflake.QueryRow(db, stmt) - - // Some properties can come from the SHOW INTEGRATION call + ctx := context.Background() + client := sdk.NewClientFromDB(db) + id, ok := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + if !ok { + return fmt.Errorf("storage integration read, error decoding id: %s as sdk.AccountObjectIdentifier, got: %T", d.Id(), id) + } - s, err := snowflake.ScanStorageIntegration(row) - if errors.Is(err, sql.ErrNoRows) { - // If not found, mark resource to be removed from state file during apply or refresh + s, err := client.StorageIntegrations.ShowByID(ctx, id) + if err != nil { log.Printf("[DEBUG] storage integration (%s) not found", d.Id()) d.SetId("") return nil } - if err != nil { - return fmt.Errorf("could not show storage integration: %w", err) - } - // Note: category must be STORAGE or something is broken - if c := s.Category.String; c != "STORAGE" { - return fmt.Errorf("expected %v to be a STORAGE integration, got %v", id, c) + if s.Category != "STORAGE" { + return fmt.Errorf("expected %v to be a STORAGE integration, got %v", d.Id(), s.Category) } - - if err := d.Set("name", s.Name.String); err != nil { + if err := d.Set("name", s.Name); err != nil { return err } - - if err := d.Set("type", s.IntegrationType.String); err != nil { + if err := d.Set("type", s.StorageType); err != nil { return err } - - if err := d.Set("created_on", s.CreatedOn.String); err != nil { + if err := d.Set("created_on", s.CreatedOn.String()); err != nil { return err } - - if err := d.Set("enabled", s.Enabled.Bool); err != nil { + if err := d.Set("enabled", s.Enabled); err != nil { + return err + } + if err := d.Set("comment", s.Comment); err != nil { return err } - // Some properties come from the DESCRIBE INTEGRATION call - // We need to grab them in a loop - var k, pType string - var v, unused interface{} - stmt = snowflake.NewStorageIntegrationBuilder(d.Id()).Describe() - rows, err := db.Query(stmt) + storageIntegrationProps, err := client.StorageIntegrations.Describe(ctx, id) if err != nil { - return fmt.Errorf("could not describe storage integration: %w", err) + return fmt.Errorf("could not describe storage integration (%s), err = %w", d.Id(), err) } - defer rows.Close() - for rows.Next() { - if err := rows.Scan(&k, &pType, &v, &unused); err != nil { - return err - } - switch k { - case "ENABLED": - // We set this using the SHOW INTEGRATION call so let's ignore it here - case "COMMENT": - if val := v.(string); val != "" { - if err := d.Set("comment", v.(string)); err != nil { - return err - } - } + + for _, prop := range storageIntegrationProps { + switch prop.Name { case "STORAGE_PROVIDER": - if err := d.Set("storage_provider", v.(string)); err != nil { + if err := d.Set("storage_provider", prop.Value); err != nil { return err } case "STORAGE_ALLOWED_LOCATIONS": - if err := d.Set("storage_allowed_locations", strings.Split(v.(string), ",")); err != nil { + if err := d.Set("storage_allowed_locations", strings.Split(prop.Value, ",")); err != nil { return err } case "STORAGE_BLOCKED_LOCATIONS": - if val := v.(string); val != "" { - if err := d.Set("storage_blocked_locations", strings.Split(val, ",")); err != nil { + if prop.Value != "" { + if err := d.Set("storage_blocked_locations", strings.Split(prop.Value, 
",")); err != nil { return err } } case "STORAGE_AWS_IAM_USER_ARN": - if err := d.Set("storage_aws_iam_user_arn", v.(string)); err != nil { + if err := d.Set("storage_aws_iam_user_arn", prop.Value); err != nil { return err } case "STORAGE_AWS_OBJECT_ACL": - if val := v.(string); val != "" { - if err := d.Set("storage_aws_object_acl", v.(string)); err != nil { + if prop.Value != "" { + if err := d.Set("storage_aws_object_acl", prop.Value); err != nil { return err } } case "STORAGE_AWS_ROLE_ARN": - if err := d.Set("storage_aws_role_arn", v.(string)); err != nil { + if err := d.Set("storage_aws_role_arn", prop.Value); err != nil { return err } case "STORAGE_AWS_EXTERNAL_ID": - if err := d.Set("storage_aws_external_id", v.(string)); err != nil { + if err := d.Set("storage_aws_external_id", prop.Value); err != nil { return err } case "STORAGE_GCP_SERVICE_ACCOUNT": - if err := d.Set("storage_gcp_service_account", v.(string)); err != nil { + if err := d.Set("storage_gcp_service_account", prop.Value); err != nil { return err } case "AZURE_CONSENT_URL": - if err := d.Set("azure_consent_url", v.(string)); err != nil { + if err := d.Set("azure_consent_url", prop.Value); err != nil { return err } case "AZURE_MULTI_TENANT_APP_NAME": - if err := d.Set("azure_multi_tenant_app_name", v.(string)); err != nil { + if err := d.Set("azure_multi_tenant_app_name", prop.Value); err != nil { return err } - default: - log.Printf("[WARN] unexpected property %v returned from Snowflake", k) } } return err } -// UpdateStorageIntegration implements schema.UpdateFunc. -func UpdateStorageIntegration(d *schema.ResourceData, meta interface{}) error { +func UpdateStorageIntegration(d *schema.ResourceData, meta any) error { db := meta.(*sql.DB) - id := d.Id() - - stmt := snowflake.NewStorageIntegrationBuilder(id).Alter() + ctx := context.Background() + client := sdk.NewClientFromDB(db) + id, ok := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + if !ok { + return fmt.Errorf("storage integration update, error decoding id: %s as sdk.AccountObjectIdentifier, got: %T", d.Id(), id) + } - // This is required in case the only change is to UNSET STORAGE_ALLOWED_LOCATIONS. - // Not sure if there is a more elegant way of determining this var runSetStatement bool + setReq := sdk.NewStorageIntegrationSetRequest() if d.HasChange("comment") { runSetStatement = true - stmt.SetString("COMMENT", d.Get("comment").(string)) + setReq.WithComment(sdk.String(d.Get("comment").(string))) } if d.HasChange("enabled") { runSetStatement = true - stmt.SetBool(`ENABLED`, d.Get("enabled").(bool)) + setReq.WithEnabled(d.Get("enabled").(bool)) } if d.HasChange("storage_allowed_locations") { runSetStatement = true - stmt.SetStringList("STORAGE_ALLOWED_LOCATIONS", expandStringList(d.Get("storage_allowed_locations").([]interface{}))) + stringStorageAllowedLocations := expandStringList(d.Get("storage_allowed_locations").([]any)) + storageAllowedLocations := make([]sdk.StorageLocation, len(stringStorageAllowedLocations)) + for i, loc := range stringStorageAllowedLocations { + storageAllowedLocations[i] = sdk.StorageLocation{ + Path: loc, + } + } + setReq.WithStorageAllowedLocations(storageAllowedLocations) } - // We need to UNSET this if we remove all storage blocked locations. I don't think - // this is documented by Snowflake, but this is how it works. 
-	//
-	// @TODO move the SQL back to the snowflake package
+	// We need to UNSET this if we remove all storage blocked locations, because Snowflake won't accept an empty list
 	if d.HasChange("storage_blocked_locations") {
 		v := d.Get("storage_blocked_locations").([]interface{})
 		if len(v) == 0 {
-			if err := unsetStorageIntegrationProp(db, d.Id(), "STORAGE_BLOCKED_LOCATIONS"); err != nil {
-				return fmt.Errorf("error unsetting storage_blocked_locations: %w", err)
+			if err := client.StorageIntegrations.Alter(ctx, sdk.NewAlterStorageIntegrationRequest(id).
+				WithUnset(sdk.NewStorageIntegrationUnsetRequest().WithStorageBlockedLocations(sdk.Bool(true)))); err != nil {
+				return fmt.Errorf("error unsetting storage_blocked_locations, err = %w", err)
 			}
 		} else {
 			runSetStatement = true
-			stmt.SetStringList("STORAGE_BLOCKED_LOCATIONS", expandStringList(v))
+			stringStorageBlockedLocations := expandStringList(v)
+			storageBlockedLocations := make([]sdk.StorageLocation, len(stringStorageBlockedLocations))
+			for i, loc := range stringStorageBlockedLocations {
+				storageBlockedLocations[i] = sdk.StorageLocation{
+					Path: loc,
+				}
+			}
+			setReq.WithStorageBlockedLocations(storageBlockedLocations)
 		}
 	}
 
-	// also need to UNSET STORAGE_AWS_OBJECT_ACL if removed
-	if d.HasChange("storage_aws_object_acl") {
-		if _, ok := d.GetOk("storage_aws_object_acl"); ok {
-			if err := setStorageIntegrationProp(db, d.Id(), "STORAGE_AWS_OBJECT_ACL", "bucket-owner-full-control"); err != nil {
-				return fmt.Errorf("error setting storage_aws_object_acl: %w", err)
-			}
-		} else {
-			if err := unsetStorageIntegrationProp(db, d.Id(), "STORAGE_AWS_OBJECT_ACL"); err != nil {
-				return fmt.Errorf("error unsetting storage_aws_object_acl: %w", err)
+	if d.HasChange("storage_aws_role_arn") || d.HasChange("storage_aws_object_acl") {
+		runSetStatement = true
+		s3SetParams := sdk.NewSetS3StorageParamsRequest(d.Get("storage_aws_role_arn").(string))
+
+		if d.HasChange("storage_aws_object_acl") {
+			if v, ok := d.GetOk("storage_aws_object_acl"); ok {
+				s3SetParams.WithStorageAwsObjectAcl(sdk.String(v.(string)))
+			} else {
+				if err := client.StorageIntegrations.Alter(ctx, sdk.NewAlterStorageIntegrationRequest(id).
+ WithUnset(sdk.NewStorageIntegrationUnsetRequest().WithStorageAwsObjectAcl(sdk.Bool(true)))); err != nil { + return fmt.Errorf("error unsetting storage_aws_object_acl, err = %w", err) + } } } + + setReq.WithS3Params(s3SetParams) } - if d.HasChange("storage_provider") { + if d.HasChange("azure_tenant_id") { runSetStatement = true - err := setStorageProviderSettings(d, stmt) - if err != nil { - return err - } - } else { - if d.HasChange("storage_aws_role_arn") { - runSetStatement = true - stmt.SetString("STORAGE_AWS_ROLE_ARN", d.Get("storage_aws_role_arn").(string)) - } - if d.HasChange("azure_tenant_id") { - runSetStatement = true - stmt.SetString("AZURE_TENANT_ID", d.Get("azure_tenant_id").(string)) - } - if d.HasChange("storage_gcp_service_account") { - runSetStatement = true - stmt.SetString("STORAGE_GCP_SERVICE_ACCOUNT", d.Get("storage_gcp_service_account").(string)) - } + setReq.WithAzureParams(sdk.NewSetAzureStorageParamsRequest(d.Get("azure_tenant_id").(string))) } if runSetStatement { - if err := snowflake.Exec(db, stmt.Statement()); err != nil { - return fmt.Errorf("error updating storage integration: %w", err) + if err := client.StorageIntegrations.Alter(ctx, sdk.NewAlterStorageIntegrationRequest(id).WithSet(setReq)); err != nil { + return fmt.Errorf("error updating storage integration, err = %w", err) } } return ReadStorageIntegration(d, meta) } -// DeleteStorageIntegration implements schema.DeleteFunc. -func DeleteStorageIntegration(d *schema.ResourceData, meta interface{}) error { - return DeleteResource("", snowflake.NewStorageIntegrationBuilder)(d, meta) -} - -func setStorageProviderSettings(data *schema.ResourceData, stmt snowflake.SettingBuilder) error { - storageProvider := data.Get("storage_provider").(string) - stmt.SetString("STORAGE_PROVIDER", storageProvider) - - switch storageProvider { - case "S3", "S3GOV": - v, ok := data.GetOk("storage_aws_role_arn") - if !ok { - return fmt.Errorf("if you use the S3 storage provider you must specify a storage_aws_role_arn") - } - stmt.SetString(`STORAGE_AWS_ROLE_ARN`, v.(string)) - case "AZURE": - v, ok := data.GetOk("azure_tenant_id") - if !ok { - return fmt.Errorf("if you use the Azure storage provider you must specify an azure_tenant_id") - } - stmt.SetString(`AZURE_TENANT_ID`, v.(string)) - case "GCS": - // nothing to set here - default: - return fmt.Errorf("unexpected provider %v", storageProvider) +func DeleteStorageIntegration(d *schema.ResourceData, meta any) error { + db := meta.(*sql.DB) + ctx := context.Background() + client := sdk.NewClientFromDB(db) + id, ok := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + if !ok { + return fmt.Errorf("storage integration delete, error decoding id: %s as sdk.AccountObjectIdentifier, got: %T", d.Id(), id) + } + if err := client.StorageIntegrations.Drop(ctx, sdk.NewDropStorageIntegrationRequest(id)); err != nil { + return fmt.Errorf("error dropping storage integration (%s), err = %w", d.Id(), err) } + d.SetId("") return nil } - -func setStorageIntegrationProp(db *sql.DB, name string, prop string, val string) error { - stmt := fmt.Sprintf(`ALTER STORAGE INTEGRATION "%s" SET %s = '%s'`, name, prop, val) - return snowflake.Exec(db, stmt) -} - -func unsetStorageIntegrationProp(db *sql.DB, name string, prop string) error { - stmt := fmt.Sprintf(`ALTER STORAGE INTEGRATION "%s" UNSET %s`, name, prop) - return snowflake.Exec(db, stmt) -} diff --git a/pkg/resources/storage_integration_acceptance_test.go b/pkg/resources/storage_integration_acceptance_test.go index 
7b9d18bda5..dba8205cbe 100644 --- a/pkg/resources/storage_integration_acceptance_test.go +++ b/pkg/resources/storage_integration_acceptance_test.go @@ -1,70 +1,349 @@ package resources_test import ( + "context" + "database/sql" "fmt" "regexp" "testing" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/go-uuid" + "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-plugin-testing/tfversion" + "github.com/stretchr/testify/require" + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) -func TestAcc_StorageIntegration_validation(t *testing.T) { +func TestAcc_StorageIntegration_Empty_StorageAllowedLocations(t *testing.T) { + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: testAccCheckStorageIntegrationDestroy, + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/Empty_StorageAllowedLocations"), + ExpectError: regexp.MustCompile("Not enough list items"), + }, + }, + }) +} + +func TestAcc_StorageIntegration_AWSObjectACL_Update(t *testing.T) { name := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) + configVariables := func(awsObjectACLSet bool) config.Variables { + variables := config.Variables{ + "name": config.StringVariable(name), + "allowed_locations": config.SetVariable( + config.StringVariable("s3://foo/"), + ), + } + if awsObjectACLSet { + variables["aws_object_acl"] = config.StringVariable("bucket-owner-full-control") + } + return variables + } - resource.ParallelTest(t, resource.TestCase{ - Providers: acc.TestAccProviders(), - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: nil, + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: testAccCheckStorageIntegrationDestroy, Steps: []resource.TestStep{ { - Config: storageIntegrationConfig(name, []string{}, false), - ExpectError: regexp.MustCompile("Not enough list items"), + ConfigVariables: configVariables(false), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/AWSObjectACL_Update/before"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "true"), + resource.TestCheckNoResourceAttr("snowflake_storage_integration.test", "storage_aws_object_acl"), + ), + }, + { + ConfigVariables: configVariables(true), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/AWSObjectACL_Update/after"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "true"), + 
resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_aws_object_acl", "bucket-owner-full-control"), + ), + }, + { + ConfigVariables: configVariables(false), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/AWSObjectACL_Update/before"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "true"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_aws_object_acl", ""), + ), }, }, }) } -func TestAcc_StorageIntegration_aws(t *testing.T) { +func TestAcc_StorageIntegration_AWS_Update(t *testing.T) { name := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) + awsRoleArn := "arn:aws:iam::000000000001:/role/test" + configVariables := func(set bool) config.Variables { + variables := config.Variables{ + "name": config.StringVariable(name), + "aws_role_arn": config.StringVariable(awsRoleArn), + "allowed_locations": config.SetVariable( + config.StringVariable("s3://foo/"), + ), + } + if set { + variables["aws_object_acl"] = config.StringVariable("bucket-owner-full-control") + variables["comment"] = config.StringVariable("some comment") + variables["allowed_locations"] = config.SetVariable( + config.StringVariable("s3://foo/"), + config.StringVariable("s3://bar/"), + ) + variables["blocked_locations"] = config.SetVariable( + config.StringVariable("s3://foo/"), + config.StringVariable("s3://bar/"), + ) + } + return variables + } - resource.ParallelTest(t, resource.TestCase{ - Providers: acc.TestAccProviders(), - PreCheck: func() { acc.TestAccPreCheck(t) }, - CheckDestroy: nil, + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: testAccCheckStorageIntegrationDestroy, Steps: []resource.TestStep{ { - Config: storageIntegrationConfig(name, []string{"s3://foo/"}, false), + ConfigVariables: configVariables(false), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/AWS_Update/unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "false"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_aws_role_arn", awsRoleArn), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.#", "1"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.0", "s3://foo/"), + resource.TestCheckNoResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations"), + resource.TestCheckNoResourceAttr("snowflake_storage_integration.test", "storage_aws_object_acl"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "comment", ""), + ), + }, + { + ConfigVariables: configVariables(true), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/AWS_Update/set"), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_storage_integration.i", "name", name), - resource.TestCheckNoResourceAttr("snowflake_storage_integration.i", "storage_aws_object_acl"), + 
resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "true"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "comment", "some comment"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_aws_role_arn", awsRoleArn), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.#", "2"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.0", "s3://bar/"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.1", "s3://foo/"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.#", "2"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.0", "s3://bar/"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.1", "s3://foo/"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_aws_object_acl", "bucket-owner-full-control"), ), }, { - Config: storageIntegrationConfig(name, []string{"s3://foo/"}, true), + ConfigVariables: configVariables(false), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/AWS_Update/unset"), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_storage_integration.i", "name", name), - resource.TestCheckResourceAttr("snowflake_storage_integration.i", "storage_aws_object_acl", "bucket-owner-full-control"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "false"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_aws_role_arn", awsRoleArn), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.#", "1"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.0", "s3://foo/"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.#", "0"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_aws_object_acl", ""), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "comment", ""), ), }, }, }) } -func storageIntegrationConfig(name string, locations []string, awsObjectACL bool) string { - awsObjectACLConfig := "" - if awsObjectACL { - awsObjectACLConfig = "storage_aws_object_acl = \"bucket-owner-full-control\"" +func TestAcc_StorageIntegration_Azure_Update(t *testing.T) { + if !azureBucketUrlIsSet { + t.Skip("Skipping TestAcc_StorageIntegration_Azure_Update (Azure bucket url is not set)") } - return fmt.Sprintf(` -resource snowflake_storage_integration i { - name = "%s" - storage_allowed_locations = %q - storage_provider = "S3" - storage_aws_role_arn = "arn:aws:iam::000000000001:/role/test" - %s + name := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) + azureTenantId, err := uuid.GenerateUUID() + require.NoError(t, err) + configVariables := func(set bool) config.Variables { + variables := config.Variables{ + "name": config.StringVariable(name), + "azure_tenant_id": config.StringVariable(azureTenantId), + "allowed_locations": config.SetVariable( + config.StringVariable(azureBucketUrl + "/foo"), + ), + } + if set { + variables["comment"] = 
config.StringVariable("some comment") + variables["allowed_locations"] = config.SetVariable( + config.StringVariable(azureBucketUrl+"/foo"), + config.StringVariable(azureBucketUrl+"/bar"), + ) + variables["blocked_locations"] = config.SetVariable( + config.StringVariable(azureBucketUrl+"/foo"), + config.StringVariable(azureBucketUrl+"/bar"), + ) + } + return variables + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: testAccCheckStorageIntegrationDestroy, + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables(false), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/Azure_Update/unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "false"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "azure_tenant_id", azureTenantId), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.#", "1"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.0", azureBucketUrl+"/foo"), + resource.TestCheckNoResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "comment", ""), + ), + }, + { + ConfigVariables: configVariables(true), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/Azure_Update/set"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "true"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "comment", "some comment"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "azure_tenant_id", azureTenantId), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.#", "2"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.0", azureBucketUrl+"/bar"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.1", azureBucketUrl+"/foo"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.#", "2"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.0", azureBucketUrl+"/bar"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.1", azureBucketUrl+"/foo"), + ), + }, + { + ConfigVariables: configVariables(false), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/Azure_Update/unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "false"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "azure_tenant_id", azureTenantId), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.#", "1"), + 
resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.0", azureBucketUrl+"/foo"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.#", "0"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "comment", ""), + ), + }, + }, + }) +} + +func TestAcc_StorageIntegration_GCP_Update(t *testing.T) { + name := acctest.RandStringFromCharSet(10, acctest.CharSetAlpha) + configVariables := func(set bool) config.Variables { + variables := config.Variables{ + "name": config.StringVariable(name), + "allowed_locations": config.SetVariable( + config.StringVariable("gcs://foo/"), + ), + } + if set { + variables["comment"] = config.StringVariable("some comment") + variables["allowed_locations"] = config.SetVariable( + config.StringVariable("gcs://foo/"), + config.StringVariable("gcs://bar/"), + ) + variables["blocked_locations"] = config.SetVariable( + config.StringVariable("gcs://foo/"), + config.StringVariable("gcs://bar/"), + ) + } + return variables + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: testAccCheckStorageIntegrationDestroy, + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables(false), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/GCP_Update/unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "false"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.#", "1"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.0", "gcs://foo/"), + resource.TestCheckNoResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "comment", ""), + ), + }, + { + ConfigVariables: configVariables(true), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/GCP_Update/set"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "true"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "comment", "some comment"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.#", "2"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.0", "gcs://bar/"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.1", "gcs://foo/"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.#", "2"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.0", "gcs://bar/"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.1", "gcs://foo/"), + ), + }, + { + ConfigVariables: configVariables(false), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_StorageIntegration/GCP_Update/unset"), + Check: resource.ComposeTestCheckFunc( + 
resource.TestCheckResourceAttr("snowflake_storage_integration.test", "name", name), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "enabled", "false"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.#", "1"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_allowed_locations.0", "gcs://foo/"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "storage_blocked_locations.#", "0"), + resource.TestCheckResourceAttr("snowflake_storage_integration.test", "comment", ""), + ), + }, + }, + }) } -`, name, locations, awsObjectACLConfig) + +func testAccCheckStorageIntegrationDestroy(s *terraform.State) error { + db := acc.TestAccProvider.Meta().(*sql.DB) + client := sdk.NewClientFromDB(db) + for _, rs := range s.RootModule().Resources { + if rs.Type != "snowflake_storage_integration" { + continue + } + ctx := context.Background() + id := sdk.NewAccountObjectIdentifier(rs.Primary.Attributes["name"]) + storageInt, err := client.StorageIntegrations.ShowByID(ctx, id) + if err == nil { + return fmt.Errorf("storage integration %v still exists", storageInt.Name) + } + } + return nil } diff --git a/pkg/resources/storage_integration_test.go b/pkg/resources/storage_integration_test.go deleted file mode 100644 index e7a0ee1428..0000000000 --- a/pkg/resources/storage_integration_test.go +++ /dev/null @@ -1,188 +0,0 @@ -package resources_test - -import ( - "database/sql" - "testing" - - sqlmock "github.com/DATA-DOG/go-sqlmock" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" - . "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/testhelpers" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/stretchr/testify/require" -) - -func TestStorageIntegration(t *testing.T) { - r := require.New(t) - err := resources.StorageIntegration().InternalValidate(provider.Provider().Schema, true) - r.NoError(err) -} - -func TestStorageIntegrationCreate(t *testing.T) { - r := require.New(t) - - in := map[string]interface{}{ - "name": "test_storage_integration", - "comment": "great comment", - "storage_allowed_locations": []interface{}{"s3://great-bucket/great-path/"}, - "storage_provider": "S3", - "storage_aws_role_arn": "we-should-probably-validate-this-string", - "storage_aws_object_acl": "bucket-owner-full-control", - } - - in2 := map[string]interface{}{ - "name": "test_storage_integration_with_s3gov", - "comment": "great comment", - "storage_allowed_locations": []interface{}{"s3://great-bucket/great-path/"}, - "storage_provider": "S3GOV", - "storage_aws_role_arn": "we-should-probably-validate-this-string", - "storage_aws_object_acl": "bucket-owner-full-control", - } - - d := schema.TestResourceDataRaw(t, resources.StorageIntegration().Schema, in) - d2 := schema.TestResourceDataRaw(t, resources.StorageIntegration().Schema, in2) - - r.NotNil(d) - r.NotNil(d2) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - mock.ExpectExec( - `^CREATE STORAGE INTEGRATION "test_storage_integration" COMMENT='great comment' STORAGE_AWS_OBJECT_ACL='bucket-owner-full-control' STORAGE_AWS_ROLE_ARN='we-should-probably-validate-this-string' STORAGE_PROVIDER='S3' TYPE='EXTERNAL_STAGE' STORAGE_ALLOWED_LOCATIONS=\('s3://great-bucket/great-path/'\) ENABLED=true$`, - ).WillReturnResult(sqlmock.NewResult(1, 1)) - expectReadStorageIntegration(mock) - - 
err := resources.CreateStorageIntegration(d, db) - r.NoError(err) - }) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - mock.ExpectExec( - `^CREATE STORAGE INTEGRATION "test_storage_integration_with_s3gov" COMMENT='great comment' STORAGE_AWS_OBJECT_ACL='bucket-owner-full-control' STORAGE_AWS_ROLE_ARN='we-should-probably-validate-this-string' STORAGE_PROVIDER='S3GOV' TYPE='EXTERNAL_STAGE' STORAGE_ALLOWED_LOCATIONS=\('s3://great-bucket/great-path/'\) ENABLED=true$`, - ).WillReturnResult(sqlmock.NewResult(1, 1)) - expectReadStorageIntegrationWithS3GOV(mock) - - err := resources.CreateStorageIntegration(d2, db) - r.NoError(err) - }) -} - -func TestStorageIntegrationRead(t *testing.T) { - r := require.New(t) - - d := storageIntegration(t, "test_storage_integration", map[string]interface{}{"name": "test_storage_integration"}) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - expectReadStorageIntegration(mock) - - err := resources.ReadStorageIntegration(d, db) - r.NoError(err) - }) -} - -func TestStorageIntegrationReadEmpty(t *testing.T) { - r := require.New(t) - - d := storageIntegration(t, "test_storage_integration", map[string]interface{}{"name": "not_existing_storage_integration"}) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - expectReadStorageIntegrationEmpty(mock) - - err := resources.ReadStorageIntegration(d, db) - r.Nil(err) - }) -} - -func TestStorageIntegrationUpdate(t *testing.T) { - r := require.New(t) - - in := map[string]interface{}{ - "name": "test_storage_integration_acl", - "storage_aws_object_acl": "bucket-owner-full-control", - } - - d := storageIntegration(t, "test_storage_integration_acl", in) - - showRows := sqlmock.NewRows([]string{ - "name", "type", "category", "enabled", "created_on", - }, - ).AddRow("test_storage_integration_acl", "EXTERNAL_STAGE", "STORAGE", true, "now") - - descRows := sqlmock.NewRows([]string{ - "property", "property_type", "property_value", "property_default", - }).AddRow("ENABLED", "Boolean", true, false). 
- AddRow("STORAGE_AWS_OBJECT_ACL", "String", "bucket-owner-full-control", nil) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - mock.ExpectExec(`^ALTER STORAGE INTEGRATION "test_storage_integration_acl" SET STORAGE_AWS_OBJECT_ACL = 'bucket-owner-full-control'`).WillReturnResult(sqlmock.NewResult(1, 1)) - mock.ExpectExec(`^ALTER STORAGE INTEGRATION "test_storage_integration_acl" SET ENABLED=true`).WillReturnResult(sqlmock.NewResult(1, 1)) - mock.ExpectQuery(`^SHOW STORAGE INTEGRATIONS LIKE 'test_storage_integration_acl'$`).WillReturnRows(showRows) - mock.ExpectQuery(`DESCRIBE STORAGE INTEGRATION "test_storage_integration_acl"$`).WillReturnRows(descRows) - - err := resources.UpdateStorageIntegration(d, db) - r.NoError(err) - }) -} - -func TestStorageIntegrationDelete(t *testing.T) { - r := require.New(t) - - d := storageIntegration(t, "drop_it", map[string]interface{}{"name": "drop_it"}) - - WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) { - mock.ExpectExec(`DROP STORAGE INTEGRATION "drop_it"`).WillReturnResult(sqlmock.NewResult(1, 1)) - err := resources.DeleteStorageIntegration(d, db) - r.NoError(err) - }) -} - -func expectReadStorageIntegration(mock sqlmock.Sqlmock) { - showRows := sqlmock.NewRows([]string{ - "name", "type", "category", "enabled", "created_on", - }, - ).AddRow("test_storage_integration", "EXTERNAL_STAGE", "STORAGE", true, "now") - mock.ExpectQuery(`^SHOW STORAGE INTEGRATIONS LIKE 'test_storage_integration'$`).WillReturnRows(showRows) - - descRows := sqlmock.NewRows([]string{ - "property", "property_type", "property_value", "property_default", - }). - AddRow("ENABLED", "Boolean", true, false). - AddRow("STORAGE_PROVIDER", "String", "S3", nil). - AddRow("STORAGE_ALLOWED_LOCATIONS", "List", "s3://bucket-a/path-a/,s3://bucket-b/", nil). - AddRow("STORAGE_BLOCKED_LOCATIONS", "List", "s3://bucket-c/path-c/,s3://bucket-d/", nil). - AddRow("STORAGE_AWS_IAM_USER_ARN", "String", "arn:aws:iam::000000000000:/user/test", nil). - AddRow("STORAGE_AWS_ROLE_ARN", "String", "arn:aws:iam::000000000001:/role/test", nil). - AddRow("STORAGE_AWS_OBJECT_ACL", "String", "bucket-owner-full-control", nil). - AddRow("STORAGE_AWS_EXTERNAL_ID", "String", "AGreatExternalID", nil) - - mock.ExpectQuery(`DESCRIBE STORAGE INTEGRATION "test_storage_integration"$`).WillReturnRows(descRows) -} - -func expectReadStorageIntegrationWithS3GOV(mock sqlmock.Sqlmock) { - showRows := sqlmock.NewRows([]string{ - "name", "type", "category", "enabled", "created_on", - }, - ).AddRow("test_storage_integration_with_s3gov", "EXTERNAL_STAGE", "STORAGE", true, "now") - mock.ExpectQuery(`^SHOW STORAGE INTEGRATIONS LIKE 'test_storage_integration_with_s3gov'$`).WillReturnRows(showRows) - - descRows := sqlmock.NewRows([]string{ - "property", "property_type", "property_value", "property_default", - }). - AddRow("ENABLED", "Boolean", true, false). - AddRow("STORAGE_PROVIDER", "String", "S3GOV", nil). - AddRow("STORAGE_ALLOWED_LOCATIONS", "List", "s3://bucket-a/path-a/,s3://bucket-b/", nil). - AddRow("STORAGE_BLOCKED_LOCATIONS", "List", "s3://bucket-c/path-c/,s3://bucket-d/", nil). - AddRow("STORAGE_AWS_IAM_USER_ARN", "String", "arn:aws:iam::000000000000:/user/test", nil). - AddRow("STORAGE_AWS_ROLE_ARN", "String", "arn:aws:iam::000000000001:/role/test", nil). - AddRow("STORAGE_AWS_OBJECT_ACL", "String", "bucket-owner-full-control", nil). 
- AddRow("STORAGE_AWS_EXTERNAL_ID", "String", "AGreatExternalID", nil) - - mock.ExpectQuery(`DESCRIBE STORAGE INTEGRATION "test_storage_integration_with_s3gov"$`).WillReturnRows(descRows) -} - -func expectReadStorageIntegrationEmpty(mock sqlmock.Sqlmock) { - noRows := sqlmock.NewRows([]string{ - "name", "type", "category", "enabled", "created_on", - }, - ) - mock.ExpectQuery(`^SHOW STORAGE INTEGRATIONS.*`).WillReturnRows(noRows) -} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/after/test.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/after/test.tf new file mode 100644 index 0000000000..743c1033f5 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/after/test.tf @@ -0,0 +1,7 @@ +resource "snowflake_storage_integration" "test" { + name = var.name + storage_allowed_locations = var.allowed_locations + storage_provider = "S3" + storage_aws_role_arn = "arn:aws:iam::000000000001:/role/test" + storage_aws_object_acl = var.aws_object_acl +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/after/variables.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/after/variables.tf new file mode 100644 index 0000000000..81fc0a48bb --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/after/variables.tf @@ -0,0 +1,12 @@ +variable "name" { + type = string +} + +variable "allowed_locations" { + type = set(string) +} + +variable "aws_object_acl" { + type = string +} + diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/before/test.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/before/test.tf new file mode 100644 index 0000000000..e6c7b2dfd0 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/before/test.tf @@ -0,0 +1,6 @@ +resource "snowflake_storage_integration" "test" { + name = var.name + storage_allowed_locations = var.allowed_locations + storage_provider = "S3" + storage_aws_role_arn = "arn:aws:iam::000000000001:/role/test" +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/before/variables.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/before/variables.tf new file mode 100644 index 0000000000..7e1040ffea --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/AWSObjectACL_Update/before/variables.tf @@ -0,0 +1,7 @@ +variable "name" { + type = string +} + +variable "allowed_locations" { + type = set(string) +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/set/test.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/set/test.tf new file mode 100644 index 0000000000..fac1c5e11f --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/set/test.tf @@ -0,0 +1,10 @@ +resource "snowflake_storage_integration" "test" { + name = var.name + enabled = true + storage_provider = "S3" + comment = var.comment + storage_allowed_locations = var.allowed_locations + storage_blocked_locations = var.blocked_locations + storage_aws_role_arn = var.aws_role_arn + storage_aws_object_acl = var.aws_object_acl +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/set/variables.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/set/variables.tf new file mode 100644 index 0000000000..ac06ebf51f --- /dev/null +++ 
b/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/set/variables.tf @@ -0,0 +1,23 @@ +variable "name" { + type = string +} + +variable "comment" { + type = string +} + +variable "allowed_locations" { + type = set(string) +} + +variable "blocked_locations" { + type = set(string) +} + +variable "aws_object_acl" { + type = string +} + +variable "aws_role_arn" { + type = string +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/unset/test.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/unset/test.tf new file mode 100644 index 0000000000..7fcfa04a0f --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/unset/test.tf @@ -0,0 +1,7 @@ +resource "snowflake_storage_integration" "test" { + name = var.name + enabled = false + storage_provider = "S3" + storage_allowed_locations = var.allowed_locations + storage_aws_role_arn = var.aws_role_arn +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/unset/variables.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/unset/variables.tf new file mode 100644 index 0000000000..0de6beabed --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/AWS_Update/unset/variables.tf @@ -0,0 +1,11 @@ +variable "name" { + type = string +} + +variable "allowed_locations" { + type = set(string) +} + +variable "aws_role_arn" { + type = string +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/set/test.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/set/test.tf new file mode 100644 index 0000000000..7a8165572c --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/set/test.tf @@ -0,0 +1,9 @@ +resource "snowflake_storage_integration" "test" { + name = var.name + enabled = true + storage_provider = "AZURE" + comment = var.comment + storage_allowed_locations = var.allowed_locations + storage_blocked_locations = var.blocked_locations + azure_tenant_id = var.azure_tenant_id +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/set/variables.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/set/variables.tf new file mode 100644 index 0000000000..1ae2540d94 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/set/variables.tf @@ -0,0 +1,19 @@ +variable "name" { + type = string +} + +variable "comment" { + type = string +} + +variable "allowed_locations" { + type = set(string) +} + +variable "blocked_locations" { + type = set(string) +} + +variable "azure_tenant_id" { + type = string +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/unset/test.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/unset/test.tf new file mode 100644 index 0000000000..9221335eb5 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/unset/test.tf @@ -0,0 +1,7 @@ +resource "snowflake_storage_integration" "test" { + name = var.name + enabled = false + storage_provider = "AZURE" + storage_allowed_locations = var.allowed_locations + azure_tenant_id = var.azure_tenant_id +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/unset/variables.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/unset/variables.tf new file mode 100644 index 0000000000..28210a160e --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/Azure_Update/unset/variables.tf @@ -0,0 +1,11 @@ +variable "name" { + type = string +} + 
+variable "allowed_locations" { + type = set(string) +} + +variable "azure_tenant_id" { + type = string +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/Empty_StorageAllowedLocations/test.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/Empty_StorageAllowedLocations/test.tf new file mode 100644 index 0000000000..a0d5f5cfcd --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/Empty_StorageAllowedLocations/test.tf @@ -0,0 +1,5 @@ +resource "snowflake_storage_integration" "test" { + storage_allowed_locations = [] + storage_provider = "S3" + name = "empty_storage_allowed_locations_test" +} \ No newline at end of file diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/set/test.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/set/test.tf new file mode 100644 index 0000000000..cb12183c2c --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/set/test.tf @@ -0,0 +1,8 @@ +resource "snowflake_storage_integration" "test" { + name = var.name + enabled = true + storage_provider = "GCS" + comment = var.comment + storage_allowed_locations = var.allowed_locations + storage_blocked_locations = var.blocked_locations +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/set/variables.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/set/variables.tf new file mode 100644 index 0000000000..ad738d355d --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/set/variables.tf @@ -0,0 +1,15 @@ +variable "name" { + type = string +} + +variable "comment" { + type = string +} + +variable "allowed_locations" { + type = set(string) +} + +variable "blocked_locations" { + type = set(string) +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/unset/test.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/unset/test.tf new file mode 100644 index 0000000000..d2c8e18ba9 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/unset/test.tf @@ -0,0 +1,6 @@ +resource "snowflake_storage_integration" "test" { + name = var.name + enabled = false + storage_provider = "GCS" + storage_allowed_locations = var.allowed_locations +} diff --git a/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/unset/variables.tf b/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/unset/variables.tf new file mode 100644 index 0000000000..7e1040ffea --- /dev/null +++ b/pkg/resources/testdata/TestAcc_StorageIntegration/GCP_Update/unset/variables.tf @@ -0,0 +1,7 @@ +variable "name" { + type = string +} + +variable "allowed_locations" { + type = set(string) +} diff --git a/pkg/sdk/storage_integration_def.go b/pkg/sdk/storage_integration_def.go index e626a6e325..bd41ad54b1 100644 --- a/pkg/sdk/storage_integration_def.go +++ b/pkg/sdk/storage_integration_def.go @@ -61,14 +61,14 @@ var StorageIntegrationDef = g.NewInterface( "Set", g.NewQueryStruct("StorageIntegrationSet"). OptionalQueryStructField( - "SetS3Params", + "S3Params", g.NewQueryStruct("SetS3StorageParams"). TextAssignment("STORAGE_AWS_ROLE_ARN", g.ParameterOptions().SingleQuotes().Required()). OptionalTextAssignment("STORAGE_AWS_OBJECT_ACL", g.ParameterOptions().SingleQuotes()), g.KeywordOptions(), ). OptionalQueryStructField( - "SetAzureParams", + "AzureParams", g.NewQueryStruct("SetAzureStorageParams"). 
TextAssignment("AZURE_TENANT_ID", g.ParameterOptions().SingleQuotes().Required()), g.KeywordOptions(), @@ -82,6 +82,7 @@ var StorageIntegrationDef = g.NewInterface( OptionalQueryStructField( "Unset", g.NewQueryStruct("StorageIntegrationUnset"). + OptionalSQL("STORAGE_AWS_OBJECT_ACL"). OptionalSQL("ENABLED"). OptionalSQL("STORAGE_BLOCKED_LOCATIONS"). OptionalSQL("COMMENT"), @@ -109,7 +110,7 @@ var StorageIntegrationDef = g.NewInterface( Text("type"). Text("category"). Bool("enabled"). - Text("comment"). + OptionalText("comment"). Time("created_on"), g.PlainStruct("StorageIntegration"). Text("Name"). diff --git a/pkg/sdk/storage_integration_dto_builders_gen.go b/pkg/sdk/storage_integration_dto_builders_gen.go index 211c61a426..e9fedd8c01 100644 --- a/pkg/sdk/storage_integration_dto_builders_gen.go +++ b/pkg/sdk/storage_integration_dto_builders_gen.go @@ -113,18 +113,18 @@ func NewStorageIntegrationSetRequest() *StorageIntegrationSetRequest { return &StorageIntegrationSetRequest{} } -func (s *StorageIntegrationSetRequest) WithSetS3Params(SetS3Params *SetS3StorageParamsRequest) *StorageIntegrationSetRequest { - s.SetS3Params = SetS3Params +func (s *StorageIntegrationSetRequest) WithS3Params(S3Params *SetS3StorageParamsRequest) *StorageIntegrationSetRequest { + s.S3Params = S3Params return s } -func (s *StorageIntegrationSetRequest) WithSetAzureParams(SetAzureParams *SetAzureStorageParamsRequest) *StorageIntegrationSetRequest { - s.SetAzureParams = SetAzureParams +func (s *StorageIntegrationSetRequest) WithAzureParams(AzureParams *SetAzureStorageParamsRequest) *StorageIntegrationSetRequest { + s.AzureParams = AzureParams return s } func (s *StorageIntegrationSetRequest) WithEnabled(Enabled bool) *StorageIntegrationSetRequest { - s.Enabled = Enabled + s.Enabled = &Enabled return s } @@ -168,6 +168,11 @@ func NewStorageIntegrationUnsetRequest() *StorageIntegrationUnsetRequest { return &StorageIntegrationUnsetRequest{} } +func (s *StorageIntegrationUnsetRequest) WithStorageAwsObjectAcl(StorageAwsObjectAcl *bool) *StorageIntegrationUnsetRequest { + s.StorageAwsObjectAcl = StorageAwsObjectAcl + return s +} + func (s *StorageIntegrationUnsetRequest) WithEnabled(Enabled *bool) *StorageIntegrationUnsetRequest { s.Enabled = Enabled return s diff --git a/pkg/sdk/storage_integration_dto_gen.go b/pkg/sdk/storage_integration_dto_gen.go index 271ec9905a..7612817825 100644 --- a/pkg/sdk/storage_integration_dto_gen.go +++ b/pkg/sdk/storage_integration_dto_gen.go @@ -44,9 +44,9 @@ type AlterStorageIntegrationRequest struct { } type StorageIntegrationSetRequest struct { - SetS3Params *SetS3StorageParamsRequest - SetAzureParams *SetAzureStorageParamsRequest - Enabled bool + S3Params *SetS3StorageParamsRequest + AzureParams *SetAzureStorageParamsRequest + Enabled *bool StorageAllowedLocations []StorageLocation StorageBlockedLocations []StorageLocation Comment *string @@ -62,6 +62,7 @@ type SetAzureStorageParamsRequest struct { } type StorageIntegrationUnsetRequest struct { + StorageAwsObjectAcl *bool Enabled *bool StorageBlockedLocations *bool Comment *bool diff --git a/pkg/sdk/storage_integration_gen.go b/pkg/sdk/storage_integration_gen.go index 9aa15ffa01..285f35092d 100644 --- a/pkg/sdk/storage_integration_gen.go +++ b/pkg/sdk/storage_integration_gen.go @@ -2,6 +2,7 @@ package sdk import ( "context" + "database/sql" "time" ) @@ -63,9 +64,9 @@ type AlterStorageIntegrationOptions struct { } type StorageIntegrationSet struct { - SetS3Params *SetS3StorageParams `ddl:"keyword"` - SetAzureParams 
*SetAzureStorageParams `ddl:"keyword"` - Enabled bool `ddl:"parameter" sql:"ENABLED"` + S3Params *SetS3StorageParams `ddl:"keyword"` + AzureParams *SetAzureStorageParams `ddl:"keyword"` + Enabled *bool `ddl:"parameter" sql:"ENABLED"` StorageAllowedLocations []StorageLocation `ddl:"parameter,parentheses" sql:"STORAGE_ALLOWED_LOCATIONS"` StorageBlockedLocations []StorageLocation `ddl:"parameter,parentheses" sql:"STORAGE_BLOCKED_LOCATIONS"` Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` @@ -81,6 +82,7 @@ type SetAzureStorageParams struct { } type StorageIntegrationUnset struct { + StorageAwsObjectAcl *bool `ddl:"keyword" sql:"STORAGE_AWS_OBJECT_ACL"` Enabled *bool `ddl:"keyword" sql:"ENABLED"` StorageBlockedLocations *bool `ddl:"keyword" sql:"STORAGE_BLOCKED_LOCATIONS"` Comment *bool `ddl:"keyword" sql:"COMMENT"` @@ -102,12 +104,12 @@ type ShowStorageIntegrationOptions struct { } type showStorageIntegrationsDbRow struct { - Name string `db:"name"` - Type string `db:"type"` - Category string `db:"category"` - Enabled bool `db:"enabled"` - Comment string `db:"comment"` - CreatedOn time.Time `db:"created_on"` + Name string `db:"name"` + Type string `db:"type"` + Category string `db:"category"` + Enabled bool `db:"enabled"` + Comment sql.NullString `db:"comment"` + CreatedOn time.Time `db:"created_on"` } type StorageIntegration struct { diff --git a/pkg/sdk/storage_integration_gen_test.go b/pkg/sdk/storage_integration_gen_test.go index a321104d54..cc559aed7d 100644 --- a/pkg/sdk/storage_integration_gen_test.go +++ b/pkg/sdk/storage_integration_gen_test.go @@ -139,11 +139,11 @@ func TestStorageIntegrations_Alter(t *testing.T) { t.Run("set - s3", func(t *testing.T) { opts := defaultOpts() opts.Set = &StorageIntegrationSet{ - SetS3Params: &SetS3StorageParams{ + S3Params: &SetS3StorageParams{ StorageAwsRoleArn: "new-aws-role-arn", StorageAwsObjectAcl: String("new-aws-object-acl"), }, - Enabled: false, + Enabled: Bool(false), StorageAllowedLocations: []StorageLocation{{Path: "new-allowed-location"}}, StorageBlockedLocations: []StorageLocation{{Path: "new-blocked-location"}}, Comment: String("changed comment"), @@ -154,10 +154,10 @@ func TestStorageIntegrations_Alter(t *testing.T) { t.Run("set - azure", func(t *testing.T) { opts := defaultOpts() opts.Set = &StorageIntegrationSet{ - SetAzureParams: &SetAzureStorageParams{ + AzureParams: &SetAzureStorageParams{ AzureTenantId: "new-azure-tenant-id", }, - Enabled: false, + Enabled: Bool(false), StorageAllowedLocations: []StorageLocation{{Path: "new-allowed-location"}}, StorageBlockedLocations: []StorageLocation{{Path: "new-blocked-location"}}, Comment: String("changed comment"), @@ -184,11 +184,12 @@ func TestStorageIntegrations_Alter(t *testing.T) { t.Run("unset", func(t *testing.T) { opts := defaultOpts() opts.Unset = &StorageIntegrationUnset{ + StorageAwsObjectAcl: Bool(true), Enabled: Bool(true), StorageBlockedLocations: Bool(true), Comment: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, "ALTER STORAGE INTEGRATION %s UNSET ENABLED, STORAGE_BLOCKED_LOCATIONS, COMMENT", id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, "ALTER STORAGE INTEGRATION %s UNSET STORAGE_AWS_OBJECT_ACL, ENABLED, STORAGE_BLOCKED_LOCATIONS, COMMENT", id.FullyQualifiedName()) }) t.Run("unset tags", func(t *testing.T) { diff --git a/pkg/sdk/storage_integration_impl_gen.go b/pkg/sdk/storage_integration_impl_gen.go index c003747e75..8678cbe487 100644 --- a/pkg/sdk/storage_integration_impl_gen.go +++ b/pkg/sdk/storage_integration_impl_gen.go @@ -101,20 
+101,21 @@ func (r *AlterStorageIntegrationRequest) toOpts() *AlterStorageIntegrationOption StorageBlockedLocations: r.Set.StorageBlockedLocations, Comment: r.Set.Comment, } - if r.Set.SetS3Params != nil { - opts.Set.SetS3Params = &SetS3StorageParams{ - StorageAwsRoleArn: r.Set.SetS3Params.StorageAwsRoleArn, - StorageAwsObjectAcl: r.Set.SetS3Params.StorageAwsObjectAcl, + if r.Set.S3Params != nil { + opts.Set.S3Params = &SetS3StorageParams{ + StorageAwsRoleArn: r.Set.S3Params.StorageAwsRoleArn, + StorageAwsObjectAcl: r.Set.S3Params.StorageAwsObjectAcl, } } - if r.Set.SetAzureParams != nil { - opts.Set.SetAzureParams = &SetAzureStorageParams{ - AzureTenantId: r.Set.SetAzureParams.AzureTenantId, + if r.Set.AzureParams != nil { + opts.Set.AzureParams = &SetAzureStorageParams{ + AzureTenantId: r.Set.AzureParams.AzureTenantId, } } } if r.Unset != nil { opts.Unset = &StorageIntegrationUnset{ + StorageAwsObjectAcl: r.Unset.StorageAwsObjectAcl, Enabled: r.Unset.Enabled, StorageBlockedLocations: r.Unset.StorageBlockedLocations, Comment: r.Unset.Comment, @@ -139,14 +140,17 @@ func (r *ShowStorageIntegrationRequest) toOpts() *ShowStorageIntegrationOptions } func (r showStorageIntegrationsDbRow) convert() *StorageIntegration { - return &StorageIntegration{ + s := &StorageIntegration{ Name: r.Name, StorageType: r.Type, Category: r.Category, Enabled: r.Enabled, - Comment: r.Comment, CreatedOn: r.CreatedOn, } + if r.Comment.Valid { + s.Comment = r.Comment.String + } + return s } func (r *DescribeStorageIntegrationRequest) toOpts() *DescribeStorageIntegrationOptions { diff --git a/pkg/sdk/testint/storage_integration_gen_integration_test.go b/pkg/sdk/testint/storage_integration_gen_integration_test.go index 4240b993d0..bd527895a3 100644 --- a/pkg/sdk/testint/storage_integration_gen_integration_test.go +++ b/pkg/sdk/testint/storage_integration_gen_integration_test.go @@ -246,7 +246,7 @@ func TestInt_StorageIntegrations(t *testing.T) { req := sdk.NewAlterStorageIntegrationRequest(id). WithSet( sdk.NewStorageIntegrationSetRequest(). - WithSetS3Params(sdk.NewSetS3StorageParamsRequest(awsRoleARN)). + WithS3Params(sdk.NewSetS3StorageParamsRequest(awsRoleARN)). WithEnabled(true). WithStorageAllowedLocations(changedS3AllowedLocations). WithStorageBlockedLocations(changedS3BlockedLocations). @@ -269,7 +269,7 @@ func TestInt_StorageIntegrations(t *testing.T) { req := sdk.NewAlterStorageIntegrationRequest(id). WithSet( sdk.NewStorageIntegrationSetRequest(). - WithSetAzureParams(sdk.NewSetAzureStorageParamsRequest(azureTenantId)). + WithAzureParams(sdk.NewSetAzureStorageParamsRequest(azureTenantId)). WithEnabled(true). WithStorageAllowedLocations(changedAzureAllowedLocations). WithStorageBlockedLocations(changedAzureBlockedLocations). @@ -290,6 +290,7 @@ func TestInt_StorageIntegrations(t *testing.T) { req := sdk.NewAlterStorageIntegrationRequest(id). WithUnset( sdk.NewStorageIntegrationUnsetRequest(). + WithStorageAwsObjectAcl(sdk.Bool(true)). WithEnabled(sdk.Bool(true)). WithStorageBlockedLocations(sdk.Bool(true)). WithComment(sdk.Bool(true)), diff --git a/pkg/snowflake/storage_integration.go b/pkg/snowflake/storage_integration.go deleted file mode 100644 index 7481be8e6a..0000000000 --- a/pkg/snowflake/storage_integration.go +++ /dev/null @@ -1,61 +0,0 @@ -package snowflake - -import ( - "database/sql" - "errors" - "fmt" - "log" - - "github.com/jmoiron/sqlx" -) - -// StorageIntegration returns a pointer to a Builder that abstracts the DDL operations for a storage integration. 
-// -// Supported DDL operations are: -// - CREATE STORAGE INTEGRATION -// - ALTER STORAGE INTEGRATION -// - DROP INTEGRATION -// - SHOW INTEGRATIONS -// - DESCRIBE INTEGRATION -// -// [Snowflake Reference](https://docs.snowflake.net/manuals/sql-reference/ddl-user-security.html#storage-integrations) -func NewStorageIntegrationBuilder(name string) *Builder { - return &Builder{ - entityType: StorageIntegrationType, - name: name, - } -} - -type StorageIntegration struct { - Name sql.NullString `db:"name"` - Category sql.NullString `db:"category"` - IntegrationType sql.NullString `db:"type"` - CreatedOn sql.NullString `db:"created_on"` - Enabled sql.NullBool `db:"enabled"` - Comment sql.NullString `db:"comment"` -} - -func ScanStorageIntegration(row *sqlx.Row) (*StorageIntegration, error) { - r := &StorageIntegration{} - err := row.StructScan(r) - return r, err -} - -func ListStorageIntegrations(db *sql.DB) ([]StorageIntegration, error) { - stmt := "SHOW STORAGE INTEGRATIONS" - rows, err := Query(db, stmt) - if err != nil { - return nil, err - } - defer rows.Close() - - dbs := []StorageIntegration{} - if err := sqlx.StructScan(rows, &dbs); err != nil { - if errors.Is(err, sql.ErrNoRows) { - log.Println("[DEBUG] no resource monitors found") - return nil, nil - } - return nil, fmt.Errorf("unable to scan row for %s err = %w", stmt, err) - } - return dbs, nil -} diff --git a/pkg/snowflake/storage_integration_test.go b/pkg/snowflake/storage_integration_test.go deleted file mode 100644 index 12d4424d0a..0000000000 --- a/pkg/snowflake/storage_integration_test.go +++ /dev/null @@ -1,27 +0,0 @@ -package snowflake_test - -import ( - "testing" - - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/snowflake" - "github.com/stretchr/testify/require" -) - -func TestStorageIntegration(t *testing.T) { - r := require.New(t) - builder := snowflake.NewStorageIntegrationBuilder("aws") - r.NotNil(builder) - - q := builder.Show() - r.Equal("SHOW STORAGE INTEGRATIONS LIKE 'aws'", q) - - c := builder.Create() - - c.SetString(`type`, `EXTERNAL_STAGE`) - c.SetStringList(`storage_allowed_locations`, []string{"s3://my-bucket/my-path/", "s3://another-bucket/"}) - c.SetString(`storage_aws_object_acl`, `bucket-owner-full-control`) - c.SetBool(`enabled`, true) - q = c.Statement() - - r.Equal(`CREATE STORAGE INTEGRATION "aws" STORAGE_AWS_OBJECT_ACL='bucket-owner-full-control' TYPE='EXTERNAL_STAGE' STORAGE_ALLOWED_LOCATIONS=('s3://my-bucket/my-path/', 's3://another-bucket/') ENABLED=true`, q) -} From 3939dbe2f9189968c087a883ed97dd3b7350787f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Tue, 23 Jan 2024 11:13:08 +0100 Subject: [PATCH 3/5] feat: stages migration follow-up (#2372) --- pkg/sdk/file_format.go | 7 +- pkg/sdk/file_format_test.go | 15 + .../application_roles_gen_integration_test.go | 2 +- .../external_tables_integration_test.go | 4 +- .../testint/file_format_integration_test.go | 4 +- pkg/sdk/testint/helpers_test.go | 87 ++--- pkg/sdk/testint/pipes_integration_test.go | 10 +- .../testint/stages_gen_integration_test.go | 310 ++++++++++++++++-- .../testint/streams_gen_integration_test.go | 4 +- pkg/sdk/testint/tables_integration_test.go | 6 +- 10 files changed, 353 insertions(+), 96 deletions(-) diff --git a/pkg/sdk/file_format.go b/pkg/sdk/file_format.go index 451a7203f2..be12681138 100644 --- a/pkg/sdk/file_format.go +++ b/pkg/sdk/file_format.go @@ -375,9 +375,8 @@ type AlterFileFormatOptions struct { IfExists *bool `ddl:"keyword" sql:"IF EXISTS"` name 
SchemaObjectIdentifier `ddl:"identifier"` - Rename *AlterFileFormatRenameOptions - Set *FileFormatTypeOptions `ddl:"list,no_comma" sql:"SET"` - SetComment *string `ddl:"parameter,single_quotes" sql:"SET COMMENT"` + Rename *AlterFileFormatRenameOptions + Set *FileFormatTypeOptions `ddl:"list,no_comma" sql:"SET"` } func (opts *AlterFileFormatOptions) validate() error { @@ -398,6 +397,8 @@ type AlterFileFormatRenameOptions struct { } type FileFormatTypeOptions struct { + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` + // CSV type options CSVCompression *CSVCompression `ddl:"parameter" sql:"COMPRESSION"` CSVRecordDelimiter *string `ddl:"parameter,single_quotes" sql:"RECORD_DELIMITER"` diff --git a/pkg/sdk/file_format_test.go b/pkg/sdk/file_format_test.go index f364636c66..ff99155de8 100644 --- a/pkg/sdk/file_format_test.go +++ b/pkg/sdk/file_format_test.go @@ -200,6 +200,21 @@ func TestFileFormatsAlter(t *testing.T) { } assertOptsValidAndSQLEquals(t, opts, `ALTER FILE FORMAT IF EXISTS "db"."schema"."fileformat" SET COMPRESSION = BROTLI TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true NULL_IF = ('nil')`) }) + + t.Run("set comment", func(t *testing.T) { + opts := &AlterFileFormatOptions{ + IfExists: Bool(true), + name: NewSchemaObjectIdentifier("db", "schema", "fileformat"), + Set: &FileFormatTypeOptions{ + AvroCompression: &AvroCompressionBrotli, + AvroTrimSpace: Bool(true), + AvroReplaceInvalidCharacters: Bool(true), + AvroNullIf: &[]NullString{{"nil"}}, + Comment: String("some comment"), + }, + } + assertOptsValidAndSQLEquals(t, opts, `ALTER FILE FORMAT IF EXISTS "db"."schema"."fileformat" SET COMMENT = 'some comment' COMPRESSION = BROTLI TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true NULL_IF = ('nil')`) + }) } func TestFileFormatsDrop(t *testing.T) { diff --git a/pkg/sdk/testint/application_roles_gen_integration_test.go b/pkg/sdk/testint/application_roles_gen_integration_test.go index e87f0f209d..dd712a5e85 100644 --- a/pkg/sdk/testint/application_roles_gen_integration_test.go +++ b/pkg/sdk/testint/application_roles_gen_integration_test.go @@ -22,7 +22,7 @@ func TestInt_ApplicationRoles(t *testing.T) { client := testClient(t) stageName := "stage_name" - stage, cleanupStage := createStage(t, client, testDb(t), testSchema(t), stageName) + stage, cleanupStage := createStage(t, client, sdk.NewSchemaObjectIdentifier(TestDatabaseName, TestSchemaName, stageName)) t.Cleanup(cleanupStage) putOnStage(t, client, stage, "manifest.yml") diff --git a/pkg/sdk/testint/external_tables_integration_test.go b/pkg/sdk/testint/external_tables_integration_test.go index 45542dede1..e3fed9336d 100644 --- a/pkg/sdk/testint/external_tables_integration_test.go +++ b/pkg/sdk/testint/external_tables_integration_test.go @@ -15,8 +15,8 @@ func TestInt_ExternalTables(t *testing.T) { client := testClient(t) ctx := testContext(t) - stageID := sdk.NewAccountObjectIdentifier("EXTERNAL_TABLE_STAGE") - stageLocation := "@external_table_stage" + stageID := sdk.NewSchemaObjectIdentifier(TestDatabaseName, TestSchemaName, "EXTERNAL_TABLE_STAGE") + stageLocation := fmt.Sprintf("@%s", stageID.FullyQualifiedName()) _, _ = createStageWithURL(t, client, stageID, nycWeatherDataURL) tag, _ := createTag(t, client, testDb(t), testSchema(t)) diff --git a/pkg/sdk/testint/file_format_integration_test.go b/pkg/sdk/testint/file_format_integration_test.go index fe96d78818..63601af402 100644 --- a/pkg/sdk/testint/file_format_integration_test.go +++ b/pkg/sdk/testint/file_format_integration_test.go @@ -381,7 +381,7 @@ func 
TestInt_FileFormatsAlter(t *testing.T) { require.NoError(t, err) }) - t.Run("set", func(t *testing.T) { + t.Run("set + set comment", func(t *testing.T) { fileFormat, fileFormatCleanup := createFileFormatWithOptions(t, client, testSchema(t).ID(), &sdk.CreateFileFormatOptions{ Type: sdk.FileFormatTypeCSV, FileFormatTypeOptions: sdk.FileFormatTypeOptions{ @@ -393,6 +393,7 @@ func TestInt_FileFormatsAlter(t *testing.T) { err := client.FileFormats.Alter(ctx, fileFormat.ID(), &sdk.AlterFileFormatOptions{ Set: &sdk.FileFormatTypeOptions{ + Comment: sdk.String("some comment"), CSVCompression: &sdk.CSVCompressionBz2, CSVParseHeader: sdk.Bool(true), }, @@ -403,6 +404,7 @@ func TestInt_FileFormatsAlter(t *testing.T) { require.NoError(t, err) assert.Equal(t, sdk.CSVCompressionBz2, *result.Options.CSVCompression) assert.Equal(t, true, *result.Options.CSVParseHeader) + assert.Equal(t, "some comment", result.Comment) }) } diff --git a/pkg/sdk/testint/helpers_test.go b/pkg/sdk/testint/helpers_test.go index 80d4c940fb..130a626ff6 100644 --- a/pkg/sdk/testint/helpers_test.go +++ b/pkg/sdk/testint/helpers_test.go @@ -19,15 +19,16 @@ const ( ) var ( - awsBucketUrl, awsBucketUrlIsSet = os.LookupEnv("AWS_EXTERNAL_BUCKET_URL") - awsKeyId, awsKeyIdIsSet = os.LookupEnv("AWS_EXTERNAL_KEY_ID") - awsSecretKey, awsSecretKeyIsSet = os.LookupEnv("AWS_EXTERNAL_SECRET_KEY") - awsRoleARN, awsRoleARNIsSet = os.LookupEnv("AWS_EXTERNAL_ROLE_ARN") + awsBucketUrl, awsBucketUrlIsSet = os.LookupEnv("TEST_SF_TF_AWS_EXTERNAL_BUCKET_URL") + awsKeyId, awsKeyIdIsSet = os.LookupEnv("TEST_SF_TF_AWS_EXTERNAL_KEY_ID") + awsSecretKey, awsSecretKeyIsSet = os.LookupEnv("TEST_SF_TF_AWS_EXTERNAL_SECRET_KEY") + awsRoleARN, awsRoleARNIsSet = os.LookupEnv("TEST_SF_TF_AWS_EXTERNAL_ROLE_ARN") - gcsBucketUrl, gcsBucketUrlIsSet = os.LookupEnv("GCS_EXTERNAL_BUCKET_URL") + gcsBucketUrl, gcsBucketUrlIsSet = os.LookupEnv("TEST_SF_TF_GCS_EXTERNAL_BUCKET_URL") - azureBucketUrl, azureBucketUrlIsSet = os.LookupEnv("AZURE_EXTERNAL_BUCKET_URL") - azureTenantId, azureTenantIdIsSet = os.LookupEnv("AZURE_EXTERNAL_TENANT_ID") + azureBucketUrl, azureBucketUrlIsSet = os.LookupEnv("TEST_SF_TF_AZURE_EXTERNAL_BUCKET_URL") + azureTenantId, azureTenantIdIsSet = os.LookupEnv("TEST_SF_TF_AZURE_EXTERNAL_TENANT_ID") + azureSasToken, azureSasTokenIsSet = os.LookupEnv("TEST_SF_TF_AZURE_EXTERNAL_SAS_TOKEN") hasExternalEnvironmentVariablesSet = awsBucketUrlIsSet && awsKeyIdIsSet && @@ -35,7 +36,8 @@ var ( awsRoleARNIsSet && gcsBucketUrlIsSet && azureBucketUrlIsSet && - azureTenantIdIsSet + azureTenantIdIsSet && + azureSasTokenIsSet ) // there is no direct way to get the account identifier from Snowflake API, but you can get it if you know @@ -254,69 +256,44 @@ func createTag(t *testing.T, client *sdk.Client, database *sdk.Database, schema func createStageWithDirectory(t *testing.T, client *sdk.Client, database *sdk.Database, schema *sdk.Schema, name string) (*sdk.Stage, func()) { t.Helper() - ctx := context.Background() - _, err := client.ExecForTests(ctx, fmt.Sprintf(`CREATE STAGE "%s" DIRECTORY = (ENABLE = TRUE)`, name)) - require.NoError(t, err) - - return &sdk.Stage{ - DatabaseName: database.Name, - SchemaName: schema.Name, - Name: name, - }, func() { - _, err := client.ExecForTests(ctx, fmt.Sprintf(`DROP STAGE "%s"`, name)) - require.NoError(t, err) - } + id := sdk.NewSchemaObjectIdentifier(database.Name, schema.Name, name) + return createStageWithOptions(t, client, id, func(request *sdk.CreateInternalStageRequest) *sdk.CreateInternalStageRequest { + return 
request.WithDirectoryTableOptions(sdk.NewInternalDirectoryTableOptionsRequest().WithEnable(sdk.Bool(true))) + }) } -func createStageWithName(t *testing.T, client *sdk.Client, name string) (*string, func()) { +func createStage(t *testing.T, client *sdk.Client, id sdk.SchemaObjectIdentifier) (*sdk.Stage, func()) { t.Helper() - ctx := context.Background() - stageCleanup := func() { - _, err := client.ExecForTests(ctx, fmt.Sprintf("DROP STAGE %s", name)) - require.NoError(t, err) - } - _, err := client.ExecForTests(ctx, fmt.Sprintf("CREATE STAGE %s", name)) - if err != nil { - return nil, stageCleanup - } - require.NoError(t, err) - return &name, stageCleanup + return createStageWithOptions(t, client, id, func(request *sdk.CreateInternalStageRequest) *sdk.CreateInternalStageRequest { return request }) } -func createStage(t *testing.T, client *sdk.Client, database *sdk.Database, schema *sdk.Schema, name string) (*sdk.Stage, func()) { +func createStageWithURL(t *testing.T, client *sdk.Client, id sdk.SchemaObjectIdentifier, url string) (*sdk.Stage, func()) { t.Helper() - require.NotNil(t, database, "database has to be created") - require.NotNil(t, schema, "schema has to be created") - - id := sdk.NewSchemaObjectIdentifier(database.Name, schema.Name, name) ctx := context.Background() + err := client.Stages.CreateOnS3(ctx, sdk.NewCreateOnS3StageRequest(id). + WithExternalStageParams(sdk.NewExternalS3StageParamsRequest(url))) + require.NoError(t, err) - stageCleanup := func() { - _, err := client.ExecForTests(ctx, fmt.Sprintf("DROP STAGE %s", id.FullyQualifiedName())) - require.NoError(t, err) - } - - _, err := client.ExecForTests(ctx, fmt.Sprintf("CREATE STAGE %s", id.FullyQualifiedName())) - if err != nil { - return nil, stageCleanup - } + stage, err := client.Stages.ShowByID(ctx, id) require.NoError(t, err) - return &sdk.Stage{ - DatabaseName: database.Name, - SchemaName: schema.Name, - Name: name, - }, stageCleanup + return stage, func() { + err := client.Stages.Drop(ctx, sdk.NewDropStageRequest(id)) + require.NoError(t, err) + } } -func createStageWithURL(t *testing.T, client *sdk.Client, name sdk.AccountObjectIdentifier, url string) (*sdk.Stage, func()) { +func createStageWithOptions(t *testing.T, client *sdk.Client, id sdk.SchemaObjectIdentifier, reqMapping func(*sdk.CreateInternalStageRequest) *sdk.CreateInternalStageRequest) (*sdk.Stage, func()) { t.Helper() ctx := context.Background() - _, err := client.ExecForTests(ctx, fmt.Sprintf(`CREATE STAGE "%s" URL = '%s'`, name.Name(), url)) + err := client.Stages.CreateInternal(ctx, reqMapping(sdk.NewCreateInternalStageRequest(id))) + require.NoError(t, err) + + stage, err := client.Stages.ShowByID(ctx, id) require.NoError(t, err) - return nil, func() { - _, err := client.ExecForTests(ctx, fmt.Sprintf(`DROP STAGE "%s"`, name.Name())) + return stage, func() { + err := client.Stages.Drop(ctx, sdk.NewDropStageRequest(id)) require.NoError(t, err) } } diff --git a/pkg/sdk/testint/pipes_integration_test.go b/pkg/sdk/testint/pipes_integration_test.go index a440dbba0b..f3b6a289e1 100644 --- a/pkg/sdk/testint/pipes_integration_test.go +++ b/pkg/sdk/testint/pipes_integration_test.go @@ -28,7 +28,7 @@ func TestInt_IncorrectCreatePipeBehaviour(t *testing.T) { t.Cleanup(tableCleanup) stageName := random.AlphanumericN(20) - stage, stageCleanup := createStage(t, itc.client, testDb(t), schema, stageName) + stage, stageCleanup := createStage(t, itc.client, sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, stageName)) t.Cleanup(stageCleanup) 
t.Run("if we have special characters in db or schema name, create pipe returns error in copy <> from <> section", func(t *testing.T) { @@ -71,7 +71,7 @@ func TestInt_PipesShowAndDescribe(t *testing.T) { t.Cleanup(table2Cleanup) stageName := random.AlphanumericN(20) - stage, stageCleanup := createStage(t, itc.client, testDb(t), testSchema(t), stageName) + stage, stageCleanup := createStage(t, itc.client, sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, stageName)) t.Cleanup(stageCleanup) pipe1Name := random.AlphanumericN(20) @@ -152,7 +152,7 @@ func TestInt_PipeCreate(t *testing.T) { t.Cleanup(tableCleanup) stageName := random.AlphanumericN(20) - stage, stageCleanup := createStage(t, itc.client, testDb(t), testSchema(t), stageName) + stage, stageCleanup := createStage(t, itc.client, sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, stageName)) t.Cleanup(stageCleanup) copyStatement := createPipeCopyStatement(t, table, stage) @@ -224,7 +224,7 @@ func TestInt_PipeDrop(t *testing.T) { t.Cleanup(tableCleanup) stageName := random.AlphanumericN(20) - stage, stageCleanup := createStage(t, itc.client, testDb(t), testSchema(t), stageName) + stage, stageCleanup := createStage(t, itc.client, sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, stageName)) t.Cleanup(stageCleanup) t.Run("pipe exists", func(t *testing.T) { @@ -252,7 +252,7 @@ func TestInt_PipeAlter(t *testing.T) { t.Cleanup(tableCleanup) stageName := random.AlphanumericN(20) - stage, stageCleanup := createStage(t, itc.client, testDb(t), testSchema(t), stageName) + stage, stageCleanup := createStage(t, itc.client, sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, stageName)) t.Cleanup(stageCleanup) pipeCopyStatement := createPipeCopyStatement(t, table, stage) diff --git a/pkg/sdk/testint/stages_gen_integration_test.go b/pkg/sdk/testint/stages_gen_integration_test.go index 634c5a9302..c86c3963b5 100644 --- a/pkg/sdk/testint/stages_gen_integration_test.go +++ b/pkg/sdk/testint/stages_gen_integration_test.go @@ -1,6 +1,7 @@ package testint import ( + "fmt" "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" @@ -13,37 +14,192 @@ func TestInt_Stages(t *testing.T) { client := testClient(t) ctx := testContext(t) - t.Run("CreateInternal", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + if !hasExternalEnvironmentVariablesSet { + t.Skip("Skipping TestInt_Stages (External env variables are not set)") + } - err := client.Stages.CreateInternal(ctx, sdk.NewCreateInternalStageRequest(id)) - require.NoError(t, err) + s3StorageIntegration, err := client.StorageIntegrations.ShowByID(ctx, sdk.NewAccountObjectIdentifier("S3_STORAGE_INTEGRATION")) + require.NoError(t, err) + gcpStorageIntegration, err := client.StorageIntegrations.ShowByID(ctx, sdk.NewAccountObjectIdentifier("GCP_STORAGE_INTEGRATION")) + require.NoError(t, err) + azureStorageIntegration, err := client.StorageIntegrations.ShowByID(ctx, sdk.NewAccountObjectIdentifier("AZURE_STORAGE_INTEGRATION")) + require.NoError(t, err) + + cleanupStage := func(t *testing.T, id sdk.SchemaObjectIdentifier) { + t.Helper() t.Cleanup(func() { err := client.Stages.Drop(ctx, sdk.NewDropStageRequest(id)) require.NoError(t, err) }) + } + + createBasicS3Stage := func(t *testing.T, stageId sdk.SchemaObjectIdentifier) { + t.Helper() + s3Req := sdk.NewExternalS3StageParamsRequest(awsBucketUrl). + WithCredentials(sdk.NewExternalStageS3CredentialsRequest(). 
+ WithAwsKeyId(&awsKeyId). + WithAwsSecretKey(&awsSecretKey)) + err := client.Stages.CreateOnS3(ctx, sdk.NewCreateOnS3StageRequest(stageId). + WithFileFormat(sdk.NewStageFileFormatRequest().WithType(&sdk.FileFormatTypeJSON)). + WithExternalStageParams(s3Req)) + require.NoError(t, err) + cleanupStage(t, stageId) + } - stage, err := client.Stages.ShowByID(ctx, id) + createBasicGcsStage := func(t *testing.T, stageId sdk.SchemaObjectIdentifier) { + t.Helper() + err := client.Stages.CreateOnGCS(ctx, sdk.NewCreateOnGCSStageRequest(stageId). + WithFileFormat(sdk.NewStageFileFormatRequest().WithType(&sdk.FileFormatTypeJSON)). + WithExternalStageParams(sdk.NewExternalGCSStageParamsRequest(gcsBucketUrl). + WithStorageIntegration(sdk.Pointer(sdk.NewAccountObjectIdentifier(gcpStorageIntegration.Name))))) + require.NoError(t, err) + cleanupStage(t, stageId) + } + + createBasicAzureStage := func(t *testing.T, stageId sdk.SchemaObjectIdentifier) { + t.Helper() + err := client.Stages.CreateOnAzure(ctx, sdk.NewCreateOnAzureStageRequest(stageId). + WithFileFormat(sdk.NewStageFileFormatRequest().WithType(&sdk.FileFormatTypeJSON)). + WithExternalStageParams(sdk.NewExternalAzureStageParamsRequest(azureBucketUrl). + WithCredentials(sdk.NewExternalStageAzureCredentialsRequest(azureSasToken)))) require.NoError(t, err) + cleanupStage(t, stageId) + } + + assertStage := func(t *testing.T, stage *sdk.Stage, id sdk.SchemaObjectIdentifier, stageType string, comment string, cloud string, url string, storageIntegration string) { + t.Helper() assert.Equal(t, id.DatabaseName(), stage.DatabaseName) assert.Equal(t, id.SchemaName(), stage.SchemaName) assert.Equal(t, id.Name(), stage.Name) + assert.Equal(t, comment, stage.Comment) + if len(url) > 0 { + assert.Equal(t, url, stage.Url) + } + assert.Equal(t, stageType, stage.Type) + if len(cloud) > 0 { + assert.Equal(t, cloud, *stage.Cloud) + } + if len(storageIntegration) > 0 { + assert.Equal(t, storageIntegration, *stage.StorageIntegration) + } + } + + t.Run("CreateInternal", func(t *testing.T) { + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + + err := client.Stages.CreateInternal(ctx, sdk.NewCreateInternalStageRequest(id). + WithFileFormat(sdk.NewStageFileFormatRequest().WithType(&sdk.FileFormatTypeJSON)). + WithComment(sdk.String("some comment"))) + require.NoError(t, err) + cleanupStage(t, id) + + stage, err := client.Stages.ShowByID(ctx, id) + require.NoError(t, err) + assertStage(t, stage, id, "INTERNAL", "some comment", "", "", "") + }) + + t.Run("CreateInternal - temporary", func(t *testing.T) { + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + + err := client.Stages.CreateInternal(ctx, sdk.NewCreateInternalStageRequest(id). + WithTemporary(sdk.Bool(true)). + WithFileFormat(sdk.NewStageFileFormatRequest().WithType(&sdk.FileFormatTypeJSON)). + WithComment(sdk.String("some comment"))) + require.NoError(t, err) + cleanupStage(t, id) + + stage, err := client.Stages.ShowByID(ctx, id) + require.NoError(t, err) + assertStage(t, stage, id, "INTERNAL TEMPORARY", "some comment", "", "", "") + }) + + t.Run("CreateOnS3 - IAM User", func(t *testing.T) { + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + + s3Req := sdk.NewExternalS3StageParamsRequest(awsBucketUrl). + WithCredentials(sdk.NewExternalStageS3CredentialsRequest(). + WithAwsKeyId(&awsKeyId). 
+ WithAwsSecretKey(&awsSecretKey)) + err := client.Stages.CreateOnS3(ctx, sdk.NewCreateOnS3StageRequest(id). + WithFileFormat(sdk.NewStageFileFormatRequest().WithType(&sdk.FileFormatTypeJSON)). + WithExternalStageParams(s3Req). + WithComment(sdk.String("some comment"))) + require.NoError(t, err) + cleanupStage(t, id) + + stage, err := client.Stages.ShowByID(ctx, id) + require.NoError(t, err) + assertStage(t, stage, id, "EXTERNAL", "some comment", "AWS", awsBucketUrl, "") }) - t.Run("CreateOnS3", func(t *testing.T) { - // TODO: fill me + t.Run("CreateOnS3 - temporary - Storage Integration", func(t *testing.T) { + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + + s3Req := sdk.NewExternalS3StageParamsRequest(awsBucketUrl). + WithStorageIntegration(sdk.Pointer(sdk.NewAccountObjectIdentifier(s3StorageIntegration.Name))) + err := client.Stages.CreateOnS3(ctx, sdk.NewCreateOnS3StageRequest(id). + WithTemporary(sdk.Bool(true)). + WithFileFormat(sdk.NewStageFileFormatRequest().WithType(&sdk.FileFormatTypeJSON)). + WithExternalStageParams(s3Req). + WithComment(sdk.String("some comment"))) + require.NoError(t, err) + cleanupStage(t, id) + + stage, err := client.Stages.ShowByID(ctx, id) + require.NoError(t, err) + assertStage(t, stage, id, "EXTERNAL TEMPORARY", "some comment", "AWS", awsBucketUrl, s3StorageIntegration.Name) }) t.Run("CreateOnGCS", func(t *testing.T) { - // TODO: fill me + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + + err := client.Stages.CreateOnGCS(ctx, sdk.NewCreateOnGCSStageRequest(id). + WithFileFormat(sdk.NewStageFileFormatRequest().WithType(&sdk.FileFormatTypeJSON)). + WithExternalStageParams(sdk.NewExternalGCSStageParamsRequest(gcsBucketUrl). + WithStorageIntegration(sdk.Pointer(sdk.NewAccountObjectIdentifier(gcpStorageIntegration.Name)))). + WithComment(sdk.String("some comment"))) + require.NoError(t, err) + cleanupStage(t, id) + + stage, err := client.Stages.ShowByID(ctx, id) + require.NoError(t, err) + assertStage(t, stage, id, "EXTERNAL", "some comment", "GCP", gcsBucketUrl, gcpStorageIntegration.Name) }) - t.Run("CreateOnAzure", func(t *testing.T) { - // TODO: fill me + t.Run("CreateOnAzure - Storage Integration", func(t *testing.T) { + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + + err := client.Stages.CreateOnAzure(ctx, sdk.NewCreateOnAzureStageRequest(id). + WithFileFormat(sdk.NewStageFileFormatRequest().WithType(&sdk.FileFormatTypeJSON)). + WithExternalStageParams(sdk.NewExternalAzureStageParamsRequest(azureBucketUrl). + WithStorageIntegration(sdk.Pointer(sdk.NewAccountObjectIdentifier(azureStorageIntegration.Name)))). + WithComment(sdk.String("some comment"))) + require.NoError(t, err) + cleanupStage(t, id) + + stage, err := client.Stages.ShowByID(ctx, id) + require.NoError(t, err) + assertStage(t, stage, id, "EXTERNAL", "some comment", "AZURE", azureBucketUrl, azureStorageIntegration.Name) + }) + + t.Run("CreateOnAzure - Shared Access Signature", func(t *testing.T) { + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + + err := client.Stages.CreateOnAzure(ctx, sdk.NewCreateOnAzureStageRequest(id). + WithFileFormat(sdk.NewStageFileFormatRequest().WithType(&sdk.FileFormatTypeJSON)). + WithExternalStageParams(sdk.NewExternalAzureStageParamsRequest(azureBucketUrl). + WithCredentials(sdk.NewExternalStageAzureCredentialsRequest(azureSasToken))). 
+ WithComment(sdk.String("some comment"))) + require.NoError(t, err) + cleanupStage(t, id) + + stage, err := client.Stages.ShowByID(ctx, id) + require.NoError(t, err) + assertStage(t, stage, id, "EXTERNAL", "some comment", "AZURE", azureBucketUrl, "") }) t.Run("CreateOnS3Compatible", func(t *testing.T) { - // TODO: fill me + // TODO: (SNOW-1012064) create s3 compat service for tests }) t.Run("Alter - rename", func(t *testing.T) { @@ -174,19 +330,81 @@ func TestInt_Stages(t *testing.T) { }) t.Run("AlterExternalS3Stage", func(t *testing.T) { - // TODO: fill me + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + createBasicS3Stage(t, id) + + err := client.Stages.AlterExternalS3Stage(ctx, sdk.NewAlterExternalS3StageStageRequest(id). + WithExternalStageParams(sdk.NewExternalS3StageParamsRequest(awsBucketUrl). + WithStorageIntegration(sdk.Pointer(sdk.NewAccountObjectIdentifier(s3StorageIntegration.Name)))). + WithComment(sdk.String("Updated comment"))) + require.NoError(t, err) + + stage, err := client.Stages.ShowByID(ctx, id) + require.NoError(t, err) + assertStage(t, stage, id, "EXTERNAL", "Updated comment", "AWS", awsBucketUrl, s3StorageIntegration.Name) }) t.Run("AlterExternalGCSStage", func(t *testing.T) { - // TODO: fill me + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + createBasicGcsStage(t, id) + + err := client.Stages.AlterExternalGCSStage(ctx, sdk.NewAlterExternalGCSStageStageRequest(id). + WithExternalStageParams(sdk.NewExternalGCSStageParamsRequest(gcsBucketUrl). + WithStorageIntegration(sdk.Pointer(sdk.NewAccountObjectIdentifier(gcpStorageIntegration.Name)))). + WithComment(sdk.String("Updated comment"))) + require.NoError(t, err) + + stage, err := client.Stages.ShowByID(ctx, id) + require.NoError(t, err) + assertStage(t, stage, id, "EXTERNAL", "Updated comment", "GCP", gcsBucketUrl, gcpStorageIntegration.Name) }) t.Run("AlterExternalAzureStage", func(t *testing.T) { - // TODO: fill me + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + createBasicAzureStage(t, id) + + err := client.Stages.AlterExternalAzureStage(ctx, sdk.NewAlterExternalAzureStageStageRequest(id). + WithExternalStageParams(sdk.NewExternalAzureStageParamsRequest(azureBucketUrl). + WithStorageIntegration(sdk.Pointer(sdk.NewAccountObjectIdentifier(azureStorageIntegration.Name)))). + WithComment(sdk.String("Updated comment"))) + require.NoError(t, err) + + stage, err := client.Stages.ShowByID(ctx, id) + require.NoError(t, err) + assertStage(t, stage, id, "EXTERNAL", "Updated comment", "AZURE", azureBucketUrl, azureStorageIntegration.Name) }) t.Run("AlterDirectoryTable", func(t *testing.T) { - // TODO: fill me + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + createBasicS3Stage(t, id) + + stageProperties, err := client.Stages.Describe(ctx, id) + require.NoError(t, err) + assert.Contains(t, stageProperties, sdk.StageProperty{ + Parent: "DIRECTORY", + Name: "ENABLE", + Type: "Boolean", + Value: "false", + Default: "false", + }) + + err = client.Stages.AlterDirectoryTable(ctx, sdk.NewAlterDirectoryTableStageRequest(id). + WithSetDirectory(sdk.NewDirectoryTableSetRequest(true))) + require.NoError(t, err) + + err = client.Stages.AlterDirectoryTable(ctx, sdk.NewAlterDirectoryTableStageRequest(id). 
+ WithRefresh(sdk.NewDirectoryTableRefreshRequest().WithSubpath(sdk.String("/")))) + require.NoError(t, err) + + stageProperties, err = client.Stages.Describe(ctx, id) + require.NoError(t, err) + assert.Contains(t, stageProperties, sdk.StageProperty{ + Parent: "DIRECTORY", + Name: "ENABLE", + Type: "Boolean", + Value: "true", + Default: "false", + }) }) t.Run("Drop", func(t *testing.T) { @@ -237,12 +455,65 @@ func TestInt_Stages(t *testing.T) { }) t.Run("Describe external s3", func(t *testing.T) { + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + createBasicS3Stage(t, id) + + stageProperties, err := client.Stages.Describe(ctx, id) + require.NoError(t, err) + require.NotEmpty(t, stageProperties) + assert.Contains(t, stageProperties, sdk.StageProperty{ + Parent: "STAGE_CREDENTIALS", + Name: "AWS_KEY_ID", + Type: "String", + Value: awsKeyId, + Default: "", + }) + assert.Contains(t, stageProperties, sdk.StageProperty{ + Parent: "STAGE_LOCATION", + Name: "URL", + Type: "String", + Value: fmt.Sprintf("[\"%s\"]", awsBucketUrl), + Default: "", + }) }) t.Run("Describe external gcs", func(t *testing.T) { + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + createBasicGcsStage(t, id) + + stageProperties, err := client.Stages.Describe(ctx, id) + require.NoError(t, err) + require.NotEmpty(t, stageProperties) + assert.Contains(t, stageProperties, sdk.StageProperty{ + Parent: "STAGE_LOCATION", + Name: "URL", + Type: "String", + Value: fmt.Sprintf("[\"%s\"]", gcsBucketUrl), + Default: "", + }) }) t.Run("Describe external azure", func(t *testing.T) { + id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, random.AlphanumericN(32)) + createBasicAzureStage(t, id) + + stageProperties, err := client.Stages.Describe(ctx, id) + require.NoError(t, err) + require.NotEmpty(t, stageProperties) + assert.Contains(t, stageProperties, sdk.StageProperty{ + Parent: "DIRECTORY", + Name: "ENABLE", + Type: "Boolean", + Value: "false", + Default: "false", + }) + assert.Contains(t, stageProperties, sdk.StageProperty{ + Parent: "STAGE_LOCATION", + Name: "URL", + Type: "String", + Value: fmt.Sprintf("[\"%s\"]", azureBucketUrl), + Default: "", + }) }) t.Run("Show internal", func(t *testing.T) { @@ -273,13 +544,4 @@ func TestInt_Stages(t *testing.T) { assert.Nil(t, stage.Endpoint) assert.True(t, stage.DirectoryEnabled) }) - - t.Run("Show external s3", func(t *testing.T) { - }) - - t.Run("Show external gcs", func(t *testing.T) { - }) - - t.Run("Show external azure", func(t *testing.T) { - }) } diff --git a/pkg/sdk/testint/streams_gen_integration_test.go b/pkg/sdk/testint/streams_gen_integration_test.go index 93a03bd49a..1ce5d59bad 100644 --- a/pkg/sdk/testint/streams_gen_integration_test.go +++ b/pkg/sdk/testint/streams_gen_integration_test.go @@ -55,8 +55,8 @@ func TestInt_Streams(t *testing.T) { }) t.Run("CreateOnExternalTable", func(t *testing.T) { - stageID := sdk.NewAccountObjectIdentifier("EXTERNAL_TABLE_STAGE") - stageLocation := "@external_table_stage" + stageID := sdk.NewSchemaObjectIdentifier(TestDatabaseName, TestSchemaName, "EXTERNAL_TABLE_STAGE") + stageLocation := fmt.Sprintf("@%s", stageID.FullyQualifiedName()) _, _ = createStageWithURL(t, client, stageID, nycWeatherDataURL) externalTableId := sdk.NewSchemaObjectIdentifier(db.Name, schema.Name, random.AlphanumericN(32)) diff --git a/pkg/sdk/testint/tables_integration_test.go b/pkg/sdk/testint/tables_integration_test.go index 5198b5eec1..2b4b850792 
100644
--- a/pkg/sdk/testint/tables_integration_test.go
+++ b/pkg/sdk/testint/tables_integration_test.go
@@ -213,7 +213,7 @@ func TestInt_Table(t *testing.T) {
 t.Run("create table using template", func(t *testing.T) {
 fileFormat, fileFormatCleanup := createFileFormat(t, client, schema.ID())
 t.Cleanup(fileFormatCleanup)
- stage, stageCleanup := createStageWithName(t, client, "new_stage")
+ stage, stageCleanup := createStage(t, client, sdk.NewSchemaObjectIdentifier(database.Name, schema.Name, "new_stage"))
 t.Cleanup(stageCleanup)

 f, err := os.CreateTemp("/tmp", "data.csv")
@@ -224,14 +224,14 @@ func TestInt_Table(t *testing.T) {
 require.NoError(t, err)
 err = w.Flush()
 require.NoError(t, err)
- _, err = client.ExecForTests(ctx, fmt.Sprintf("PUT file://%s @%s", f.Name(), *stage))
+ _, err = client.ExecForTests(ctx, fmt.Sprintf("PUT file://%s @%s", f.Name(), stage.ID().FullyQualifiedName()))
 require.NoError(t, err)
 err = os.Remove(f.Name())
 require.NoError(t, err)

 name := random.String()
 id := sdk.NewSchemaObjectIdentifier(database.Name, schema.Name, name)
- query := fmt.Sprintf(`SELECT ARRAY_AGG(OBJECT_CONSTRUCT(*)) WITHIN GROUP (ORDER BY order_id) FROM TABLE (INFER_SCHEMA(location => '@%s', FILE_FORMAT=>'%s', ignore_case => true))`, *stage, fileFormat.ID().FullyQualifiedName())
+ query := fmt.Sprintf(`SELECT ARRAY_AGG(OBJECT_CONSTRUCT(*)) WITHIN GROUP (ORDER BY order_id) FROM TABLE (INFER_SCHEMA(location => '@%s', FILE_FORMAT=>'%s', ignore_case => true))`, stage.ID().FullyQualifiedName(), fileFormat.ID().FullyQualifiedName())

 request := sdk.NewCreateTableUsingTemplateRequest(id, query)
 err = client.Tables.CreateUsingTemplate(ctx, request)

From 8c03ffb0430445c903168da9706e1ce2630675da Mon Sep 17 00:00:00 2001
From: Artur Sawicki
Date: Tue, 23 Jan 2024 16:52:34 +0100
Subject: [PATCH 4/5] fix: Adjust tests after Snowflake behavior change (#2404)

Changes in tests:
- pipe tests that intentionally documented the incorrect behavior; they started to fail after the Snowflake 8.3.1 release and have been adjusted
- a stream test was failing because multiple objects shared the same name; a random name is now used
---
 pkg/sdk/testint/pipes_integration_test.go | 15 ++++++++-------
 pkg/sdk/testint/streams_gen_integration_test.go | 3 ++-
 2 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/pkg/sdk/testint/pipes_integration_test.go b/pkg/sdk/testint/pipes_integration_test.go
index f3b6a289e1..da1faf02ab 100644
--- a/pkg/sdk/testint/pipes_integration_test.go
+++ b/pkg/sdk/testint/pipes_integration_test.go
@@ -17,7 +17,9 @@ func createPipeCopyStatement(t *testing.T, table *sdk.Table, stage *sdk.Stage) s
 return fmt.Sprintf("COPY INTO %s\nFROM @%s", table.ID().FullyQualifiedName(), stage.ID().FullyQualifiedName())
 }

-func TestInt_IncorrectCreatePipeBehaviour(t *testing.T) {
+// TestInt_CreatePipeWithStrangeSchemaName documented the previous bad behavior, which changed with the Snowflake 8.3.1 release.
+// We leave the test in place for future reference.
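+// For illustration only (the identifiers below are hypothetical): a fully qualified statement such as
+//   COPY INTO "db"."tcK1>AJ+"."table" FROM @"db"."schema"."stage"
+// now compiles, while leaving the table unqualified fails with "object does not exist or not authorized"
+// (see the subtests below).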
+func TestInt_CreatePipeWithStrangeSchemaName(t *testing.T) { schemaIdentifier := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, "tcK1>AJ+") // creating a new schema on purpose @@ -31,7 +33,7 @@ func TestInt_IncorrectCreatePipeBehaviour(t *testing.T) { stage, stageCleanup := createStage(t, itc.client, sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, stageName)) t.Cleanup(stageCleanup) - t.Run("if we have special characters in db or schema name, create pipe returns error in copy <> from <> section", func(t *testing.T) { + t.Run("if we have special characters in db or schema name, create pipe succeeds", func(t *testing.T) { err := itc.client.Pipes.Create( itc.ctx, sdk.NewSchemaObjectIdentifier(testDb(t).Name, schema.Name, random.AlphanumericN(20)), @@ -39,12 +41,10 @@ func TestInt_IncorrectCreatePipeBehaviour(t *testing.T) { &sdk.CreatePipeOptions{}, ) - require.ErrorContains(t, err, "(42000): SQL compilation error:\nsyntax error line") - require.ErrorContains(t, err, "at position") - require.ErrorContains(t, err, "unexpected '>'") + require.NoError(t, err) }) - t.Run("the same works with using non fully qualified name for table", func(t *testing.T) { + t.Run("the same does not work when using non fully qualified name for table", func(t *testing.T) { createCopyStatementWithoutQualifiersForStage := func(t *testing.T, table *sdk.Table, stage *sdk.Stage) string { t.Helper() require.NotNil(t, table, "table has to be created") @@ -59,7 +59,8 @@ func TestInt_IncorrectCreatePipeBehaviour(t *testing.T) { &sdk.CreatePipeOptions{}, ) - require.NoError(t, err) + require.Error(t, err) + require.ErrorContains(t, err, "object does not exist or not authorized") }) } diff --git a/pkg/sdk/testint/streams_gen_integration_test.go b/pkg/sdk/testint/streams_gen_integration_test.go index 1ce5d59bad..ac1ead524f 100644 --- a/pkg/sdk/testint/streams_gen_integration_test.go +++ b/pkg/sdk/testint/streams_gen_integration_test.go @@ -55,7 +55,8 @@ func TestInt_Streams(t *testing.T) { }) t.Run("CreateOnExternalTable", func(t *testing.T) { - stageID := sdk.NewSchemaObjectIdentifier(TestDatabaseName, TestSchemaName, "EXTERNAL_TABLE_STAGE") + stageName := random.AlphaN(10) + stageID := sdk.NewSchemaObjectIdentifier(TestDatabaseName, TestSchemaName, stageName) stageLocation := fmt.Sprintf("@%s", stageID.FullyQualifiedName()) _, _ = createStageWithURL(t, client, stageID, nycWeatherDataURL) From d2e5ffd5405f10ff30c5ad9f7cd58bd54a5cc028 Mon Sep 17 00:00:00 2001 From: Scott Winkler Date: Tue, 23 Jan 2024 11:36:12 -0800 Subject: [PATCH 5/5] feat: add sequences to sdk (#2351) adds sequences to sdk https://docs.snowflake.com/en/sql-reference/sql/create-sequence --- pkg/sdk/client.go | 2 + pkg/sdk/common_types.go | 11 + pkg/sdk/poc/main.go | 1 + pkg/sdk/sequences_def.go | 124 +++++++++++ pkg/sdk/sequences_dto_builders_gen.go | 144 +++++++++++++ pkg/sdk/sequences_dto_gen.go | 55 +++++ pkg/sdk/sequences_gen.go | 123 +++++++++++ pkg/sdk/sequences_gen_test.go | 194 ++++++++++++++++++ pkg/sdk/sequences_impl_gen.go | 148 +++++++++++++ pkg/sdk/sequences_validations_gen.go | 75 +++++++ pkg/sdk/testint/sequences_integration_test.go | 171 +++++++++++++++ pkg/sdk/testint/setup_test.go | 1 + 12 files changed, 1049 insertions(+) create mode 100644 pkg/sdk/sequences_def.go create mode 100644 pkg/sdk/sequences_dto_builders_gen.go create mode 100644 pkg/sdk/sequences_dto_gen.go create mode 100644 pkg/sdk/sequences_gen.go create mode 100644 pkg/sdk/sequences_gen_test.go create mode 100644 pkg/sdk/sequences_impl_gen.go create mode 
100644 pkg/sdk/sequences_validations_gen.go create mode 100644 pkg/sdk/testint/sequences_integration_test.go diff --git a/pkg/sdk/client.go b/pkg/sdk/client.go index 2accdbc55c..aede13ba58 100644 --- a/pkg/sdk/client.go +++ b/pkg/sdk/client.go @@ -63,6 +63,7 @@ type Client struct { Roles Roles RowAccessPolicies RowAccessPolicies Schemas Schemas + Sequences Sequences SessionPolicies SessionPolicies Sessions Sessions Shares Shares @@ -209,6 +210,7 @@ func (c *Client) initialize() { c.Roles = &roles{client: c} c.RowAccessPolicies = &rowAccessPolicies{client: c} c.Schemas = &schemas{client: c} + c.Sequences = &sequences{client: c} c.SessionPolicies = &sessionPolicies{client: c} c.Sessions = &sessions{client: c} c.Shares = &shares{client: c} diff --git a/pkg/sdk/common_types.go b/pkg/sdk/common_types.go index 517ace65ed..03fb6bb4b1 100644 --- a/pkg/sdk/common_types.go +++ b/pkg/sdk/common_types.go @@ -199,6 +199,17 @@ type Secret struct { Name string `ddl:"parameter,no_quotes"` } +type ValuesBehavior string + +var ( + ValuesBehaviorOrder ValuesBehavior = "ORDER" + ValuesBehaviorNoOrder ValuesBehavior = "NOORDER" +) + +func ValuesBehaviorPointer(v ValuesBehavior) *ValuesBehavior { + return &v +} + type Distribution string var ( diff --git a/pkg/sdk/poc/main.go b/pkg/sdk/poc/main.go index ab056a5c40..492bbfe201 100644 --- a/pkg/sdk/poc/main.go +++ b/pkg/sdk/poc/main.go @@ -31,6 +31,7 @@ var definitionMapping = map[string]*generator.Interface{ "storage_integration_def.go": sdk.StorageIntegrationDef, "managed_accounts_def.go": sdk.ManagedAccountsDef, "row_access_policies_def.go": sdk.RowAccessPoliciesDef, + "sequences_def.go": sdk.SequencesDef, } func main() { diff --git a/pkg/sdk/sequences_def.go b/pkg/sdk/sequences_def.go new file mode 100644 index 0000000000..c73ea4d506 --- /dev/null +++ b/pkg/sdk/sequences_def.go @@ -0,0 +1,124 @@ +package sdk + +import g "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/poc/generator" + +//go:generate go run ./poc/main.go + +var sequenceSet = g.NewQueryStruct("SequenceSet"). + PredefinedQueryStructField("ValuesBehavior", "*ValuesBehavior", g.KeywordOptions()). + OptionalTextAssignment("COMMENT", g.ParameterOptions().SingleQuotes()) + +var sequenceConstraint = g.NewQueryStruct("SequenceConstraint"). + OptionalSQL("CASCADE"). + OptionalSQL("RESTRICT"). + WithValidation(g.ExactlyOneValueSet, "Cascade", "Restrict") + +var SequencesDef = g.NewInterface( + "Sequences", + "Sequence", + g.KindOfT[SchemaObjectIdentifier](), +).CreateOperation( + "https://docs.snowflake.com/en/sql-reference/sql/create-sequence", + g.NewQueryStruct("CreateSequence"). + Create(). + OrReplace(). + SQL("SEQUENCE"). + IfNotExists(). + Name(). + OptionalNumberAssignment("START", g.ParameterOptions().NoQuotes()). + OptionalNumberAssignment("INCREMENT", g.ParameterOptions().NoQuotes()). + PredefinedQueryStructField("ValuesBehavior", "*ValuesBehavior", g.KeywordOptions()). + OptionalTextAssignment("COMMENT", g.ParameterOptions().SingleQuotes()). + WithValidation(g.ValidIdentifier, "name"). + WithValidation(g.ConflictingFields, "OrReplace", "IfNotExists"), +).AlterOperation( + "https://docs.snowflake.com/en/sql-reference/sql/alter-sequence", + g.NewQueryStruct("AlterSequence"). + Alter(). + SQL("SEQUENCE"). + IfExists(). + Name(). + Identifier("RenameTo", g.KindOfTPointer[SchemaObjectIdentifier](), g.IdentifierOptions().SQL("RENAME TO")). + OptionalNumberAssignment("SET INCREMENT", g.ParameterOptions().NoQuotes()). 
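+ // Renders `SET INCREMENT = <n>`. It is kept separate from the SET query struct below
+ // because, per the validations, `ALTER SEQUENCE ... SET INCREMENT = <n>` and
+ // `ALTER SEQUENCE ... SET [ORDER|NOORDER] [COMMENT = '...']` are mutually exclusive actions.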
+ OptionalQueryStructField( + "Set", + sequenceSet, + g.KeywordOptions().SQL("SET"), + ). + OptionalSQL("UNSET COMMENT"). + WithValidation(g.ValidIdentifier, "name"). + WithValidation(g.ValidIdentifierIfSet, "RenameTo"). + WithValidation(g.ExactlyOneValueSet, "RenameTo", "SetIncrement", "Set", "UnsetComment"), +).ShowOperation( + "https://docs.snowflake.com/en/sql-reference/sql/show-sequences", + g.DbStruct("sequenceRow"). + Field("created_on", "string"). + Field("name", "string"). + Field("schema_name", "string"). + Field("database_name", "string"). + Field("next_value", "int"). + Field("interval", "int"). + Field("owner", "string"). + Field("owner_role_type", "string"). + Field("comment", "string"). + Field("ordered", "string"), + g.PlainStruct("Sequence"). + Field("CreatedOn", "string"). + Field("Name", "string"). + Field("SchemaName", "string"). + Field("DatabaseName", "string"). + Field("NextValue", "int"). + Field("Interval", "int"). + Field("Owner", "string"). + Field("OwnerRoleType", "string"). + Field("Comment", "string"). + Field("Ordered", "bool"), + g.NewQueryStruct("ShowSequences"). + Show(). + SQL("SEQUENCES"). + OptionalLike(). + OptionalIn(), +).ShowByIdOperation().DescribeOperation( + g.DescriptionMappingKindSingleValue, + "https://docs.snowflake.com/en/sql-reference/sql/desc-sequence", + g.DbStruct("sequenceDetailRow"). + Field("created_on", "string"). + Field("name", "string"). + Field("schema_name", "string"). + Field("database_name", "string"). + Field("next_value", "int"). + Field("interval", "int"). + Field("owner", "string"). + Field("owner_role_type", "string"). + Field("comment", "string"). + Field("ordered", "string"), + g.PlainStruct("SequenceDetail"). + Field("CreatedOn", "string"). + Field("Name", "string"). + Field("SchemaName", "string"). + Field("DatabaseName", "string"). + Field("NextValue", "int"). + Field("Interval", "int"). + Field("Owner", "string"). + Field("OwnerRoleType", "string"). + Field("Comment", "string"). + Field("Ordered", "bool"), + g.NewQueryStruct("DescribeSequence"). + Describe(). + SQL("SEQUENCE"). + Name(). + WithValidation(g.ValidIdentifier, "name"), +).DropOperation( + "https://docs.snowflake.com/en/sql-reference/sql/drop-sequence", + g.NewQueryStruct("DropSequence"). + Drop(). + SQL("SEQUENCE"). + IfExists(). + Name(). + OptionalQueryStructField( + "Constraint", + sequenceConstraint, + g.KeywordOptions(), + ). + WithValidation(g.ValidIdentifier, "name"), +) diff --git a/pkg/sdk/sequences_dto_builders_gen.go b/pkg/sdk/sequences_dto_builders_gen.go new file mode 100644 index 0000000000..95b0355f95 --- /dev/null +++ b/pkg/sdk/sequences_dto_builders_gen.go @@ -0,0 +1,144 @@ +// Code generated by dto builder generator; DO NOT EDIT. 
+ +package sdk + +import () + +func NewCreateSequenceRequest( + name SchemaObjectIdentifier, +) *CreateSequenceRequest { + s := CreateSequenceRequest{} + s.name = name + return &s +} + +func (s *CreateSequenceRequest) WithOrReplace(OrReplace *bool) *CreateSequenceRequest { + s.OrReplace = OrReplace + return s +} + +func (s *CreateSequenceRequest) WithIfNotExists(IfNotExists *bool) *CreateSequenceRequest { + s.IfNotExists = IfNotExists + return s +} + +func (s *CreateSequenceRequest) WithStart(Start *int) *CreateSequenceRequest { + s.Start = Start + return s +} + +func (s *CreateSequenceRequest) WithIncrement(Increment *int) *CreateSequenceRequest { + s.Increment = Increment + return s +} + +func (s *CreateSequenceRequest) WithValuesBehavior(ValuesBehavior *ValuesBehavior) *CreateSequenceRequest { + s.ValuesBehavior = ValuesBehavior + return s +} + +func (s *CreateSequenceRequest) WithComment(Comment *string) *CreateSequenceRequest { + s.Comment = Comment + return s +} + +func NewAlterSequenceRequest( + name SchemaObjectIdentifier, +) *AlterSequenceRequest { + s := AlterSequenceRequest{} + s.name = name + return &s +} + +func (s *AlterSequenceRequest) WithIfExists(IfExists *bool) *AlterSequenceRequest { + s.IfExists = IfExists + return s +} + +func (s *AlterSequenceRequest) WithRenameTo(RenameTo *SchemaObjectIdentifier) *AlterSequenceRequest { + s.RenameTo = RenameTo + return s +} + +func (s *AlterSequenceRequest) WithSetIncrement(SetIncrement *int) *AlterSequenceRequest { + s.SetIncrement = SetIncrement + return s +} + +func (s *AlterSequenceRequest) WithSet(Set *SequenceSetRequest) *AlterSequenceRequest { + s.Set = Set + return s +} + +func (s *AlterSequenceRequest) WithUnsetComment(UnsetComment *bool) *AlterSequenceRequest { + s.UnsetComment = UnsetComment + return s +} + +func NewSequenceSetRequest() *SequenceSetRequest { + return &SequenceSetRequest{} +} + +func (s *SequenceSetRequest) WithValuesBehavior(ValuesBehavior *ValuesBehavior) *SequenceSetRequest { + s.ValuesBehavior = ValuesBehavior + return s +} + +func (s *SequenceSetRequest) WithComment(Comment *string) *SequenceSetRequest { + s.Comment = Comment + return s +} + +func NewShowSequenceRequest() *ShowSequenceRequest { + return &ShowSequenceRequest{} +} + +func (s *ShowSequenceRequest) WithLike(Like *Like) *ShowSequenceRequest { + s.Like = Like + return s +} + +func (s *ShowSequenceRequest) WithIn(In *In) *ShowSequenceRequest { + s.In = In + return s +} + +func NewDescribeSequenceRequest( + name SchemaObjectIdentifier, +) *DescribeSequenceRequest { + s := DescribeSequenceRequest{} + s.name = name + return &s +} + +func NewDropSequenceRequest( + name SchemaObjectIdentifier, +) *DropSequenceRequest { + s := DropSequenceRequest{} + s.name = name + return &s +} + +func (s *DropSequenceRequest) WithIfExists(IfExists *bool) *DropSequenceRequest { + s.IfExists = IfExists + return s +} + +func (s *DropSequenceRequest) WithConstraint(Constraint *SequenceConstraintRequest) *DropSequenceRequest { + s.Constraint = Constraint + return s +} + +func NewSequenceConstraintRequest() *SequenceConstraintRequest { + return &SequenceConstraintRequest{} +} + +func (s *SequenceConstraintRequest) WithCascade(Cascade *bool) *SequenceConstraintRequest { + s.Cascade = Cascade + return s +} + +func (s *SequenceConstraintRequest) WithRestrict(Restrict *bool) *SequenceConstraintRequest { + s.Restrict = Restrict + return s +} diff --git a/pkg/sdk/sequences_dto_gen.go b/pkg/sdk/sequences_dto_gen.go new file mode 100644 index 0000000000..35509fae5d --- 
/dev/null +++ b/pkg/sdk/sequences_dto_gen.go @@ -0,0 +1,55 @@ +package sdk + +//go:generate go run ./dto-builder-generator/main.go + +var ( + _ optionsProvider[CreateSequenceOptions] = new(CreateSequenceRequest) + _ optionsProvider[AlterSequenceOptions] = new(AlterSequenceRequest) + _ optionsProvider[ShowSequenceOptions] = new(ShowSequenceRequest) + _ optionsProvider[DescribeSequenceOptions] = new(DescribeSequenceRequest) + _ optionsProvider[DropSequenceOptions] = new(DropSequenceRequest) +) + +type CreateSequenceRequest struct { + OrReplace *bool + IfNotExists *bool + name SchemaObjectIdentifier // required + Start *int + Increment *int + ValuesBehavior *ValuesBehavior + Comment *string +} + +type AlterSequenceRequest struct { + IfExists *bool + name SchemaObjectIdentifier // required + RenameTo *SchemaObjectIdentifier + SetIncrement *int + Set *SequenceSetRequest + UnsetComment *bool +} + +type SequenceSetRequest struct { + ValuesBehavior *ValuesBehavior + Comment *string +} + +type ShowSequenceRequest struct { + Like *Like + In *In +} + +type DescribeSequenceRequest struct { + name SchemaObjectIdentifier // required +} + +type DropSequenceRequest struct { + IfExists *bool + name SchemaObjectIdentifier // required + Constraint *SequenceConstraintRequest +} + +type SequenceConstraintRequest struct { + Cascade *bool + Restrict *bool +} diff --git a/pkg/sdk/sequences_gen.go b/pkg/sdk/sequences_gen.go new file mode 100644 index 0000000000..bb476dc300 --- /dev/null +++ b/pkg/sdk/sequences_gen.go @@ -0,0 +1,123 @@ +package sdk + +import "context" + +type Sequences interface { + Create(ctx context.Context, request *CreateSequenceRequest) error + Alter(ctx context.Context, request *AlterSequenceRequest) error + Show(ctx context.Context, request *ShowSequenceRequest) ([]Sequence, error) + ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*Sequence, error) + Describe(ctx context.Context, id SchemaObjectIdentifier) (*SequenceDetail, error) + Drop(ctx context.Context, request *DropSequenceRequest) error +} + +// CreateSequenceOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-sequence. +type CreateSequenceOptions struct { + create bool `ddl:"static" sql:"CREATE"` + OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` + sequence bool `ddl:"static" sql:"SEQUENCE"` + IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` + name SchemaObjectIdentifier `ddl:"identifier"` + Start *int `ddl:"parameter,no_quotes" sql:"START"` + Increment *int `ddl:"parameter,no_quotes" sql:"INCREMENT"` + ValuesBehavior *ValuesBehavior `ddl:"keyword"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` +} + +// AlterSequenceOptions is based on https://docs.snowflake.com/en/sql-reference/sql/alter-sequence. +type AlterSequenceOptions struct { + alter bool `ddl:"static" sql:"ALTER"` + sequence bool `ddl:"static" sql:"SEQUENCE"` + IfExists *bool `ddl:"keyword" sql:"IF EXISTS"` + name SchemaObjectIdentifier `ddl:"identifier"` + RenameTo *SchemaObjectIdentifier `ddl:"identifier" sql:"RENAME TO"` + SetIncrement *int `ddl:"parameter,no_quotes" sql:"SET INCREMENT"` + Set *SequenceSet `ddl:"keyword" sql:"SET"` + UnsetComment *bool `ddl:"keyword" sql:"UNSET COMMENT"` +} + +type SequenceSet struct { + ValuesBehavior *ValuesBehavior `ddl:"keyword"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` +} + +// ShowSequenceOptions is based on https://docs.snowflake.com/en/sql-reference/sql/show-sequences. 
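+// As a sketch, these options render SQL of the form `SHOW SEQUENCES [ LIKE '<pattern>' ] [ IN ... ]`,
+// e.g. `SHOW SEQUENCES LIKE 'pattern'` or `SHOW SEQUENCES IN ACCOUNT` (see the generated tests).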
+type ShowSequenceOptions struct { + show bool `ddl:"static" sql:"SHOW"` + sequences bool `ddl:"static" sql:"SEQUENCES"` + Like *Like `ddl:"keyword" sql:"LIKE"` + In *In `ddl:"keyword" sql:"IN"` +} + +type sequenceRow struct { + CreatedOn string `db:"created_on"` + Name string `db:"name"` + SchemaName string `db:"schema_name"` + DatabaseName string `db:"database_name"` + NextValue int `db:"next_value"` + Interval int `db:"interval"` + Owner string `db:"owner"` + OwnerRoleType string `db:"owner_role_type"` + Comment string `db:"comment"` + Ordered string `db:"ordered"` +} + +type Sequence struct { + CreatedOn string + Name string + SchemaName string + DatabaseName string + NextValue int + Interval int + Owner string + OwnerRoleType string + Comment string + Ordered bool +} + +// DescribeSequenceOptions is based on https://docs.snowflake.com/en/sql-reference/sql/desc-sequence. +type DescribeSequenceOptions struct { + describe bool `ddl:"static" sql:"DESCRIBE"` + sequence bool `ddl:"static" sql:"SEQUENCE"` + name SchemaObjectIdentifier `ddl:"identifier"` +} + +type sequenceDetailRow struct { + CreatedOn string `db:"created_on"` + Name string `db:"name"` + SchemaName string `db:"schema_name"` + DatabaseName string `db:"database_name"` + NextValue int `db:"next_value"` + Interval int `db:"interval"` + Owner string `db:"owner"` + OwnerRoleType string `db:"owner_role_type"` + Comment string `db:"comment"` + Ordered string `db:"ordered"` +} + +type SequenceDetail struct { + CreatedOn string + Name string + SchemaName string + DatabaseName string + NextValue int + Interval int + Owner string + OwnerRoleType string + Comment string + Ordered bool +} + +// DropSequenceOptions is based on https://docs.snowflake.com/en/sql-reference/sql/drop-sequence. +type DropSequenceOptions struct { + drop bool `ddl:"static" sql:"DROP"` + sequence bool `ddl:"static" sql:"SEQUENCE"` + IfExists *bool `ddl:"keyword" sql:"IF EXISTS"` + name SchemaObjectIdentifier `ddl:"identifier"` + Constraint *SequenceConstraint `ddl:"keyword"` +} + +type SequenceConstraint struct { + Cascade *bool `ddl:"keyword" sql:"CASCADE"` + Restrict *bool `ddl:"keyword" sql:"RESTRICT"` +} diff --git a/pkg/sdk/sequences_gen_test.go b/pkg/sdk/sequences_gen_test.go new file mode 100644 index 0000000000..d58b11245a --- /dev/null +++ b/pkg/sdk/sequences_gen_test.go @@ -0,0 +1,194 @@ +package sdk + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/internal/random" +) + +func TestSequences_Create(t *testing.T) { + id := RandomSchemaObjectIdentifier() + + defaultOpts := func() *CreateSequenceOptions { + return &CreateSequenceOptions{ + name: id, + } + } + + t.Run("validation: nil options", func(t *testing.T) { + var opts *CreateSequenceOptions = nil + assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) + }) + + t.Run("validation: incorrect identifier", func(t *testing.T) { + opts := defaultOpts() + opts.name = NewSchemaObjectIdentifier("", "", "") + assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) + }) + + t.Run("validation: conflicting fields", func(t *testing.T) { + opts := defaultOpts() + opts.OrReplace = Bool(true) + opts.IfNotExists = Bool(true) + assertOptsInvalidJoinedErrors(t, opts, errOneOf("CreateSequenceOptions", "OrReplace", "IfNotExists")) + }) + + t.Run("all options", func(t *testing.T) { + opts := defaultOpts() + opts.OrReplace = Bool(true) + opts.Start = Int(1) + opts.Increment = Int(1) + opts.ValuesBehavior = ValuesBehaviorPointer(ValuesBehaviorOrder) + opts.Comment = 
String("comment") + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SEQUENCE %s START = 1 INCREMENT = 1 ORDER COMMENT = 'comment'`, id.FullyQualifiedName()) + }) +} + +func TestSequences_Alter(t *testing.T) { + id := RandomSchemaObjectIdentifier() + + defaultOpts := func() *AlterSequenceOptions { + return &AlterSequenceOptions{ + name: id, + IfExists: Bool(true), + } + } + + t.Run("validation: nil options", func(t *testing.T) { + opts := (*AlterSequenceOptions)(nil) + assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) + }) + + t.Run("validation: incorrect identifier", func(t *testing.T) { + opts := defaultOpts() + opts.name = NewSchemaObjectIdentifier("", "", "") + assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) + }) + + t.Run("validation: exactly one field should be present", func(t *testing.T) { + opts := defaultOpts() + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterSequenceOptions", "RenameTo", "SetIncrement", "Set", "UnsetComment")) + }) + + t.Run("validation: exactly one field should be present", func(t *testing.T) { + opts := defaultOpts() + opts.SetIncrement = Int(1) + opts.UnsetComment = Bool(true) + assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("AlterSequenceOptions", "RenameTo", "SetIncrement", "Set", "UnsetComment")) + }) + + t.Run("alter: rename to", func(t *testing.T) { + opts := defaultOpts() + target := NewSchemaObjectIdentifier(id.DatabaseName(), id.SchemaName(), random.StringN(12)) + opts.RenameTo = &target + assertOptsValidAndSQLEquals(t, opts, `ALTER SEQUENCE IF EXISTS %s RENAME TO %s`, id.FullyQualifiedName(), opts.RenameTo.FullyQualifiedName()) + }) + + t.Run("alter: set options", func(t *testing.T) { + opts := defaultOpts() + opts.Set = &SequenceSet{ + Comment: String("comment"), + ValuesBehavior: ValuesBehaviorPointer(ValuesBehaviorOrder), + } + assertOptsValidAndSQLEquals(t, opts, `ALTER SEQUENCE IF EXISTS %s SET ORDER COMMENT = 'comment'`, id.FullyQualifiedName()) + }) + + t.Run("alter: unset comment", func(t *testing.T) { + opts := defaultOpts() + opts.UnsetComment = Bool(true) + assertOptsValidAndSQLEquals(t, opts, `ALTER SEQUENCE IF EXISTS %s UNSET COMMENT`, id.FullyQualifiedName()) + }) + + t.Run("alter: set increment", func(t *testing.T) { + opts := defaultOpts() + opts.SetIncrement = Int(1) + assertOptsValidAndSQLEquals(t, opts, `ALTER SEQUENCE IF EXISTS %s SET INCREMENT = 1`, id.FullyQualifiedName()) + }) +} + +func TestSequences_Show(t *testing.T) { + defaultOpts := func() *ShowSequenceOptions { + return &ShowSequenceOptions{} + } + + t.Run("validation: nil options", func(t *testing.T) { + opts := (*ShowSequenceOptions)(nil) + assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) + }) + + t.Run("show with empty options", func(t *testing.T) { + opts := defaultOpts() + assertOptsValidAndSQLEquals(t, opts, `SHOW SEQUENCES`) + }) + + t.Run("show with like", func(t *testing.T) { + opts := defaultOpts() + opts.Like = &Like{ + Pattern: String("pattern"), + } + assertOptsValidAndSQLEquals(t, opts, `SHOW SEQUENCES LIKE 'pattern'`) + }) + + t.Run("show with in", func(t *testing.T) { + opts := defaultOpts() + opts.In = &In{ + Account: Bool(true), + } + assertOptsValidAndSQLEquals(t, opts, `SHOW SEQUENCES IN ACCOUNT`) + }) +} + +func TestSequences_Describe(t *testing.T) { + id := RandomSchemaObjectIdentifier() + + defaultOpts := func() *DescribeSequenceOptions { + return &DescribeSequenceOptions{ + name: id, + } + } + + t.Run("validation: nil options", func(t *testing.T) { + opts := (*DescribeSequenceOptions)(nil) + 
assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) + }) + + t.Run("validation: incorrect identifier", func(t *testing.T) { + opts := defaultOpts() + opts.name = NewSchemaObjectIdentifier("", "", "") + assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) + }) + + t.Run("all options", func(t *testing.T) { + opts := defaultOpts() + assertOptsValidAndSQLEquals(t, opts, `DESCRIBE SEQUENCE %s`, id.FullyQualifiedName()) + }) +} + +func TestSequences_Drop(t *testing.T) { + id := RandomSchemaObjectIdentifier() + + defaultOpts := func() *DropSequenceOptions { + return &DropSequenceOptions{ + name: id, + } + } + t.Run("validation: nil options", func(t *testing.T) { + var opts *DropSequenceOptions = nil + assertOptsInvalidJoinedErrors(t, opts, ErrNilOptions) + }) + + t.Run("validation: incorrect identifier", func(t *testing.T) { + opts := defaultOpts() + opts.name = NewSchemaObjectIdentifier("", "", "") + assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) + }) + + t.Run("all options", func(t *testing.T) { + opts := defaultOpts() + opts.IfExists = Bool(true) + opts.Constraint = &SequenceConstraint{ + Cascade: Bool(true), + } + assertOptsValidAndSQLEquals(t, opts, `DROP SEQUENCE IF EXISTS %s CASCADE`, id.FullyQualifiedName()) + }) +} diff --git a/pkg/sdk/sequences_impl_gen.go b/pkg/sdk/sequences_impl_gen.go new file mode 100644 index 0000000000..4cac023003 --- /dev/null +++ b/pkg/sdk/sequences_impl_gen.go @@ -0,0 +1,148 @@ +package sdk + +import ( + "context" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/internal/collections" +) + +var _ Sequences = (*sequences)(nil) + +type sequences struct { + client *Client +} + +func (v *sequences) Create(ctx context.Context, request *CreateSequenceRequest) error { + opts := request.toOpts() + return validateAndExec(v.client, ctx, opts) +} + +func (v *sequences) Alter(ctx context.Context, request *AlterSequenceRequest) error { + opts := request.toOpts() + return validateAndExec(v.client, ctx, opts) +} + +func (v *sequences) Show(ctx context.Context, request *ShowSequenceRequest) ([]Sequence, error) { + opts := request.toOpts() + dbRows, err := validateAndQuery[sequenceRow](v.client, ctx, opts) + if err != nil { + return nil, err + } + resultList := convertRows[sequenceRow, Sequence](dbRows) + return resultList, nil +} + +func (v *sequences) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*Sequence, error) { + request := NewShowSequenceRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLike(&Like{String(id.Name())}) + sequences, err := v.Show(ctx, request) + if err != nil { + return nil, err + } + return collections.FindOne(sequences, func(r Sequence) bool { return r.Name == id.Name() }) +} + +func (v *sequences) Describe(ctx context.Context, id SchemaObjectIdentifier) (*SequenceDetail, error) { + opts := &DescribeSequenceOptions{ + name: id, + } + result, err := validateAndQueryOne[sequenceDetailRow](v.client, ctx, opts) + if err != nil { + return nil, err + } + return result.convert(), nil +} + +func (v *sequences) Drop(ctx context.Context, request *DropSequenceRequest) error { + opts := request.toOpts() + return validateAndExec(v.client, ctx, opts) +} + +func (r *CreateSequenceRequest) toOpts() *CreateSequenceOptions { + opts := &CreateSequenceOptions{ + OrReplace: r.OrReplace, + IfNotExists: r.IfNotExists, + name: r.name, + Start: r.Start, + Increment: r.Increment, + ValuesBehavior: r.ValuesBehavior, + Comment: r.Comment, + } + return opts +} + 
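+// An illustrative mapping: NewAlterSequenceRequest(id).WithSetIncrement(sdk.Int(2))
+// becomes &AlterSequenceOptions{name: id, SetIncrement: sdk.Int(2)}, which validates and
+// renders as `ALTER SEQUENCE <name> SET INCREMENT = 2` (cf. the generated unit tests).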
+func (r *AlterSequenceRequest) toOpts() *AlterSequenceOptions { + opts := &AlterSequenceOptions{ + IfExists: r.IfExists, + name: r.name, + RenameTo: r.RenameTo, + SetIncrement: r.SetIncrement, + + UnsetComment: r.UnsetComment, + } + if r.Set != nil { + opts.Set = &SequenceSet{ + ValuesBehavior: r.Set.ValuesBehavior, + Comment: r.Set.Comment, + } + } + return opts +} + +func (r *ShowSequenceRequest) toOpts() *ShowSequenceOptions { + opts := &ShowSequenceOptions{ + Like: r.Like, + In: r.In, + } + return opts +} + +func (r sequenceRow) convert() *Sequence { + return &Sequence{ + CreatedOn: r.CreatedOn, + Name: r.Name, + SchemaName: r.SchemaName, + DatabaseName: r.DatabaseName, + NextValue: r.NextValue, + Interval: r.Interval, + Owner: r.Owner, + OwnerRoleType: r.OwnerRoleType, + Comment: r.Comment, + Ordered: r.Ordered == "Y", + } +} + +func (r *DescribeSequenceRequest) toOpts() *DescribeSequenceOptions { + opts := &DescribeSequenceOptions{ + name: r.name, + } + return opts +} + +func (r sequenceDetailRow) convert() *SequenceDetail { + return &SequenceDetail{ + CreatedOn: r.CreatedOn, + Name: r.Name, + SchemaName: r.SchemaName, + DatabaseName: r.DatabaseName, + NextValue: r.NextValue, + Interval: r.Interval, + Owner: r.Owner, + OwnerRoleType: r.OwnerRoleType, + Comment: r.Comment, + Ordered: r.Ordered == "Y", + } +} + +func (r *DropSequenceRequest) toOpts() *DropSequenceOptions { + opts := &DropSequenceOptions{ + IfExists: r.IfExists, + name: r.name, + } + if r.Constraint != nil { + opts.Constraint = &SequenceConstraint{ + Cascade: r.Constraint.Cascade, + Restrict: r.Constraint.Restrict, + } + } + return opts +} diff --git a/pkg/sdk/sequences_validations_gen.go b/pkg/sdk/sequences_validations_gen.go new file mode 100644 index 0000000000..c0bb99e9d1 --- /dev/null +++ b/pkg/sdk/sequences_validations_gen.go @@ -0,0 +1,75 @@ +package sdk + +var ( + _ validatable = new(CreateSequenceOptions) + _ validatable = new(AlterSequenceOptions) + _ validatable = new(ShowSequenceOptions) + _ validatable = new(DescribeSequenceOptions) + _ validatable = new(DropSequenceOptions) +) + +func (opts *CreateSequenceOptions) validate() error { + if opts == nil { + return ErrNilOptions + } + var errs []error + if !ValidObjectIdentifier(opts.name) { + errs = append(errs, ErrInvalidObjectIdentifier) + } + if everyValueSet(opts.OrReplace, opts.IfNotExists) { + errs = append(errs, errOneOf("CreateSequenceOptions", "OrReplace", "IfNotExists")) + } + return JoinErrors(errs...) +} + +func (opts *AlterSequenceOptions) validate() error { + if opts == nil { + return ErrNilOptions + } + var errs []error + if !ValidObjectIdentifier(opts.name) { + errs = append(errs, ErrInvalidObjectIdentifier) + } + if opts.RenameTo != nil && !ValidObjectIdentifier(opts.RenameTo) { + errs = append(errs, ErrInvalidObjectIdentifier) + } + if !exactlyOneValueSet(opts.RenameTo, opts.SetIncrement, opts.Set, opts.UnsetComment) { + errs = append(errs, errExactlyOneOf("AlterSequenceOptions", "RenameTo", "SetIncrement", "Set", "UnsetComment")) + } + return JoinErrors(errs...) +} + +func (opts *ShowSequenceOptions) validate() error { + if opts == nil { + return ErrNilOptions + } + var errs []error + return JoinErrors(errs...) +} + +func (opts *DescribeSequenceOptions) validate() error { + if opts == nil { + return ErrNilOptions + } + var errs []error + if !ValidObjectIdentifier(opts.name) { + errs = append(errs, ErrInvalidObjectIdentifier) + } + return JoinErrors(errs...) 
+}
+
+func (opts *DropSequenceOptions) validate() error {
+ if opts == nil {
+ return ErrNilOptions
+ }
+ var errs []error
+ if !ValidObjectIdentifier(opts.name) {
+ errs = append(errs, ErrInvalidObjectIdentifier)
+ }
+ if valueSet(opts.Constraint) {
+ if !exactlyOneValueSet(opts.Constraint.Cascade, opts.Constraint.Restrict) {
+ errs = append(errs, errExactlyOneOf("DropSequenceOptions.Constraint", "Cascade", "Restrict"))
+ }
+ }
+ return JoinErrors(errs...)
+}
diff --git a/pkg/sdk/testint/sequences_integration_test.go b/pkg/sdk/testint/sequences_integration_test.go
new file mode 100644
index 0000000000..be1af06fa4
--- /dev/null
+++ b/pkg/sdk/testint/sequences_integration_test.go
@@ -0,0 +1,171 @@
+package testint
+
+import (
+ "errors"
+ "testing"
+
+ "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk"
+ "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/internal/collections"
+ "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk/internal/random"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+/*
+ * todo: `ALTER SEQUENCE [ IF EXISTS ] UNSET COMMENT` does not work; it fails with: Syntax error: unexpected 'COMMENT'. (line 39)
+ */
+
+func TestInt_Sequences(t *testing.T) {
+ client := testClient(t)
+ ctx := testContext(t)
+
+ databaseTest, schemaTest := testDb(t), testSchema(t)
+
+ cleanupSequenceHandle := func(t *testing.T, id sdk.SchemaObjectIdentifier) func() {
+ t.Helper()
+ return func() {
+ err := client.Sequences.Drop(ctx, sdk.NewDropSequenceRequest(id))
+ if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) {
+ return
+ }
+ require.NoError(t, err)
+ }
+ }
+
+ createSequenceHandle := func(t *testing.T) *sdk.Sequence {
+ t.Helper()
+
+ id := sdk.NewSchemaObjectIdentifier(databaseTest.Name, schemaTest.Name, random.StringN(4))
+ sr := sdk.NewCreateSequenceRequest(id).WithStart(sdk.Int(1)).WithIncrement(sdk.Int(1))
+ err := client.Sequences.Create(ctx, sr)
+ require.NoError(t, err)
+ t.Cleanup(cleanupSequenceHandle(t, id))
+
+ s, err := client.Sequences.ShowByID(ctx, id)
+ require.NoError(t, err)
+ return s
+ }
+
+ assertSequence := func(t *testing.T, id sdk.SchemaObjectIdentifier, interval int, ordered bool, comment string) {
+ t.Helper()
+
+ e, err := client.Sequences.ShowByID(ctx, id)
+ require.NoError(t, err)
+ require.NotEmpty(t, e.CreatedOn)
+ require.Equal(t, id.Name(), e.Name)
+ require.Equal(t, id.DatabaseName(), e.DatabaseName)
+ require.Equal(t, id.SchemaName(), e.SchemaName)
+ require.Equal(t, 1, e.NextValue)
+ require.Equal(t, interval, e.Interval)
+ require.Equal(t, "ACCOUNTADMIN", e.Owner)
+ require.Equal(t, "ROLE", e.OwnerRoleType)
+ require.Equal(t, comment, e.Comment)
+ require.Equal(t, ordered, e.Ordered)
+ }
+
+ t.Run("create sequence", func(t *testing.T) {
+ name := random.StringN(4)
+ id := sdk.NewSchemaObjectIdentifier(databaseTest.Name, schemaTest.Name, name)
+
+ comment := random.StringN(4)
+ request := sdk.NewCreateSequenceRequest(id).
+ WithStart(sdk.Int(1)).
+ WithIncrement(sdk.Int(1)).
+ WithIfNotExists(sdk.Bool(true)).
+ WithValuesBehavior(sdk.ValuesBehaviorPointer(sdk.ValuesBehaviorOrder)).
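+ // Sketch of the SQL this request should produce (cf. the unit tests):
+ //   CREATE SEQUENCE IF NOT EXISTS <name> START = 1 INCREMENT = 1 ORDER COMMENT = '<comment>'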
+ WithComment(&comment) + err := client.Sequences.Create(ctx, request) + require.NoError(t, err) + t.Cleanup(cleanupSequenceHandle(t, id)) + assertSequence(t, id, 1, true, comment) + }) + + t.Run("show event table: without like", func(t *testing.T) { + e1 := createSequenceHandle(t) + e2 := createSequenceHandle(t) + + sequences, err := client.Sequences.Show(ctx, sdk.NewShowSequenceRequest()) + require.NoError(t, err) + require.Equal(t, 2, len(sequences)) + require.Contains(t, sequences, *e1) + require.Contains(t, sequences, *e2) + }) + + t.Run("show sequence: with like", func(t *testing.T) { + e1 := createSequenceHandle(t) + e2 := createSequenceHandle(t) + + sequences, err := client.Sequences.Show(ctx, sdk.NewShowSequenceRequest().WithLike(&sdk.Like{Pattern: &e1.Name})) + require.NoError(t, err) + require.Equal(t, 1, len(sequences)) + require.Contains(t, sequences, *e1) + require.NotContains(t, sequences, *e2) + }) + + t.Run("show sequence: no matches", func(t *testing.T) { + sequences, err := client.Sequences.Show(ctx, sdk.NewShowSequenceRequest().WithLike(&sdk.Like{Pattern: sdk.String("non-existent")})) + require.NoError(t, err) + require.Equal(t, 0, len(sequences)) + }) + + t.Run("describe sequence", func(t *testing.T) { + e := createSequenceHandle(t) + id := sdk.NewSchemaObjectIdentifier(databaseTest.Name, schemaTest.Name, e.Name) + + details, err := client.Sequences.Describe(ctx, id) + require.NoError(t, err) + require.Equal(t, e.CreatedOn, details.CreatedOn) + require.Equal(t, e.Name, details.Name) + require.Equal(t, e.SchemaName, details.SchemaName) + require.Equal(t, e.DatabaseName, details.DatabaseName) + require.Equal(t, e.NextValue, details.NextValue) + require.Equal(t, e.Interval, details.Interval) + require.Equal(t, e.Owner, details.Owner) + require.Equal(t, e.OwnerRoleType, details.OwnerRoleType) + require.Equal(t, e.Comment, details.Comment) + require.Equal(t, e.Ordered, details.Ordered) + }) + + t.Run("alter sequence: set options", func(t *testing.T) { + e := createSequenceHandle(t) + id := sdk.NewSchemaObjectIdentifier(databaseTest.Name, schemaTest.Name, e.Name) + + comment := random.StringN(4) + set := sdk.NewSequenceSetRequest().WithComment(&comment).WithValuesBehavior(sdk.ValuesBehaviorPointer(sdk.ValuesBehaviorNoOrder)) + err := client.Sequences.Alter(ctx, sdk.NewAlterSequenceRequest(id).WithSet(set)) + require.NoError(t, err) + + assertSequence(t, id, 1, false, comment) + }) + + t.Run("alter sequence: set increment", func(t *testing.T) { + e := createSequenceHandle(t) + id := sdk.NewSchemaObjectIdentifier(databaseTest.Name, schemaTest.Name, e.Name) + + increment := 2 + err := client.Sequences.Alter(ctx, sdk.NewAlterSequenceRequest(id).WithSetIncrement(&increment)) + require.NoError(t, err) + assertSequence(t, id, 2, true, "") + }) + + t.Run("alter sequence: rename", func(t *testing.T) { + name := random.String() + id := sdk.NewSchemaObjectIdentifier(databaseTest.Name, schemaTest.Name, name) + + err := client.Sequences.Create(ctx, sdk.NewCreateSequenceRequest(id)) + require.NoError(t, err) + nid := sdk.NewSchemaObjectIdentifier(databaseTest.Name, schemaTest.Name, random.String()) + err = client.Sequences.Alter(ctx, sdk.NewAlterSequenceRequest(id).WithRenameTo(&nid)) + if err != nil { + t.Cleanup(cleanupSequenceHandle(t, id)) + } else { + t.Cleanup(cleanupSequenceHandle(t, nid)) + } + require.NoError(t, err) + + _, err = client.Sequences.ShowByID(ctx, id) + assert.ErrorIs(t, err, collections.ErrObjectNotFound) + _, err = client.Sequences.ShowByID(ctx, nid) + 
require.NoError(t, err) + }) +} diff --git a/pkg/sdk/testint/setup_test.go b/pkg/sdk/testint/setup_test.go index 2af6cbe572..2a0a8b5ee6 100644 --- a/pkg/sdk/testint/setup_test.go +++ b/pkg/sdk/testint/setup_test.go @@ -120,6 +120,7 @@ func (itc *integrationTestContext) initialize() error { if err != nil { return err } + secondaryClient, err := sdk.NewClient(config) if err != nil { return err