Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Warehouse redesign part2 #2887

Merged
merged 32 commits into from
Jun 27, 2024
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
5f51414
Add resource monitor tests
sfc-gh-asawicki Jun 13, 2024
1430189
Add tests for setting/unsetting comment
sfc-gh-asawicki Jun 13, 2024
7d1bbe7
Prove problems with unsets
sfc-gh-asawicki Jun 13, 2024
e62dfb6
Add better log
sfc-gh-asawicki Jun 13, 2024
9d9e839
Add more UNSET tests
sfc-gh-asawicki Jun 13, 2024
29bc467
Add missing test step
sfc-gh-asawicki Jun 14, 2024
4494dfe
Solve TODOs in integration tests
sfc-gh-asawicki Jun 18, 2024
5fda1f7
Add conditional suspension
sfc-gh-asawicki Jun 18, 2024
1317275
Change initially suspended logic
sfc-gh-asawicki Jun 18, 2024
2dd7a93
Adjust state upgrader and the description
sfc-gh-asawicki Jun 18, 2024
7793982
Make basic test case run
sfc-gh-asawicki Jun 24, 2024
3cd20b3
Add resource monitor
sfc-gh-asawicki Jun 24, 2024
f29dc9d
Test import after empty config
sfc-gh-asawicki Jun 24, 2024
7db7f20
Add two migration test cases
sfc-gh-asawicki Jun 24, 2024
95b19a6
Handle more migration cases
sfc-gh-asawicki Jun 24, 2024
ec8867c
Handle more migration cases continuation
sfc-gh-asawicki Jun 24, 2024
02a9fed
Add more validation tests
sfc-gh-asawicki Jun 24, 2024
e5de872
Add test for current warehouse drift
sfc-gh-asawicki Jun 24, 2024
ff3392b
Remove snowflakechecks todo
sfc-gh-asawicki Jun 24, 2024
091cadb
Test int behavior
sfc-gh-asawicki Jun 24, 2024
389177e
Run pre-push
sfc-gh-asawicki Jun 24, 2024
9442a0a
Update migration notes
sfc-gh-asawicki Jun 25, 2024
d95b136
Fix auto resume test
sfc-gh-asawicki Jun 25, 2024
376002b
Change resource monitor to id
sfc-gh-asawicki Jun 25, 2024
1b8e1c4
Fix after review and add import check for zero values
sfc-gh-asawicki Jun 25, 2024
5f1eff8
Handle a few more cases
sfc-gh-asawicki Jun 25, 2024
81f6c40
Fix defaults test
sfc-gh-asawicki Jun 25, 2024
7e49dc0
Run make pre-push
sfc-gh-asawicki Jun 25, 2024
6ed1ece
Bump the timeout for tests
sfc-gh-asawicki Jun 26, 2024
32396a8
Merge branch 'main' into warehouse-redesign-part2
sfc-gh-asawicki Jun 26, 2024
f10d668
Merge with show output schemas
sfc-gh-asawicki Jun 26, 2024
2e6ee5e
Use Name() for account object identifiers in show output
sfc-gh-asawicki Jun 27, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ test-client: ## runs test that checks sdk.Client without instrumentedsql
SF_TF_NO_INSTRUMENTED_SQL=1 SF_TF_GOSNOWFLAKE_LOG_LEVEL=debug go test ./pkg/sdk/internal/client/... -v

test-acceptance-%: ## run acceptance tests for the given resource only, e.g. test-acceptance-Warehouse
TF_ACC=1 SF_TF_ACC_TEST_CONFIGURE_CLIENT_ONCE=true go test -run ^TestAcc_$*_ -v ./...
TF_ACC=1 SF_TF_ACC_TEST_CONFIGURE_CLIENT_ONCE=true go test -run ^TestAcc_$*_ -v -timeout=20m ./...

build-local: ## build the binary locally
go build -o $(BASE_BINARY_NAME) .
Expand Down
3 changes: 3 additions & 0 deletions pkg/resources/custom_diffs.go
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,9 @@ func ComputedIfAnyAttributeChanged(key string, changedAttributeKeys ...string) s
return customdiff.ComputedIf(key, func(ctx context.Context, diff *schema.ResourceDiff, meta interface{}) bool {
var result bool
for _, changedKey := range changedAttributeKeys {
if diff.HasChange(changedKey) {
log.Printf("[DEBUG] ComputedIfAnyAttributeChanged: changed key: %s\n", changedKey)
}
result = result || diff.HasChange(changedKey)
}
return result
Expand Down
37 changes: 36 additions & 1 deletion pkg/resources/diff_suppressions.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
package resources

import "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
import (
"fmt"
"log"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

func NormalizeAndCompare[T comparable](normalize func(string) (T, error)) schema.SchemaDiffSuppressFunc {
return func(_, oldValue, newValue string, _ *schema.ResourceData) bool {
Expand All @@ -21,3 +26,33 @@ func IgnoreAfterCreation(_, _, _ string, d *schema.ResourceData) bool {
// For new resources always show the diff and in every other case we do not want to use this attribute
return d.Id() != ""
}

// IgnoreChangeToCurrentSnowflakeValue returns a diff suppression function that suppresses
// a planned change when the new config value is equal (after %v formatting) to the value
// currently reported by Snowflake under keyInShowOutput in the resource's show output
// attribute. Changes on not-yet-created resources (empty id) are never suppressed.
func IgnoreChangeToCurrentSnowflakeValue(keyInShowOutput string) schema.SchemaDiffSuppressFunc {
	return func(_, _, newValue string, d *schema.ResourceData) bool {
		// A new resource has no Snowflake-side state to compare against.
		if d.Id() == "" {
			return false
		}

		showOutput, ok := d.GetOk(showOutputAttributeName)
		if !ok {
			return false
		}
		// Guard the assertions instead of panicking on an unexpected shape.
		showOutputList, ok := showOutput.([]any)
		if !ok || len(showOutputList) != 1 {
			return false
		}
		result, ok := showOutputList[0].(map[string]any)
		if !ok {
			return false
		}
		// Normalize the Snowflake-side value once and reuse it for both the log and the comparison.
		currentValue := fmt.Sprintf("%v", result[keyInShowOutput])
		log.Printf("[DEBUG] IgnoreChangeToCurrentSnowflakeValue: value for key %s is %v, new value is %s, comparison result is: %t", keyInShowOutput, result[keyInShowOutput], newValue, newValue == currentValue)
		return newValue == currentValue
	}
}

// SuppressIfAny combines the given diff suppression functions into one that
// suppresses the diff as soon as any of them does.
func SuppressIfAny(diffSuppressFunctions ...schema.SchemaDiffSuppressFunc) schema.SchemaDiffSuppressFunc {
	return func(key, oldValue, newValue string, d *schema.ResourceData) bool {
		for _, suppressFunc := range diffSuppressFunctions {
			if suppressFunc(key, oldValue, newValue, d) {
				return true
			}
		}
		return false
	}
}
139 changes: 105 additions & 34 deletions pkg/resources/warehouse.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,80 +29,86 @@ var warehouseSchema = map[string]*schema.Schema{
Type: schema.TypeString,
Optional: true,
ValidateDiagFunc: sdkValidation(sdk.ToWarehouseType),
DiffSuppressFunc: NormalizeAndCompare(sdk.ToWarehouseType),
DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToWarehouseType), IgnoreChangeToCurrentSnowflakeValue("type")),
Description: fmt.Sprintf("Specifies warehouse type. Valid values are (case-insensitive): %s. Warehouse needs to be suspended to change its type. Provider will handle automatic suspension and resumption if needed.", possibleValuesListed(sdk.ValidWarehouseTypesString)),
},
"warehouse_size": {
Type: schema.TypeString,
Optional: true,
ValidateDiagFunc: sdkValidation(sdk.ToWarehouseSize),
DiffSuppressFunc: NormalizeAndCompare(sdk.ToWarehouseSize),
DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToWarehouseSize), IgnoreChangeToCurrentSnowflakeValue("size")),
Description: fmt.Sprintf("Specifies the size of the virtual warehouse. Valid values are (case-insensitive): %s. Consult [warehouse documentation](https://docs.snowflake.com/en/sql-reference/sql/create-warehouse#optional-properties-objectproperties) for the details. Note: removing the size from config will result in the resource recreation.", possibleValuesListed(sdk.ValidWarehouseSizesString)),
},
"max_cluster_count": {
Type: schema.TypeInt,
Description: "Specifies the maximum number of server clusters for the warehouse.",
Optional: true,
ValidateFunc: validation.IntBetween(1, 10),
Type: schema.TypeInt,
Optional: true,
ValidateFunc: validation.IntBetween(1, 10),
DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValue("max_cluster_count"),
Description: "Specifies the maximum number of server clusters for the warehouse.",
},
"min_cluster_count": {
Type: schema.TypeInt,
Description: "Specifies the minimum number of server clusters for the warehouse (only applies to multi-cluster warehouses).",
Optional: true,
ValidateFunc: validation.IntBetween(1, 10),
Type: schema.TypeInt,
Optional: true,
ValidateFunc: validation.IntBetween(1, 10),
DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValue("min_cluster_count"),
Description: "Specifies the minimum number of server clusters for the warehouse (only applies to multi-cluster warehouses).",
},
"scaling_policy": {
	Type:             schema.TypeString,
	Optional:         true,
	ValidateDiagFunc: sdkValidation(sdk.ToScalingPolicy),
	// Bug fix: the suppression must normalize with sdk.ToScalingPolicy; sdk.ToWarehouseType
	// was copy-pasted from the "warehouse_type" attribute and would mis-normalize (or fail on)
	// scaling-policy values, defeating the case-insensitive diff suppression.
	DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToScalingPolicy), IgnoreChangeToCurrentSnowflakeValue("scaling_policy")),
	Description:      fmt.Sprintf("Specifies the policy for automatically starting and shutting down clusters in a multi-cluster warehouse running in Auto-scale mode. Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.ValidWarehouseScalingPoliciesString)),
},
"auto_suspend": {
Type: schema.TypeInt,
Description: "Specifies the number of seconds of inactivity after which a warehouse is automatically suspended.",
Optional: true,
ValidateFunc: validation.IntAtLeast(0),
Default: -1,
Type: schema.TypeInt,
Optional: true,
ValidateFunc: validation.IntAtLeast(0),
DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValue("auto_suspend"),
Description: "Specifies the number of seconds of inactivity after which a warehouse is automatically suspended.",
Default: -1,
},
"auto_resume": {
Type: schema.TypeString,
Description: "Specifies whether to automatically resume a warehouse when a SQL statement (e.g. query) is submitted to it.",
ValidateFunc: validation.StringInSlice([]string{"true", "false"}, true),
Optional: true,
Default: "unknown",
Type: schema.TypeString,
Optional: true,
ValidateFunc: validation.StringInSlice([]string{"true", "false"}, true),
DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValue("auto_resume"),
Description: "Specifies whether to automatically resume a warehouse when a SQL statement (e.g. query) is submitted to it.",
sfc-gh-jcieslak marked this conversation as resolved.
Show resolved Hide resolved
Default: "unknown",
},
"initially_suspended": {
Type: schema.TypeBool,
Description: "Specifies whether the warehouse is created initially in the ‘Suspended’ state.",
Optional: true,
DiffSuppressFunc: IgnoreAfterCreation,
Description: "Specifies whether the warehouse is created initially in the ‘Suspended’ state.",
},
"resource_monitor": {
Type: schema.TypeString,
Description: "Specifies the name of a resource monitor that is explicitly assigned to the warehouse.",
Optional: true,
ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](),
DiffSuppressFunc: suppressIdentifierQuoting,
DiffSuppressFunc: SuppressIfAny(suppressIdentifierQuoting, IgnoreChangeToCurrentSnowflakeValue("resource_monitor")),
Description: "Specifies the name of a resource monitor that is explicitly assigned to the warehouse.",
},
"comment": {
Type: schema.TypeString,
Optional: true,
Description: "Specifies a comment for the warehouse.",
},
"enable_query_acceleration": {
Type: schema.TypeString,
Description: "Specifies whether to enable the query acceleration service for queries that rely on this warehouse for compute resources.",
ValidateFunc: validation.StringInSlice([]string{"true", "false"}, true),
Optional: true,
Default: "unknown",
Type: schema.TypeString,
Optional: true,
ValidateFunc: validation.StringInSlice([]string{"true", "false"}, true),
DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValue("enable_query_acceleration"),
Description: "Specifies whether to enable the query acceleration service for queries that rely on this warehouse for compute resources.",
sfc-gh-jcieslak marked this conversation as resolved.
Show resolved Hide resolved
Default: "unknown",
},
"query_acceleration_max_scale_factor": {
Type: schema.TypeInt,
Optional: true,
ValidateFunc: validation.IntBetween(0, 100),
Description: "Specifies the maximum scale factor for leasing compute resources for query acceleration. The scale factor is used as a multiplier based on warehouse size.",
Default: -1,
Type: schema.TypeInt,
Optional: true,
ValidateFunc: validation.IntBetween(0, 100),
DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValue("query_acceleration_max_scale_factor"),
Description: "Specifies the maximum scale factor for leasing compute resources for query acceleration. The scale factor is used as a multiplier based on warehouse size.",
Default: -1,
},
strings.ToLower(string(sdk.ObjectParameterMaxConcurrencyLevel)): {
Type: schema.TypeInt,
Expand Down Expand Up @@ -352,6 +358,71 @@ func GetReadWarehouseFunc(withExternalChangesMarking bool) schema.ReadContextFun
}
}

// These are all identity sets, needed for the case where:
// - previous config was empty (therefore Snowflake defaults had been used)
// - new config have the same values that are already in SF
if !d.GetRawConfig().IsNull() {
if v := d.GetRawConfig().AsValueMap()["warehouse_type"]; !v.IsNull() {
sfc-gh-jcieslak marked this conversation as resolved.
Show resolved Hide resolved
if err = d.Set("warehouse_type", v.AsString()); err != nil {
return diag.FromErr(err)
}
}
if v := d.GetRawConfig().AsValueMap()["warehouse_size"]; !v.IsNull() {
if err = d.Set("warehouse_size", v.AsString()); err != nil {
return diag.FromErr(err)
}
}
if v := d.GetRawConfig().AsValueMap()["max_cluster_count"]; !v.IsNull() {
intVal, _ := v.AsBigFloat().Int64()
if err = d.Set("max_cluster_count", intVal); err != nil {
return diag.FromErr(err)
}
}
if v := d.GetRawConfig().AsValueMap()["min_cluster_count"]; !v.IsNull() {
intVal, _ := v.AsBigFloat().Int64()
if err = d.Set("min_cluster_count", intVal); err != nil {
return diag.FromErr(err)
}
}
if v := d.GetRawConfig().AsValueMap()["scaling_policy"]; !v.IsNull() {
if err = d.Set("scaling_policy", v.AsString()); err != nil {
return diag.FromErr(err)
}
}
if v := d.GetRawConfig().AsValueMap()["auto_suspend"]; !v.IsNull() {
intVal, _ := v.AsBigFloat().Int64()
if err = d.Set("auto_suspend", intVal); err != nil {
return diag.FromErr(err)
}
}
if v := d.GetRawConfig().AsValueMap()["auto_resume"]; !v.IsNull() {
if err = d.Set("auto_resume", v.AsString()); err != nil {
return diag.FromErr(err)
}
}
if v := d.GetRawConfig().AsValueMap()["resource_monitor"]; !v.IsNull() {
if err = d.Set("resource_monitor", v.AsString()); err != nil {
return diag.FromErr(err)
}
}
if v := d.GetRawConfig().AsValueMap()["comment"]; !v.IsNull() {
if err = d.Set("comment", v.AsString()); err != nil {
return diag.FromErr(err)
}
}
if v := d.GetRawConfig().AsValueMap()["enable_query_acceleration"]; !v.IsNull() {
if err = d.Set("enable_query_acceleration", v.AsString()); err != nil {
return diag.FromErr(err)
}
}
if v := d.GetRawConfig().AsValueMap()["query_acceleration_max_scale_factor"]; !v.IsNull() {
intVal, _ := v.AsBigFloat().Int64()
if err = d.Set("query_acceleration_max_scale_factor", intVal); err != nil {
return diag.FromErr(err)
}
}
}

if err = d.Set(showOutputAttributeName, []map[string]any{schemas.WarehouseToSchema(w)}); err != nil {
return diag.FromErr(err)
}
Expand Down
Loading
Loading