fix: Fix/prove issues #2733 #2735 #2763 #2767 #2777

Merged · 7 commits · May 7, 2024
5 changes: 5 additions & 0 deletions MIGRATION_GUIDE.md
@@ -4,6 +4,11 @@ This document is meant to help you migrate your Terraform config to the newest version. It will
describe deprecations or breaking changes and help you to change your configuration to keep the same (or similar) behavior
across different versions.

## v0.89.0 ➞ v0.90.0
### snowflake_table resource changes
#### *(behavior change)* Validation added to column type
While solving issue [#2733](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2733) we introduced diff suppression for `column.type`. To make it work correctly, we also added validation for it. This should not cause any problems, but it is worth noting in case you use data types the provider is not aware of.
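
For illustration, here is a minimal, hypothetical sketch of how such a validation/diff-suppression pair can work (the provider's actual `dataTypeValidateFunc` and `dataTypeDiffSuppressFunc` are not shown in this changeset, and the synonym table below is an assumption): both values are normalized to a canonical base type before comparison, so synonyms such as `text` and `VARCHAR(16777216)` no longer produce a plan diff.

```go
// Hypothetical sketch of diff suppression for column.type: normalize both
// values to a canonical base type, then compare. The synonym table is
// illustrative; the real provider derives this from its SDK data types.
package main

import (
	"fmt"
	"strings"
)

var canonical = map[string]string{
	"VARCHAR": "VARCHAR", "TEXT": "VARCHAR", "STRING": "VARCHAR",
	"NVARCHAR": "VARCHAR", "NVARCHAR2": "VARCHAR",
	"INT": "NUMBER", "INTEGER": "NUMBER", "NUMBER": "NUMBER", "DECIMAL": "NUMBER",
	"DATETIME": "TIMESTAMP_NTZ", "TIMESTAMP_NTZ": "TIMESTAMP_NTZ",
}

// normalize validates and canonicalizes a type string, e.g. "text" -> "VARCHAR",
// "NUMBER(38,0)" -> "NUMBER". Unknown types fail validation.
func normalize(s string) (string, error) {
	base := strings.ToUpper(strings.TrimSpace(s))
	if i := strings.IndexByte(base, '('); i >= 0 {
		base = strings.TrimSpace(base[:i]) // drop precision/scale for comparison
	}
	if c, ok := canonical[base]; ok {
		return c, nil
	}
	return "", fmt.Errorf("unsupported column type: %q", s)
}

// suppressDiff mirrors a schema.DiffSuppressFunc: report true when the old
// and new values normalize to the same canonical type.
func suppressDiff(oldValue, newValue string) bool {
	o, errOld := normalize(oldValue)
	n, errNew := normalize(newValue)
	return errOld == nil && errNew == nil && o == n
}

func main() {
	fmt.Println(suppressDiff("text", "VARCHAR(16777216)")) // true
	fmt.Println(suppressDiff("NUMBER(38,0)", "int"))       // true
	fmt.Println(suppressDiff("DATETIME", "VARCHAR"))       // false
}
```

Wired into a `schema.Schema` as `ValidateFunc`/`DiffSuppressFunc`, this keeps plans stable for equivalent spellings while still rejecting unknown types up front.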

## v0.88.0 ➞ v0.89.0
#### *(behavior change)* ForceNew removed
The `ForceNew` field was removed in favor of in-place Update for the `name` parameter in:
7 changes: 7 additions & 0 deletions pkg/acceptance/helpers/warehouse_client.go
@@ -68,3 +68,10 @@ func (c *WarehouseClient) UpdateMaxConcurrencyLevel(t *testing.T, id sdk.Account
err := c.client().Alter(ctx, id, &sdk.AlterWarehouseOptions{Set: &sdk.WarehouseSet{MaxConcurrencyLevel: sdk.Int(level)}})
require.NoError(t, err)
}

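// Show returns the warehouse with the given identifier, allowing tests to
// assert the current server-side state.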
func (c *WarehouseClient) Show(t *testing.T, id sdk.AccountObjectIdentifier) (*sdk.Warehouse, error) {
t.Helper()
ctx := context.Background()

return c.client().ShowByID(ctx, id)
}
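
// Sketch (not part of this changeset): how an acceptance test might use the
// new helper to assert server-side state after an apply step. The asserted
// field names are assumptions based on the sdk.Warehouse struct.
func assertWarehouseExists(t *testing.T, c *WarehouseClient, id sdk.AccountObjectIdentifier) {
	t.Helper()
	warehouse, err := c.Show(t, id)
	require.NoError(t, err)
	require.Equal(t, id.Name(), warehouse.Name)
}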
40 changes: 40 additions & 0 deletions pkg/resources/function_acceptance_test.go
@@ -289,3 +289,43 @@ resource "snowflake_function" "f" {
}
`, database, schema, name, comment)
}

// TODO [SNOW-1348103]: do not trim the data type (e.g. NUMBER(10, 2) -> NUMBER loses information, as shown in this test); finish the test
// proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735
func TestAcc_Function_gh2735(t *testing.T) {
t.Skipf("Will be fixed with functions redesign in SNOW-1348103")
name := acc.TestClient().Ids.Alpha()
resourceName := "snowflake_function.f"

resource.Test(t, resource.TestCase{
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
PreCheck: func() { acc.TestAccPreCheck(t) },
TerraformVersionChecks: []tfversion.TerraformVersionCheck{
tfversion.RequireAbove(tfversion.Version1_5_0),
},
CheckDestroy: acc.CheckDestroy(t, resources.Function),
Steps: []resource.TestStep{
{
Config: functionConfigGh2735(acc.TestDatabaseName, acc.TestSchemaName, name),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(resourceName, "name", name),
),
},
},
})
}

func functionConfigGh2735(database string, schema string, name string) string {
return fmt.Sprintf(`
resource "snowflake_function" "f" {
database = "%[1]s"
schema = "%[2]s"
name = "%[3]s"
return_type = "TABLE (NUM1 NUMBER, NUM2 NUMBER(10,2))"

statement = <<EOT
SELECT 12,13.4
EOT
}
`, database, schema, name)
}
14 changes: 5 additions & 9 deletions pkg/resources/table.go
@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"log"
"slices"
"strconv"
"strings"

@@ -56,14 +55,11 @@ var tableSchema = map[string]*schema.Schema{
Description: "Column name",
},
"type": {
Type: schema.TypeString,
Required: true,
Description: "Column type, e.g. VARIANT",
DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
// these are all equivalent as per https://docs.snowflake.com/en/sql-reference/data-types-text.html
varcharType := []string{"VARCHAR(16777216)", "VARCHAR", "text", "string", "NVARCHAR", "NVARCHAR2", "CHAR VARYING", "NCHAR VARYING"}
return slices.Contains(varcharType, new) && slices.Contains(varcharType, old)
},
Type: schema.TypeString,
Required: true,
Description: "Column type, e.g. VARIANT",
ValidateFunc: dataTypeValidateFunc,
DiffSuppressFunc: dataTypeDiffSuppressFunc,
},
"nullable": {
Type: schema.TypeBool,
148 changes: 90 additions & 58 deletions pkg/resources/table_acceptance_test.go
@@ -1253,9 +1253,6 @@ func TestAcc_TableCollate(t *testing.T) {
Config: tableColumnWithCollate(accName, acc.TestDatabaseName, acc.TestSchemaName),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_table.test_table", "name", accName),
resource.TestCheckResourceAttr("snowflake_table.test_table", "database", acc.TestDatabaseName),
resource.TestCheckResourceAttr("snowflake_table.test_table", "schema", acc.TestSchemaName),
resource.TestCheckResourceAttr("snowflake_table.test_table", "comment", "Terraform acceptance test"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.#", "3"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.name", "column1"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.collate", "en"),
@@ -1266,53 +1263,27 @@
),
},
{
Config: alterTableColumnWithCollate(accName, acc.TestDatabaseName, acc.TestSchemaName),
Config: addColumnWithCollate(accName, acc.TestDatabaseName, acc.TestSchemaName),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_table.test_table", "name", accName),
resource.TestCheckResourceAttr("snowflake_table.test_table", "database", acc.TestDatabaseName),
resource.TestCheckResourceAttr("snowflake_table.test_table", "schema", acc.TestSchemaName),
resource.TestCheckResourceAttr("snowflake_table.test_table", "comment", "Terraform acceptance test"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.#", "4"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.name", "column1"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.collate", "en"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.name", "column2"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.collate", ""),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.2.name", "column3"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.2.collate", ""),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.3.name", "column4"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.3.collate", "utf8"),
),
},
{
Config: alterTableColumnWithIncompatibleCollate(accName, acc.TestDatabaseName, acc.TestSchemaName),
ExpectError: regexp.MustCompile("\"VARCHAR\\(200\\) COLLATE 'fr'\" because they have incompatible collations\\."),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_table.test_table", "name", accName),
resource.TestCheckResourceAttr("snowflake_table.test_table", "database", acc.TestDatabaseName),
resource.TestCheckResourceAttr("snowflake_table.test_table", "schema", acc.TestSchemaName),
resource.TestCheckResourceAttr("snowflake_table.test_table", "comment", "Terraform acceptance test"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.#", "4"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.name", "column1"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.collate", "en"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.name", "column2"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.collate", ""),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.2.name", "column3"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.2.collate", ""),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.3.name", "column4"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.3.collate", "utf8"),
),
ExpectError: regexp.MustCompile("\"VARCHAR\\(100\\) COLLATE 'fr'\" because they have incompatible collations\\."),
},
},
})
}

func tableColumnWithCollate(name string, databaseName string, schemaName string) string {
s := `
return fmt.Sprintf(`
resource "snowflake_table" "test_table" {
name = "%s"
database = "%s"
schema = "%s"
comment = "Terraform acceptance test"
database = "%[2]s"
schema = "%[3]s"
name = "%[1]s"

column {
name = "column1"
Expand All @@ -1329,72 +1300,67 @@ resource "snowflake_table" "test_table" {
type = "VARCHAR(100)"
}
}
`
return fmt.Sprintf(s, name, databaseName, schemaName)
`, name, databaseName, schemaName)
}

func alterTableColumnWithCollate(name string, databaseName string, schemaName string) string {
s := `
func addColumnWithCollate(name string, databaseName string, schemaName string) string {
return fmt.Sprintf(`
resource "snowflake_table" "test_table" {
name = "%s"
database = "%s"
schema = "%s"
comment = "Terraform acceptance test"
database = "%[2]s"
schema = "%[3]s"
name = "%[1]s"

column {
name = "column1"
type = "VARCHAR(200)"
type = "VARCHAR(100)"
collate = "en"
}
column {
name = "column2"
type = "VARCHAR(200)"
type = "VARCHAR(100)"
collate = ""
}
column {
name = "column3"
type = "VARCHAR(200)"
type = "VARCHAR(100)"
}
column {
name = "column4"
type = "VARCHAR"
collate = "utf8"
}
}
`
return fmt.Sprintf(s, name, databaseName, schemaName)
`, name, databaseName, schemaName)
}

func alterTableColumnWithIncompatibleCollate(name string, databaseName string, schemaName string) string {
s := `
return fmt.Sprintf(`
resource "snowflake_table" "test_table" {
name = "%s"
database = "%s"
schema = "%s"
comment = "Terraform acceptance test"
database = "%[2]s"
schema = "%[3]s"
name = "%[1]s"

column {
name = "column1"
type = "VARCHAR(200)"
type = "VARCHAR(100)"
collate = "fr"
}
column {
name = "column2"
type = "VARCHAR(200)"
type = "VARCHAR(100)"
collate = ""
}
column {
name = "column3"
type = "VARCHAR(200)"
type = "VARCHAR(100)"
}
column {
name = "column4"
type = "VARCHAR"
collate = "utf8"
}
}
`
return fmt.Sprintf(s, name, databaseName, schemaName)
`, name, databaseName, schemaName)
}

func TestAcc_TableRename(t *testing.T) {
Expand Down Expand Up @@ -1836,8 +1802,10 @@ resource "snowflake_table" "test_table" {
`, name, databaseName, schemaName)
}

// TODO [SNOW-1348114]: do not trim the data type (e.g. NUMBER(38,0) -> NUMBER(36,0) diff is ignored); finish the test
// proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2588 is fixed
func TestAcc_ColumnTypeChangeWithNonTextType(t *testing.T) {
t.Skipf("Will be fixed with tables redesign in SNOW-1348114")
accName := acc.TestClient().Ids.Alpha()

resource.Test(t, resource.TestCase{
@@ -1948,3 +1916,67 @@ func setTableDataRetentionTime(t *testing.T, id sdk.SchemaObjectIdentifier, days
require.NoError(t, err)
}
}

// proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2733 is fixed
func TestAcc_Table_gh2733(t *testing.T) {
name := acc.TestClient().Ids.Alpha()

resource.Test(t, resource.TestCase{
ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
PreCheck: func() { acc.TestAccPreCheck(t) },
TerraformVersionChecks: []tfversion.TerraformVersionCheck{
tfversion.RequireAbove(tfversion.Version1_5_0),
},
CheckDestroy: acc.CheckDestroy(t, resources.Table),
Steps: []resource.TestStep{
{
Config: tableConfigGh2733(acc.TestDatabaseName, acc.TestSchemaName, name),
Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr("snowflake_table.test_table", "name", name),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.name", "MY_INT"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.0.type", "NUMBER(38,0)"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.name", "MY_STRING"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.1.type", "VARCHAR(16777216)"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.2.name", "MY_DATE"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.2.type", "TIMESTAMP_NTZ(9)"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.3.name", "MY_DATE2"),
resource.TestCheckResourceAttr("snowflake_table.test_table", "column.3.type", "TIMESTAMP_NTZ(9)"),
),
},
},
})
}

func tableConfigGh2733(database string, schema string, name string) string {
return fmt.Sprintf(`
resource "snowflake_table" "test_table" {
database = "%[1]s"
schema = "%[2]s"
name = "%[3]s"

column {
name = "MY_INT"
type = "int"
# type = "NUMBER(38,0)" # Should be equivalent
}

column {
name = "MY_STRING"
type = "VARCHAR(16777216)"
# type = "STRING" # Should be equivalent
}

column {
name = "MY_DATE"
type = "TIMESTAMP_NTZ"
# type = "TIMESTAMP_NTZ(9)" # Should be equivalent
}

column {
name = "MY_DATE2"
type = "DATETIME"
# type = "TIMESTAMP_NTZ" # Equivalent to TIMESTAMP_NTZ
}
}
`, database, schema, name)
}
9 changes: 6 additions & 3 deletions pkg/resources/warehouse.go
@@ -123,9 +123,12 @@ var warehouseSchema = map[string]*schema.Schema{
Description: "Specifies whether to enable the query acceleration service for queries that rely on this warehouse for compute resources.",
},
"query_acceleration_max_scale_factor": {
Type: schema.TypeInt,
Optional: true,
Default: 8,
Type: schema.TypeInt,
Optional: true,
Default: 8,
DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool {
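// The scale factor is irrelevant while query acceleration is disabled,
// so suppress diffs on it until enable_query_acceleration is set to true.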
return !d.Get("enable_query_acceleration").(bool)
},
ValidateFunc: validation.IntBetween(0, 100),
Description: "Specifies the maximum scale factor for leasing compute resources for query acceleration. The scale factor is used as a multiplier based on warehouse size.",
},