Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

azurerm_stream_analytics_output_mssql - add support for new properties max_batch_count and max_writer_count #16409

Merged
merged 4 commits into from
Apr 20, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,20 @@ func resourceStreamAnalyticsOutputSql() *pluginsdk.Resource {
Sensitive: true,
ValidateFunc: validation.StringIsNotEmpty,
},

"max_batch_count": {
Type: pluginsdk.TypeFloat,
Optional: true,
Default: 10000,
ValidateFunc: validation.FloatBetween(1, 1073741824),
},

"max_writer_count": {
Type: pluginsdk.TypeFloat,
Optional: true,
Default: 1,
ValidateFunc: validation.FloatBetween(0, 1),
},
},
}
}
Expand Down Expand Up @@ -124,11 +138,13 @@ func resourceStreamAnalyticsOutputSqlCreateUpdate(d *pluginsdk.ResourceData, met
Datasource: &streamanalytics.AzureSQLDatabaseOutputDataSource{
Type: streamanalytics.TypeBasicOutputDataSourceTypeMicrosoftSQLServerDatabase,
AzureSQLDatabaseOutputDataSourceProperties: &streamanalytics.AzureSQLDatabaseOutputDataSourceProperties{
Server: utils.String(server),
Database: utils.String(databaseName),
User: utils.String(sqlUser),
Password: utils.String(sqlUserPassword),
Table: utils.String(tableName),
Server: utils.String(server),
Database: utils.String(databaseName),
User: utils.String(sqlUser),
Password: utils.String(sqlUserPassword),
Table: utils.String(tableName),
MaxBatchCount: utils.Float(d.Get("max_batch_count").(float64)),
MaxWriterCount: utils.Float(d.Get("max_writer_count").(float64)),
},
},
},
Expand Down Expand Up @@ -182,6 +198,18 @@ func resourceStreamAnalyticsOutputSqlRead(d *pluginsdk.ResourceData, meta interf
d.Set("database", v.Database)
d.Set("table", v.Table)
d.Set("user", v.User)

maxBatchCount := float64(10000)
if v.MaxBatchCount != nil {
maxBatchCount = *v.MaxBatchCount
}
d.Set("max_batch_count", maxBatchCount)

maxWriterCount := float64(1)
if v.MaxWriterCount != nil {
maxWriterCount = *v.MaxWriterCount
}
d.Set("max_writer_count", maxWriterCount)
}

return nil
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,42 @@ func TestAccStreamAnalyticsOutputSql_requiresImport(t *testing.T) {
})
}

// TestAccStreamAnalyticsOutputSql_maxBatchCountAndMaxWriterCount verifies that
// the max_batch_count and max_writer_count properties can be set, updated in
// place, and removed again, with the resource importable after every step.
func TestAccStreamAnalyticsOutputSql_maxBatchCountAndMaxWriterCount(t *testing.T) {
	data := acceptance.BuildTestData(t, "azurerm_stream_analytics_output_mssql", "test")
	r := StreamAnalyticsOutputSqlResource{}

	// Every step applies a config and then runs the same existence check.
	existsInAzure := acceptance.ComposeTestCheckFunc(
		check.That(data.ResourceName).ExistsInAzure(r),
	)

	data.ResourceTest(t, r, []acceptance.TestStep{
		// Start from the basic config, without either of the new properties.
		{Config: r.basic(data), Check: existsInAzure},
		data.ImportStep("password"),
		// Set explicit values for both properties.
		{Config: r.maxBatchCountAndMaxWriterCount(data, 10001, 0), Check: existsInAzure},
		data.ImportStep("password"),
		// Update both values in place.
		{Config: r.maxBatchCountAndMaxWriterCount(data, 10002, 1), Check: existsInAzure},
		data.ImportStep("password"),
		// Revert to the basic config to confirm the properties can be unset.
		{Config: r.basic(data), Check: existsInAzure},
		data.ImportStep("password"),
	})
}

func (r StreamAnalyticsOutputSqlResource) Exists(ctx context.Context, client *clients.Client, state *pluginsdk.InstanceState) (*bool, error) {
name := state.Attributes["name"]
jobName := state.Attributes["stream_analytics_job_name"]
Expand Down Expand Up @@ -137,6 +173,28 @@ resource "azurerm_stream_analytics_output_mssql" "import" {
`, template)
}

// maxBatchCountAndMaxWriterCount returns an acceptance-test configuration for
// the mssql output resource with explicit max_batch_count and max_writer_count
// values, layered on top of the shared template configuration.
func (r StreamAnalyticsOutputSqlResource) maxBatchCountAndMaxWriterCount(data acceptance.TestData, maxBatchCount, maxWriterCount float64) string {
	return fmt.Sprintf(`
%s

resource "azurerm_stream_analytics_output_mssql" "test" {
  name                      = "acctestoutput-%d"
  stream_analytics_job_name = azurerm_stream_analytics_job.test.name
  resource_group_name       = azurerm_stream_analytics_job.test.resource_group_name

  server   = azurerm_sql_server.test.fully_qualified_domain_name
  user     = azurerm_sql_server.test.administrator_login
  password = azurerm_sql_server.test.administrator_login_password
  database = azurerm_sql_database.test.name
  table    = "AccTestTable"

  max_batch_count  = %f
  max_writer_count = %f
}
`, r.template(data), data.RandomInteger, maxBatchCount, maxWriterCount)
}

func (r StreamAnalyticsOutputSqlResource) template(data acceptance.TestData) string {
return fmt.Sprintf(`
provider "azurerm" {
Expand Down
4 changes: 4 additions & 0 deletions website/docs/r/stream_analytics_output_mssql.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,10 @@ The following arguments are supported:

* `table` - (Required) Table in the database that the output points to. Changing this forces a new resource to be created.

* `max_batch_count` - (Optional) The max batch count to write to the SQL Database. Defaults to `10000`. Possible values are between `1` and `1073741824`.

* `max_writer_count` - (Optional) The max writer count for the SQL Database. Defaults to `1`. Possible values are `0`, which bases the writer count on the query partition, and `1`, which corresponds to a single writer.

## Attributes Reference

The following attributes are exported in addition to the arguments listed above:
Expand Down