Commit

Add basic benchmarks
sfc-gh-jmichalak committed Jan 21, 2025
1 parent 34068e4 commit 12dbc35
Showing 7 changed files with 317 additions and 0 deletions.
1 change: 1 addition & 0 deletions pkg/manual_tests/README.md
@@ -7,3 +7,4 @@ and should contain a file describing the manual steps to perform the test.
Here's the list of cases we currently cannot reproduce and write acceptance tests for:
- `user_default_database_and_role`: Setting up a user with default_namespace and default_role, then logging into that user to see what happens with those values in various scenarios (e.g. insufficient privileges on the role).
- `authentication_methods`: Some of the authentication methods require manual steps, like confirming MFA or setting more dependencies.
- `benchmarks`: Performance benchmarks require manually running `terraform` command to imitate the user workflow.
22 changes: 22 additions & 0 deletions pkg/manual_tests/benchmarks/.terraform.lock.hcl

Some generated files are not rendered by default.

35 changes: 35 additions & 0 deletions pkg/manual_tests/benchmarks/README.md
@@ -0,0 +1,35 @@
# Benchmarks manual tests

This directory holds the steps for manual performance tests of the provider. These tests use simple Terraform files and are run manually with the `terraform` CLI to imitate the user workflow and avoid the bias introduced by the Terraform SDK testing libraries and the binary itself.
The tests are organized by resource type, e.g. schemas, tasks, and warehouses.

## Run tests

- Preferably use your secondary test account to avoid potential conflicts with our "main" environments.
- Configure the needed modules in `main.tf`. If you do not want to use resources from a module, simply set its `resource_count` to 0. Note that this field refers to one "type" of test, meaning that one resource can have a few variations (e.g. with dependencies set up and optional fields filled).
- Run `terraform init -upgrade` to enable the modules.
- Run regular Terraform commands, like `terraform apply`.
- Do not forget to remove the resources with `terraform destroy`.
- To speed up the commands, you can use `-refresh=false` and `-parallelism=N` (the default is 10); see the example run below.
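
Putting the steps above together, an end-to-end run could look like the following sketch (the flag values are only examples):

```bash
# Initialize the configured modules (rerun after changing the module blocks in main.tf).
terraform init -upgrade

# Create the benchmark resources; skipping refresh and raising parallelism speeds up large runs.
terraform apply -auto-approve -refresh=false -parallelism=20

# Remove everything created by the benchmark run.
terraform destroy -auto-approve -refresh=false -parallelism=20
```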

## State size

After running the `terraform` commands, the state is saved in the `terraform.tfstate` file. This file can be analyzed in terms of size.

Run the following command to capture the state size:

```bash
ls -lh terraform.tfstate
```

To check the potential size reduction from removing parameters, first remove them from the state (using [jq](https://github.com/jqlang/jq)):

```bash
jq 'del(.resources[].instances[].attributes.parameters)' terraform.tfstate > terraform_without_parameters.tfstate
```

Then capture the size of the new state:

```bash
ls -lh terraform_without_parameters.tfstate
```
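
To compare the two sizes without eyeballing `ls -lh`, a small helper along these lines (an illustrative sketch, not part of the test setup) prints both sizes in bytes and the difference:

```bash
# Compare the state size with and without the parameters attribute.
before=$(wc -c < terraform.tfstate)
after=$(wc -c < terraform_without_parameters.tfstate)
echo "with parameters:    ${before} bytes"
echo "without parameters: ${after} bytes"
echo "reduction:          $((before - after)) bytes"
```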
27 changes: 27 additions & 0 deletions pkg/manual_tests/benchmarks/main.tf
@@ -0,0 +1,27 @@
module "schema" {
source = "./schema"
resource_count = 1
}

module "warehouse" {
source = "./warehouse"
resource_count = 0
}

module "task" {
source = "./task"
resource_count = 0
}

provider "snowflake" {
profile = "secondary_test_account"
}

terraform {
required_providers {
snowflake = {
source = "Snowflake-Labs/snowflake"
version = "= 1.0.1"
}
}
}
58 changes: 58 additions & 0 deletions pkg/manual_tests/benchmarks/schema/schema.tf
@@ -0,0 +1,58 @@
# Test setup.
variable "resource_count" {
type = number
}

terraform {
required_providers {
snowflake = {
source = "Snowflake-Labs/snowflake"
version = "= 1.0.1"
}
}
}

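# Produces a map like { "0" = "0", "1" = "1" } for resource_count = 2, used as for_each keys below.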
locals {
id_number_list = {
for index, val in range(0, var.resource_count) :
val => tostring(val)
}
}

resource "snowflake_database" "database" {
count = var.resource_count > 0 ? 1 : 0
name = "PERFORMANCE_TESTS"
}

# basic resource
resource "snowflake_schema" "all_schemas" {
database = snowflake_database.database[0].name
for_each = local.id_number_list
name = format("perf_basic_%v", each.key)
}

# resource with all fields set (without dependencies)
resource "snowflake_schema" "schema" {
database = snowflake_database.database[0].name
for_each = local.id_number_list
name = format("perf_complete_%v", each.key)

with_managed_access = true
is_transient = true
comment = "my schema"
data_retention_time_in_days = 1
max_data_extension_time_in_days = 20
replace_invalid_characters = false
default_ddl_collation = "en_US"
storage_serialization_policy = "COMPATIBLE"
log_level = "INFO"
trace_level = "ALWAYS"
suspend_task_after_num_failures = 10
task_auto_retry_attempts = 10
user_task_managed_initial_warehouse_size = "LARGE"
user_task_timeout_ms = 3600000
user_task_minimum_trigger_interval_in_seconds = 120
quoted_identifiers_ignore_case = false
enable_console_output = false
pipe_execution_paused = false
}
122 changes: 122 additions & 0 deletions pkg/manual_tests/benchmarks/task/task.tf
@@ -0,0 +1,122 @@
# Test setup.
variable "resource_count" {
type = number
}

terraform {
required_providers {
snowflake = {
source = "Snowflake-Labs/snowflake"
version = "= 1.0.1"
}
}
}

locals {
id_number_list = {
for index, val in range(0, var.resource_count) :
val => tostring(val)
}
}

resource "snowflake_database" "database" {
count = var.resource_count > 0 ? 1 : 0
name = "PERFORMANCE_TESTS"
}

resource "snowflake_schema" "schema" {
database = snowflake_database.database[0].name
count = var.resource_count > 0 ? 1 : 0
name = "PERFORMANCE_TESTS"
}

# Basic standalone task
resource "snowflake_task" "task" {
database = snowflake_database.database[0].name
schema = snowflake_schema.schema[0].name
for_each = local.id_number_list
name = format("perf_basic_%v", each.key)
warehouse = "SNOWFLAKE"
started = true
schedule {
minutes = 5
}
sql_statement = "select 1"
}

# Complete standalone task
resource "snowflake_task" "test" {
database = snowflake_database.database[0].name
schema = snowflake_schema.schema[0].name
for_each = local.id_number_list
name = format("perf_complete_%v", each.key)
warehouse = "SNOWFLAKE"
started = true
sql_statement = "select 1"

config = "{\"key\":\"value\"}"
allow_overlapping_execution = true
comment = "complete task"

schedule {
minutes = 10
}

# Session Parameters
suspend_task_after_num_failures = 10
task_auto_retry_attempts = 0
user_task_minimum_trigger_interval_in_seconds = 30
user_task_timeout_ms = 3600000
abort_detached_query = false
autocommit = true
binary_input_format = "HEX"
binary_output_format = "HEX"
client_memory_limit = 1536
client_metadata_request_use_connection_ctx = false
client_prefetch_threads = 4
client_result_chunk_size = 160
client_result_column_case_insensitive = false
client_session_keep_alive = false
client_session_keep_alive_heartbeat_frequency = 3600
client_timestamp_type_mapping = "TIMESTAMP_LTZ"
date_input_format = "AUTO"
date_output_format = "YYYY-MM-DD"
enable_unload_physical_type_optimization = true
error_on_nondeterministic_merge = true
error_on_nondeterministic_update = false
geography_output_format = "GeoJSON"
geometry_output_format = "GeoJSON"
jdbc_use_session_timezone = true
json_indent = 2
lock_timeout = 43200
log_level = "OFF"
multi_statement_count = 1
noorder_sequence_as_default = true
odbc_treat_decimal_as_int = false
query_tag = ""
quoted_identifiers_ignore_case = false
rows_per_resultset = 0
s3_stage_vpce_dns_name = ""
search_path = "$current, $public"
statement_queued_timeout_in_seconds = 0
statement_timeout_in_seconds = 172800
strict_json_output = false
timestamp_day_is_always_24h = false
timestamp_input_format = "AUTO"
timestamp_ltz_output_format = ""
timestamp_ntz_output_format = "YYYY-MM-DD HH24:MI:SS.FF3"
timestamp_output_format = "YYYY-MM-DD HH24:MI:SS.FF3 TZHTZM"
timestamp_type_mapping = "TIMESTAMP_NTZ"
timestamp_tz_output_format = ""
timezone = "America/Los_Angeles"
time_input_format = "AUTO"
time_output_format = "HH24:MI:SS"
trace_level = "OFF"
transaction_abort_on_error = false
transaction_default_isolation_level = "READ COMMITTED"
two_digit_century_start = 1970
unsupported_ddl_action = "ignore"
use_cached_result = true
week_of_year_policy = 0
week_start = 0
}
52 changes: 52 additions & 0 deletions pkg/manual_tests/benchmarks/warehouse/warehouse.tf
@@ -0,0 +1,52 @@
# Test setup.
variable "resource_count" {
type = number
}

terraform {
required_providers {
snowflake = {
source = "Snowflake-Labs/snowflake"
version = "= 1.0.1"
}
}
}

locals {
id_number_list = {
for index, val in range(0, var.resource_count) :
val => tostring(val)
}
}

resource "snowflake_resource_monitor" "monitor" {
count = var.resource_count > 0 ? 1 : 0
name = "perf_resource_monitor"
}

# Resource with required fields
resource "snowflake_warehouse" "basic" {
for_each = local.id_number_list
name = format("perf_basic_%v", each.key)
}

# Resource with all fields
resource "snowflake_warehouse" "complete" {
for_each = local.id_number_list
name = format("perf_complete_%v", each.key)
warehouse_type = "SNOWPARK-OPTIMIZED"
warehouse_size = "MEDIUM"
max_cluster_count = 4
min_cluster_count = 2
scaling_policy = "ECONOMY"
auto_suspend = 1200
auto_resume = false
initially_suspended = false
resource_monitor = snowflake_resource_monitor.monitor[0].fully_qualified_name
comment = "An example warehouse."
enable_query_acceleration = true
query_acceleration_max_scale_factor = 4
max_concurrency_level = 4
statement_queued_timeout_in_seconds = 5
statement_timeout_in_seconds = 86400
}
