diff --git a/.circleci/config.yml b/.circleci/config.yml index fafed610..c64411da 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -3,19 +3,17 @@ version: 2.1 orbs: go: circleci/go@1.7.0 + linter: talkiq/linter@1.4.1 jobs: - build: + test: executor: name: go/default - tag: "1.16" + tag: &go_version "1.16.10" steps: - checkout - go/mod-download-cached - - run: - name: go vet - command: make vet - run: name: Run unit tests command: TESTARGS="-v" make test @@ -25,6 +23,9 @@ jobs: - run: name: Test Access Token Resource command: TESTARGS="-run TestAccAccessToken" make testacc + - run: + name: Test Audit Log Subscription Resource + command: TESTARGS="-run TestAccAuditLogSubscription" make testacc - run: name: Test Custom Role Resource command: TESTARGS="-run TestAccCustomRole" make testacc @@ -40,9 +41,18 @@ jobs: - run: name: Test Feature Flag Environment Resource command: TESTARGS="-run TestAccFeatureFlagEnvironment" make testacc + - run: + name: Test Flag Trigger Resource + command: TESTARGS="-run TestAccFlagTrigger" make testacc + - run: + name: Test Metric Resource + command: TESTARGS="-run TestAccMetric" make testacc - run: name: Test Project Resource command: TESTARGS="-run TestAccProject" make testacc + - run: + name: Test Relay Proxy Configuration Resource + command: TESTARGS="-run TestAccRelayProxy" make testacc - run: name: Test Segment Resource command: TESTARGS="-run TestAccSegment" make testacc @@ -57,7 +67,22 @@ jobs: name: Test Webhook Resource command: TESTARGS="-run TestAccWebhook" make testacc + lint: + executor: + name: go/default + tag: *go_version + + steps: + - checkout + - run: + name: Install python + command: | + sudo apt update + sudo apt install python3-pip python-is-python3 + - linter/pre-commit + workflows: main: jobs: - - build + - test + - lint diff --git a/.go-version b/.go-version new file mode 100644 index 00000000..d3799fb2 --- /dev/null +++ b/.go-version @@ -0,0 +1 @@ +1.16.10 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..da478a0d --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,22 @@ +minimum_pre_commit_version: "2.9.3" + +repos: + - repo: https://github.com/ashanbrown/gofmts + rev: v0.1.4 + hooks: + - id: gofmts + - repo: https://github.com/golangci/golangci-lint + rev: v1.43.0 + hooks: + - id: golangci-lint + - repo: local + hooks: + - id: generate-audit-log-subscription-configs + name: Generate Audit Log Subscription Configurations + description: This hook runs a python script to update the audit log subscription configuration validation fields. + entry: python scripts/generate_integration_audit_log_configs.py + pass_filenames: false + language: python + additional_dependencies: ['requests'] + verbose: true + diff --git a/CHANGELOG.md b/CHANGELOG.md index edf56fbe..40653d74 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,25 @@ +## [2.4.0] (January 19, 2022) + +FEATURES: + +- Added a `launchdarkly_team_members` data source to allow using multiple team members in one data source. + +- Added a new `launchdarkly_metric` resource and data source for managing LaunchDarkly experiment flag metrics. + +- Added a new `launchdarkly_flag_triggers` resource and data source for managing LaunchDarkly flag triggers. 
+
+- Added a new `launchdarkly_relay_proxy_configuration` resource and data source for managing configurations for the Relay Proxy's [automatic configuration](https://docs.launchdarkly.com/home/relay-proxy/automatic-configuration#writing-an-inline-policy) feature.
+
+- Added a new `launchdarkly_audit_log_subscription` resource and data source for managing LaunchDarkly audit log integration subscriptions.
+
+ENHANCEMENTS:
+
+- Updated tests to use the constant attribute keys defined in launchdarkly/keys.go.
+
+- Added a pre-commit file with a hook to alphabetize launchdarkly/keys.go.
+
+- Improved 409 and 429 retry handling.
+
 ## [2.3.0] (January 4, 2022)
 
 FEATURES:
@@ -29,6 +51,10 @@ NOTES:
 
 - The `launchdarkly_feature_flag` resource's argument `include_in_snippet` has been deprecated in favor of `client_side_availability`. Please update your config to use `client_side_availability` in order to maintain compatibility with future versions.
 
+ENHANCEMENTS:
+
+- Upgraded the LaunchDarkly API client to version 7.
+
 ## [2.1.1] (October 11, 2021)
 
 BUG FIXES:
diff --git a/GNUmakefile b/GNUmakefile
index f5bb61b4..c3ff6172 100644
--- a/GNUmakefile
+++ b/GNUmakefile
@@ -29,6 +29,8 @@ vet:
 	fi
 
 fmt:
+	go install github.com/ashanbrown/gofmts/cmd/gofmts@v0.1.4
+	gofmts -w $(GOFMT_FILES)
 	gofmt -w $(GOFMT_FILES)
 
 fmtcheck:
diff --git a/docs/DEVELOPMENT.md b/docs/DEVELOPMENT.md
index 97ba9d61..7d9a8f41 100644
--- a/docs/DEVELOPMENT.md
+++ b/docs/DEVELOPMENT.md
@@ -20,6 +20,8 @@ $ mkdir -p $HOME/development/terraform-providers/; cd $HOME/development/terrafor
 $ git clone git@github.com:launchdarkly/terraform-provider-launchdarkly
 ```
 
+If you are working on the `launchdarkly_audit_log_subscription` resource, you will want to ensure the configuration field mapping is up-to-date with the [most recent changes](https://github.com/launchdarkly/integration-framework/tree/master/integrations) while testing by `cd`-ing into `scripts/` and running `python generate_integration_audit_log_configs.py`. Please note you will need to have the Python `requests` package installed locally. Otherwise, this will be run as a git commit hook. Then, to update the Go mapping, follow the instructions in audit_log_subscription_configs.go and commit and push your changes.
+
 To compile the provider, run `make build`. This will build the provider and put the provider binary in the `$GOPATH/bin` directory.
 
 ```sh
diff --git a/examples/v2/audit_log_subscription/.terraform.lock.hcl b/examples/v2/audit_log_subscription/.terraform.lock.hcl
new file mode 100644
index 00000000..2341cc74
--- /dev/null
+++ b/examples/v2/audit_log_subscription/.terraform.lock.hcl
@@ -0,0 +1,23 @@
+# This file is maintained automatically by "terraform init".
+# Manual edits may be lost in future updates.
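As a minimal sketch of the local workflow described in the DEVELOPMENT.md note above (not part of the patch itself), assuming `pre-commit` is installed from PyPI and the commands are run from the repository root:

```sh
# Install the hook runner plus the `requests` dependency used by the local hook.
pip install pre-commit requests

# Run every hook from .pre-commit-config.yaml (gofmts, golangci-lint, and
# generate-audit-log-subscription-configs) against the whole tree.
pre-commit run --all-files

# Or regenerate the audit log subscription config mapping by hand, as described above.
cd scripts
python generate_integration_audit_log_configs.py
```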
+ +provider "registry.terraform.io/launchdarkly/launchdarkly" { + version = "2.3.0" + constraints = "~> 2.0" + hashes = [ + "h1:VR+DULiV82N4UpHtJun1r3JPP46kab89W/aHo2tt5OM=", + "zh:0af02f9cc42c6282d908b31ab3aa02754c0e9a8fd757a0e5a8a61da29de4cc68", + "zh:0ba4ba6351898598784005506a86ca60a08090e7ee30d3e465f0642ddf7ad830", + "zh:1d520405a977224077b72baf3e472b5092273af87ae265658f29100e4585ecec", + "zh:5b67f5fa15dbef0aff0c03cf5e00ee260c4665b35d23956c053a3e0f5ec62814", + "zh:6bbb63dc6db8a6e9591bd4855762706e1fbdc4308ea6256c8b1f76771aec46b9", + "zh:812fccb8d45e8edb237f2d1512be790cbccbfec650d36ab5fcf287ea71065fc1", + "zh:866b1596011d51319dbb95974319cc88099e912dcb4720e32c6e8442b45e4cee", + "zh:9ef51c7ff15633608c158b86c65cb37e7fdf455de571bafeb3e3134147bf4de7", + "zh:a0ae35202f11c1dc97b6e92aa6b9921016d7ba3022caa6eeba2ed731fb00a7bd", + "zh:a2700a77e5b8116a5c31ed338929fb6e13860656fd27681ef97b8efa1de1965b", + "zh:a56f16bcfab3185582c1e014507419d30154b0be03020d62f8fe9020bc326d1b", + "zh:d067c1d12728b4facda4affd77fbd644576491df218b8fe2cfcbdf690c0ecd55", + "zh:ee4a968d8b408126e9beee0bb215097894b365cdc683bf1e8a4e3a3cfb3b52bc", + ] +} diff --git a/examples/v2/audit_log_subscription/example.tf b/examples/v2/audit_log_subscription/example.tf new file mode 100644 index 00000000..c2e34f6e --- /dev/null +++ b/examples/v2/audit_log_subscription/example.tf @@ -0,0 +1,60 @@ +terraform { + required_providers { + launchdarkly = { + source = "launchdarkly/launchdarkly" + version = "~> 2.0" + } + } + required_version = ">= 0.13" +} + +resource "launchdarkly_audit_log_subscription" "datadog_example" { + integration_key = "datadog" + name = "Example Terraform Subscription" + config = { + api_key = "thisisasecretkey" + host_url = "https://api.datadoghq.com" + } + on = false + tags = ["terraform-managed"] + statements { + actions = ["*"] + effect = "deny" + resources = ["proj/*:env/*:flag/*"] + } +} + +resource "launchdarkly_audit_log_subscription" "dynatrace_example" { + integration_key = "dynatrace" + name = "Example Terraform Subscription" + config = { + api_token = "verysecrettoken" + url = "https://launchdarkly.appdynamics.com" + entity = "APPLICATION_METHOD" + } + tags = ["terraform-managed"] + on = true + statements { + actions = ["*"] + effect = "deny" + resources = ["proj/*:env/test:flag/*"] + } +} + +resource "launchdarkly_audit_log_subscription" "splunk_example" { + integration_key = "splunk" + name = "Example Terraform Subscription" + config = { + base_url = "https://launchdarkly.splunk.com" + token = "averysecrettoken" + skip_ca_verification = true + } + tags = ["terraform-managed"] + on = true + statements { + actions = ["*"] + effect = "allow" + resources = ["proj/*:env/production:flag/*"] + } +} + diff --git a/examples/v2/flag_trigger/example.tf b/examples/v2/flag_trigger/example.tf new file mode 100644 index 00000000..f106ba6e --- /dev/null +++ b/examples/v2/flag_trigger/example.tf @@ -0,0 +1,39 @@ +terraform { + required_providers { + launchdarkly = { + source = "launchdarkly/launchdarkly" + version = "~> 2.0" + } + } + required_version = ">= 0.13" +} + +resource "launchdarkly_project" "trigger_test" { + key = "trigger-test" + name = "A Trigger Test Project" + # configure a production environment + environments { + name = "Terraform Production Environment" + key = "production" + color = "581845" + } +} + +resource "launchdarkly_feature_flag" "trigger_test_flag" { + project_key = launchdarkly_project.trigger_test.key + key = "trigger-test-flag" + name = "Trigger Test Flag" + + variation_type = "boolean" +} + 
+resource "launchdarkly_flag_trigger" "test_trigger" { + project_key = launchdarkly_project.trigger_test.key + env_key = launchdarkly_project.trigger_test.environments.0.key + flag_key = launchdarkly_feature_flag.trigger_test_flag.key + integration_key = "generic-trigger" + instructions { + kind = "turnFlagOff" + } + enabled = false +} diff --git a/examples/v2/metric/example.tf b/examples/v2/metric/example.tf new file mode 100644 index 00000000..1b2c63b8 --- /dev/null +++ b/examples/v2/metric/example.tf @@ -0,0 +1,72 @@ +resource "launchdarkly_project" "example" { + key = "example-project" + name = "metrics example project" + environments { + name = "example environment" + key = "example-env" + color = "010101" + } +} + +resource "launchdarkly_metric" "pageview_example" { + project_key = launchdarkly_project.example.key + key = "pageview-metric" + name = "Pageview Metric" + description = "example pageview metric" + kind = "pageview" + is_active = false + tags = [ + "example", + ] + urls { + kind = "substring" + substring = "foo" + } + urls { + kind = "regex" + pattern = "`foo`gm" + } +} + +resource "launchdarkly_metric" "click_example" { + project_key = launchdarkly_project.example.key + key = "click-metric" + name = "click Metric" + description = "example click metric" + kind = "click" + selector = ".foo" + tags = [ + "example", + ] + urls { + kind = "exact" + url = "https://example.com/example/" + } +} + +resource "launchdarkly_metric" "custom_example" { + project_key = launchdarkly_project.example.key + key = "custom-metric" + name = "custom Metric" + description = "example custom metric" + kind = "custom" + event_key = "foo" + tags = [ + "example", + ] +} + +resource "launchdarkly_metric" "numeric_example" { + project_key = launchdarkly_project.example.key + key = "numeric-metric" + name = "numeric Metric" + description = "example numeric metric" + kind = "custom" + is_numeric = true + unit = "bar" + success_criteria = "HigherThanBaseline" + event_key = "foo" + tags = [ + "example", + ] +} diff --git a/examples/v2/metric/versions.tf b/examples/v2/metric/versions.tf new file mode 100644 index 00000000..2e41b808 --- /dev/null +++ b/examples/v2/metric/versions.tf @@ -0,0 +1,9 @@ +terraform { + required_providers { + launchdarkly = { + source = "launchdarkly/launchdarkly" + version = "~> 2.0.0" + } + } + required_version = ">= 0.13" +} \ No newline at end of file diff --git a/go.mod b/go.mod index 92b4b1c6..c555234f 100644 --- a/go.mod +++ b/go.mod @@ -6,17 +6,20 @@ require ( github.com/agext/levenshtein v1.2.3 // indirect github.com/fatih/color v1.13.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 github.com/hashicorp/go-hclog v1.0.0 // indirect github.com/hashicorp/go-plugin v1.4.3 // indirect + github.com/hashicorp/go-retryablehttp v0.7.0 github.com/hashicorp/hcl/v2 v2.11.1 // indirect github.com/hashicorp/terraform-plugin-sdk/v2 v2.10.0 github.com/hashicorp/terraform-registry-address v0.0.0-20210816115301-cb2034eba045 // indirect github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87 // indirect - github.com/launchdarkly/api-client-go/v7 v7.0.0 + github.com/launchdarkly/api-client-go/v7 v7.1.1 github.com/mattn/go-colorable v0.1.12 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/mapstructure v1.4.3 // indirect github.com/oklog/run 
v1.1.0 // indirect + github.com/stoewer/go-strcase v1.2.0 github.com/stretchr/testify v1.7.0 github.com/zclconf/go-cty v1.10.0 // indirect golang.org/x/net v0.0.0-20211208012354-db4efeb81f4b // indirect diff --git a/go.sum b/go.sum index fe8b8993..c4f80cd5 100644 --- a/go.sum +++ b/go.sum @@ -190,6 +190,7 @@ github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320/go.mod h1:EiZBM github.com/hashicorp/go-getter v1.5.3 h1:NF5+zOlQegim+w/EUhSLh6QhXHmZMEeHLQzllkQ3ROU= github.com/hashicorp/go-getter v1.5.3/go.mod h1:BrrV/1clo8cCYu6mxvboYg+KutTiFnXjMEgDD8+i7ZI= github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI= +github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= github.com/hashicorp/go-hclog v0.14.1/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-hclog v0.16.1/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-hclog v1.0.0 h1:bkKf0BeBXcSYa7f5Fyi9gMuQ8gNsxeiNpZjR6VxNZeo= @@ -201,6 +202,8 @@ github.com/hashicorp/go-plugin v1.3.0/go.mod h1:F9eH4LrE/ZsRdbwhfjs9k9HoDUwAHnYt github.com/hashicorp/go-plugin v1.4.1/go.mod h1:5fGEH17QVwTTcR0zV7yhDPLLmFX9YSZ38b18Udy6vYQ= github.com/hashicorp/go-plugin v1.4.3 h1:DXmvivbWD5qdiBts9TpBC7BYL1Aia5sxbRgQB+v6UZM= github.com/hashicorp/go-plugin v1.4.3/go.mod h1:5fGEH17QVwTTcR0zV7yhDPLLmFX9YSZ38b18Udy6vYQ= +github.com/hashicorp/go-retryablehttp v0.7.0 h1:eu1EI/mbirUgP5C8hVsTNaGZreBDlYiwC1FZWkvQPQ4= +github.com/hashicorp/go-retryablehttp v0.7.0/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= github.com/hashicorp/go-safetemp v1.0.0 h1:2HR189eFNrjHQyENnQMMpCiBAsRxzbTMIgBhEyExpmo= github.com/hashicorp/go-safetemp v1.0.0/go.mod h1:oaerMy3BhqiTbVye6QuFhFtIceqFoDHxNAB65b+Rj1I= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= @@ -270,8 +273,8 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= -github.com/launchdarkly/api-client-go/v7 v7.0.0 h1:mCVGV3adts81Gtq2YxwCi6lvS/V9hYGJlqilLGFKj98= -github.com/launchdarkly/api-client-go/v7 v7.0.0/go.mod h1:5FlSAYTMrNa4UOiuSSL1+85NOiJel6cZT2P86ihNR9s= +github.com/launchdarkly/api-client-go/v7 v7.1.1 h1:3VBkFt9xHljMw5KDlVFDUogxfH78Y7GLVu8irBC8Gy8= +github.com/launchdarkly/api-client-go/v7 v7.1.1/go.mod h1:GVl1inKsWoKX3yLgdqrjxWw8k4ih0HlSmdnrhi5NNDs= github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= @@ -329,6 +332,8 @@ github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNX github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= 
github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/stoewer/go-strcase v1.2.0 h1:Z2iHWqGXH00XYgqDmNgQbIBxf3wrNq0F3feEy0ainaU= +github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= diff --git a/launchdarkly/approvals_helper.go b/launchdarkly/approvals_helper.go index 42197ada..e3b8e4b1 100644 --- a/launchdarkly/approvals_helper.go +++ b/launchdarkly/approvals_helper.go @@ -28,11 +28,11 @@ func approvalSchema() *schema.Schema { Default: false, }, MIN_NUM_APPROVALS: { - Type: schema.TypeInt, - Optional: true, - Description: "The number of approvals required before an approval request can be applied.", - ValidateFunc: validation.IntBetween(1, 5), - Default: 1, + Type: schema.TypeInt, + Optional: true, + Description: "The number of approvals required before an approval request can be applied.", + ValidateDiagFunc: validation.ToDiagFunc(validation.IntBetween(1, 5)), + Default: 1, }, CAN_APPLY_DECLINED_CHANGES: { Type: schema.TypeBool, @@ -45,8 +45,10 @@ func approvalSchema() *schema.Schema { Optional: true, Description: "An array of tags used to specify which flags with those tags require approval. You may only set requiredApprovalTags or required, not both.", Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validateTags(), + Type: schema.TypeString, + // Can't use validation.ToDiagFunc converted validators on TypeList at the moment + // https://github.com/hashicorp/terraform-plugin-sdk/issues/734 + ValidateFunc: validateTagsNoDiag(), }, }, }, diff --git a/launchdarkly/audit_log_subscription_configs.go b/launchdarkly/audit_log_subscription_configs.go new file mode 100644 index 00000000..a20103e2 --- /dev/null +++ b/launchdarkly/audit_log_subscription_configs.go @@ -0,0 +1,298 @@ +package launchdarkly + +// to get the updated SUBSCRIPTION_CONFIGURATION_FIELDS value, paste the generated json in +// audit_log_subscription_configs.json into https://rodrigo-brito.github.io/json-to-go-map/ + +// TODO: generate this automatically +// func parseAuditLogSubscriptionConfigsFromJson() (map[string]IntegrationConfig, error) { +// var configs map[string]IntegrationConfig +// file, err := ioutil.ReadFile(CONFIG_FILE) +// if err != nil { +// return configs, err +// } + +// err = json.Unmarshal([]byte(file), &configs) +// if err != nil { +// return configs, err +// } +// return configs, nil +// } + +var SUBSCRIPTION_CONFIGURATION_FIELDS = map[string]interface{}{ + "appdynamics": map[string]interface{}{ + "account": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": false, + }, + "applicationID": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": false, + }, + }, + "datadog": map[string]interface{}{ + "apiKey": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": true, + }, + "hostURL": map[string]interface{}{ + "type": "enum", + "isOptional": true, + "allowedValues": []interface{}{ + "https://api.datadoghq.com", + "https://api.datadoghq.eu", + }, + "defaultValue": 
"https://api.datadoghq.com", + "isSecret": false, + }, + }, + "dynatrace": map[string]interface{}{ + "apiToken": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": true, + }, + "url": map[string]interface{}{ + "type": "uri", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": false, + }, + "entity": map[string]interface{}{ + "type": "enum", + "isOptional": true, + "allowedValues": []interface{}{ + "APPLICATION", + "APPLICATION_METHOD", + "APPLICATION_METHOD_GROUP", + "AUTO_SCALING_GROUP", + "AUXILIARY_SYNTHETIC_TEST", + "AWS_APPLICATION_LOAD_BALANCER", + "AWS_AVAILABILITY_ZONE", + "AWS_CREDENTIALS", + "AWS_LAMBDA_FUNCTION", + "AWS_NETWORK_LOAD_BALANCER", + "AZURE_API_MANAGEMENT_SERVICE", + "AZURE_APPLICATION_GATEWAY", + "AZURE_COSMOS_DB", + "AZURE_CREDENTIALS", + "AZURE_EVENT_HUB", + "AZURE_EVENT_HUB_NAMESPACE", + "AZURE_FUNCTION_APP", + "AZURE_IOT_HUB", + "AZURE_LOAD_BALANCER", + "AZURE_MGMT_GROUP", + "AZURE_REDIS_CACHE", + "AZURE_REGION", + "AZURE_SERVICE_BUS_NAMESPACE", + "AZURE_SERVICE_BUS_QUEUE", + "AZURE_SERVICE_BUS_TOPIC", + "AZURE_SQL_DATABASE", + "AZURE_SQL_ELASTIC_POOL", + "AZURE_SQL_SERVER", + "AZURE_STORAGE_ACCOUNT", + "AZURE_SUBSCRIPTION", + "AZURE_TENANT", + "AZURE_VM", + "AZURE_VM_SCALE_SET", + "AZURE_WEB_APP", + "CF_APPLICATION", + "CF_FOUNDATION", + "CINDER_VOLUME", + "CLOUD_APPLICATION", + "CLOUD_APPLICATION_INSTANCE", + "CLOUD_APPLICATION_NAMESPACE", + "CONTAINER_GROUP", + "CONTAINER_GROUP_INSTANCE", + "CUSTOM_APPLICATION", + "CUSTOM_DEVICE", + "CUSTOM_DEVICE_GROUP", + "DCRUM_APPLICATION", + "DCRUM_SERVICE", + "DCRUM_SERVICE_INSTANCE", + "DEVICE_APPLICATION_METHOD", + "DISK", + "DOCKER_CONTAINER_GROUP_INSTANCE", + "DYNAMO_DB_TABLE", + "EBS_VOLUME", + "EC2_INSTANCE", + "ELASTIC_LOAD_BALANCER", + "ENVIRONMENT", + "EXTERNAL_SYNTHETIC_TEST_STEP", + "GCP_ZONE", + "GEOLOCATION", + "GEOLOC_SITE", + "GOOGLE_COMPUTE_ENGINE", + "HOST", + "HOST_GROUP", + "HTTP_CHECK", + "HTTP_CHECK_STEP", + "HYPERVISOR", + "KUBERNETES_CLUSTER", + "KUBERNETES_NODE", + "MOBILE_APPLICATION", + "NETWORK_INTERFACE", + "NEUTRON_SUBNET", + "OPENSTACK_PROJECT", + "OPENSTACK_REGION", + "OPENSTACK_VM", + "OS", + "PROCESS_GROUP", + "PROCESS_GROUP_INSTANCE", + "RELATIONAL_DATABASE_SERVICE", + "SERVICE", + "SERVICE_INSTANCE", + "SERVICE_METHOD", + "SERVICE_METHOD_GROUP", + "SWIFT_CONTAINER", + "SYNTHETIC_LOCATION", + "SYNTHETIC_TEST", + "SYNTHETIC_TEST_STEP", + "VIRTUALMACHINE", + "VMWARE_DATACENTER", + }, + "defaultValue": "APPLICATION", + "isSecret": false, + }, + }, + "elastic": map[string]interface{}{ + "url": map[string]interface{}{ + "type": "uri", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": false, + }, + "token": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": true, + }, + "index": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": false, + }, + }, + "honeycomb": map[string]interface{}{ + "datasetName": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": false, + }, + "apiKey": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": true, + }, + }, + "logdna": map[string]interface{}{ + "ingestionKey": map[string]interface{}{ + "type": "string", + 
"isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": true, + }, + "level": map[string]interface{}{ + "type": "string", + "isOptional": true, + "allowedValues": nil, + "defaultValue": "INFO", + "isSecret": false, + }, + }, + "msteams": map[string]interface{}{ + "url": map[string]interface{}{ + "type": "uri", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": false, + }, + }, + "new-relic-apm": map[string]interface{}{ + "apiKey": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": true, + }, + "applicationId": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": false, + }, + "domain": map[string]interface{}{ + "type": "enum", + "isOptional": true, + "allowedValues": []interface{}{ + "api.newrelic.com", + "api.eu.newrelic.com", + }, + "defaultValue": "api.newrelic.com", + "isSecret": false, + }, + }, + "signalfx": map[string]interface{}{ + "accessToken": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": true, + }, + "realm": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": false, + }, + }, + "splunk": map[string]interface{}{ + "base-url": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": false, + }, + "token": map[string]interface{}{ + "type": "string", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": true, + }, + "skip-ca-verification": map[string]interface{}{ + "type": "boolean", + "isOptional": false, + "allowedValues": nil, + "defaultValue": nil, + "isSecret": false, + }, + }, +} diff --git a/launchdarkly/audit_log_subscription_configs.json b/launchdarkly/audit_log_subscription_configs.json new file mode 100644 index 00000000..c6ab2b60 --- /dev/null +++ b/launchdarkly/audit_log_subscription_configs.json @@ -0,0 +1 @@ +{"appdynamics": {"account": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": false}, "applicationID": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": false}}, "datadog": {"apiKey": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": true}, "hostURL": {"type": "enum", "isOptional": true, "allowedValues": ["https://api.datadoghq.com", "https://api.datadoghq.eu", "https://us3.datadoghq.com", "https://us5.datadoghq.com", "https://app.ddog-gov.com"], "defaultValue": "https://api.datadoghq.com", "isSecret": false}}, "dynatrace": {"apiToken": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": true}, "url": {"type": "uri", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": false}, "entity": {"type": "enum", "isOptional": true, "allowedValues": ["APPLICATION", "APPLICATION_METHOD", "APPLICATION_METHOD_GROUP", "AUTO_SCALING_GROUP", "AUXILIARY_SYNTHETIC_TEST", "AWS_APPLICATION_LOAD_BALANCER", "AWS_AVAILABILITY_ZONE", "AWS_CREDENTIALS", "AWS_LAMBDA_FUNCTION", "AWS_NETWORK_LOAD_BALANCER", "AZURE_API_MANAGEMENT_SERVICE", "AZURE_APPLICATION_GATEWAY", "AZURE_COSMOS_DB", "AZURE_CREDENTIALS", "AZURE_EVENT_HUB", "AZURE_EVENT_HUB_NAMESPACE", "AZURE_FUNCTION_APP", "AZURE_IOT_HUB", "AZURE_LOAD_BALANCER", 
"AZURE_MGMT_GROUP", "AZURE_REDIS_CACHE", "AZURE_REGION", "AZURE_SERVICE_BUS_NAMESPACE", "AZURE_SERVICE_BUS_QUEUE", "AZURE_SERVICE_BUS_TOPIC", "AZURE_SQL_DATABASE", "AZURE_SQL_ELASTIC_POOL", "AZURE_SQL_SERVER", "AZURE_STORAGE_ACCOUNT", "AZURE_SUBSCRIPTION", "AZURE_TENANT", "AZURE_VM", "AZURE_VM_SCALE_SET", "AZURE_WEB_APP", "CF_APPLICATION", "CF_FOUNDATION", "CINDER_VOLUME", "CLOUD_APPLICATION", "CLOUD_APPLICATION_INSTANCE", "CLOUD_APPLICATION_NAMESPACE", "CONTAINER_GROUP", "CONTAINER_GROUP_INSTANCE", "CUSTOM_APPLICATION", "CUSTOM_DEVICE", "CUSTOM_DEVICE_GROUP", "DCRUM_APPLICATION", "DCRUM_SERVICE", "DCRUM_SERVICE_INSTANCE", "DEVICE_APPLICATION_METHOD", "DISK", "DOCKER_CONTAINER_GROUP_INSTANCE", "DYNAMO_DB_TABLE", "EBS_VOLUME", "EC2_INSTANCE", "ELASTIC_LOAD_BALANCER", "ENVIRONMENT", "EXTERNAL_SYNTHETIC_TEST_STEP", "GCP_ZONE", "GEOLOCATION", "GEOLOC_SITE", "GOOGLE_COMPUTE_ENGINE", "HOST", "HOST_GROUP", "HTTP_CHECK", "HTTP_CHECK_STEP", "HYPERVISOR", "KUBERNETES_CLUSTER", "KUBERNETES_NODE", "MOBILE_APPLICATION", "NETWORK_INTERFACE", "NEUTRON_SUBNET", "OPENSTACK_PROJECT", "OPENSTACK_REGION", "OPENSTACK_VM", "OS", "PROCESS_GROUP", "PROCESS_GROUP_INSTANCE", "RELATIONAL_DATABASE_SERVICE", "SERVICE", "SERVICE_INSTANCE", "SERVICE_METHOD", "SERVICE_METHOD_GROUP", "SWIFT_CONTAINER", "SYNTHETIC_LOCATION", "SYNTHETIC_TEST", "SYNTHETIC_TEST_STEP", "VIRTUALMACHINE", "VMWARE_DATACENTER"], "defaultValue": "APPLICATION", "isSecret": false}}, "elastic": {"url": {"type": "uri", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": false}, "token": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": true}, "index": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": false}}, "honeycomb": {"datasetName": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": false}, "apiKey": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": true}}, "logdna": {"ingestionKey": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": true}, "level": {"type": "string", "isOptional": true, "allowedValues": null, "defaultValue": "INFO", "isSecret": false}}, "msteams": {"url": {"type": "uri", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": false}}, "new-relic-apm": {"apiKey": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": true}, "applicationId": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": false}, "domain": {"type": "enum", "isOptional": true, "allowedValues": ["api.newrelic.com", "api.eu.newrelic.com"], "defaultValue": "api.newrelic.com", "isSecret": false}}, "signalfx": {"accessToken": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": true}, "realm": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": false}}, "splunk": {"base-url": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": false}, "token": {"type": "string", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": true}, "skip-ca-verification": {"type": "boolean", "isOptional": false, "allowedValues": null, "defaultValue": null, "isSecret": false}}} \ No newline at end of file diff --git a/launchdarkly/audit_log_subscription_helper.go 
b/launchdarkly/audit_log_subscription_helper.go new file mode 100644 index 00000000..02be4802 --- /dev/null +++ b/launchdarkly/audit_log_subscription_helper.go @@ -0,0 +1,253 @@ +package launchdarkly + +import ( + "context" + "fmt" + "log" + "strconv" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + strcase "github.com/stoewer/go-strcase" +) + +var KEBAB_CASE_INTEGRATIONS = []string{"splunk"} + +type IntegrationConfig map[string]FormVariable + +type FormVariable struct { + Type string + IsOptional *bool + AllowedValues *[]string + DefaultValue *interface{} + IsSecret *bool +} + +func auditLogSubscriptionSchema(isDataSource bool) map[string]*schema.Schema { + return map[string]*schema.Schema{ + INTEGRATION_KEY: { + // validated as part of the config validation + Type: schema.TypeString, + Required: true, + // we are omitting appdynamics for now because it requires oauth + ValidateFunc: validation.StringNotInSlice([]string{"appdynamics"}, false), + ForceNew: true, + }, + NAME: { + Type: schema.TypeString, + Required: !isDataSource, + Optional: isDataSource, + }, + CONFIG: { + Type: schema.TypeMap, + Required: !isDataSource, + Optional: isDataSource, + }, + STATEMENTS: policyStatementsSchema(policyStatementSchemaOptions{required: !isDataSource}), + ON: { + Type: schema.TypeBool, + Required: !isDataSource, + Optional: isDataSource, + }, + TAGS: tagsSchema(), + } +} + +func parseAuditLogSubscriptionConfigs() map[string]IntegrationConfig { + // SUBSCRIPTION_CONFIGURATION_FIELDS can be found in audit_log_subscription_configs.go + configs := make(map[string]IntegrationConfig, len(SUBSCRIPTION_CONFIGURATION_FIELDS)) + for integrationKey, rawVariables := range SUBSCRIPTION_CONFIGURATION_FIELDS { + cfg := IntegrationConfig{} + variables := rawVariables.(map[string]interface{}) + for k, v := range variables { + variable := v.(map[string]interface{}) + formVariable := FormVariable{Type: variable["type"].(string)} + if variable["isOptional"] != nil { + isOptional := variable["isOptional"].(bool) + formVariable.IsOptional = &isOptional + } + if variable["allowedValues"] != nil { + rawValues := variable["allowedValues"].([]interface{}) + var allowedValues []string + for _, value := range rawValues { + allowedValues = append(allowedValues, value.(string)) + } + formVariable.AllowedValues = &allowedValues + } + if variable["isSecret"] != nil { + isSecret := variable["isSecret"].(bool) + formVariable.IsSecret = &isSecret + } + if variable["defaultValue"] != nil { + defaultValue := variable["defaultValue"] + formVariable.DefaultValue = &defaultValue + } + cfg[k] = formVariable + } + configs[integrationKey] = cfg + } + return configs +} + +func getConfigFieldKey(integrationKey, resourceKey string) string { + // a select number of integrations take fields in kebab case, ex. 
"skip-ca-verification" + // currently this only applies to splunk + for _, integration := range KEBAB_CASE_INTEGRATIONS { + if integrationKey == integration { + return strcase.KebabCase(resourceKey) + } + } + return strcase.LowerCamelCase(resourceKey) +} + +// configFromResourceData uses the configuration generated into audit_log_subscription_config.json +// to validate and generate the config the API expects +func configFromResourceData(d *schema.ResourceData) (map[string]interface{}, error) { + // TODO: refactor to return list of diags warnings with all formatting errors + integrationKey := d.Get(INTEGRATION_KEY).(string) + config := d.Get(CONFIG).(map[string]interface{}) + configMap := parseAuditLogSubscriptionConfigs() + configFormat, ok := configMap[integrationKey] + if !ok { + return config, fmt.Errorf("%s is not a valid integration_key for audit log subscriptions", integrationKey) + } + for k := range config { + // error if an incorrect config variable has been set + key := getConfigFieldKey(integrationKey, k) // convert casing to compare to required config format + if integrationKey == "datadog" && key == "hostUrl" { + // this is a one-off for now + key = "hostURL" + } + if _, ok := configFormat[key]; !ok { + return config, fmt.Errorf("config variable %s not valid for integration type %s", k, integrationKey) + } + } + convertedConfig := make(map[string]interface{}, len(config)) + for k, v := range configFormat { + key := strcase.SnakeCase(k) // convert to snake case to validate user config + rawValue, ok := config[key] + if !ok { + if !*v.IsOptional { + return config, fmt.Errorf("config variable %s must be set", key) + } + // we will let the API handle default configs for now since it otherwise messes + // up the plan if we set an attribute a user has not set on a non-computed attribute + continue + } + // type will be one of ["string", "boolean", "uri", "enum", "oauth", "dynamicEnum"] + // for now we do not need to handle oauth or dynamicEnum + switch v.Type { + case "string", "uri": + // we'll let the API handle the URI validation for now + value := rawValue.(string) + convertedConfig[k] = value + case "boolean": + value, err := strconv.ParseBool(rawValue.(string)) // map values may only be one type, so all non-string types have to be converted + if err != nil { + return config, fmt.Errorf("config value %s for %v must be of type bool", rawValue, k) + } + convertedConfig[k] = value + case "enum": + value := rawValue.(string) + if !stringInSlice(value, *v.AllowedValues) { + return config, fmt.Errorf("config value %s for %v must be one of the following approved string values: %v", rawValue, k, *v.AllowedValues) + } + convertedConfig[k] = value + default: + // just set to the existing value + convertedConfig[k] = rawValue + } + } + return convertedConfig, nil +} + +func configToResourceData(d *schema.ResourceData, config map[string]interface{}) (map[string]interface{}, error) { + integrationKey := d.Get(INTEGRATION_KEY).(string) + configMap := parseAuditLogSubscriptionConfigs() + configFormat, ok := configMap[integrationKey] + if !ok { + return config, fmt.Errorf("%s is not a currently supported integration_key for audit log subscriptions", integrationKey) + } + originalConfig := d.Get(CONFIG).(map[string]interface{}) + convertedConfig := make(map[string]interface{}, len(config)) + for k, v := range config { + key := strcase.SnakeCase(k) + // some attributes have defaults that the API will return and terraform will complain since config + // is not a computed attribute (cannot be 
both required & computed) + // TODO: handle this in a SuppressDiff function + if _, setByUser := originalConfig[key]; !setByUser { + continue + } + convertedConfig[key] = v + if value, isBool := v.(bool); isBool { + convertedConfig[key] = strconv.FormatBool(value) + } + if *configFormat[k].IsSecret { + // if the user didn't put it in as obfuscated, we don't want to set it as obfuscated + convertedConfig[key] = originalConfig[key] + } + } + return convertedConfig, nil +} + +func auditLogSubscriptionRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}, isDataSource bool) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) + var id string + if isDataSource { + id = d.Get(ID).(string) + } else { + id = d.Id() + } + integrationKey := d.Get(INTEGRATION_KEY).(string) + + sub, res, err := client.ld.IntegrationAuditLogSubscriptionsApi.GetSubscriptionByID(client.ctx, integrationKey, id).Execute() + + if isStatusNotFound(res) && !isDataSource { + log.Printf("[WARN] failed to find integration with ID %q, removing from state if present", id) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] failed to find integration with ID %q, removing from state if present", id), + }) + d.SetId("") + return diags + } + if err != nil { + return diag.Errorf("failed to get integration with ID %q: %v", id, err) + } + + if isDataSource { + d.SetId(*sub.Id) + } + + _ = d.Set(INTEGRATION_KEY, sub.Kind) + _ = d.Set(NAME, sub.Name) + _ = d.Set(ON, sub.On) + cfg, err := configToResourceData(d, *sub.Config) + if err != nil { + return diag.Errorf("failed to set config on integration with id %q: %v", *sub.Id, err) + } + err = d.Set(CONFIG, cfg) + if err != nil { + return diag.Errorf("failed to set config on integration with id %q: %v", *sub.Id, err) + } + err = d.Set(STATEMENTS, policyStatementsToResourceData(*sub.Statements)) + if err != nil { + return diag.Errorf("failed to set statements on integration with id %q: %v", *sub.Id, err) + } + err = d.Set(TAGS, sub.Tags) + if err != nil { + return diag.Errorf("failed to set tags on integration with id %q: %v", *sub.Id, err) + } + return diags +} + +func stringInSlice(a string, list []string) bool { + for _, b := range list { + if b == a { + return true + } + } + return false +} diff --git a/launchdarkly/clause_helper.go b/launchdarkly/clause_helper.go index a128d2b3..168a538f 100644 --- a/launchdarkly/clause_helper.go +++ b/launchdarkly/clause_helper.go @@ -29,10 +29,10 @@ func clauseSchema() *schema.Schema { Description: "The user attribute to operate on", }, OP: { - Type: schema.TypeString, - Required: true, - Description: "The operator associated with the rule clause. Available options are in, endsWith, startsWith, matches, contains, lessThan, lessThanOrEqual, greaterThanOrEqual, before, after, segmentMatch, semVerEqual, semVerLessThan, and semVerGreaterThan", - ValidateFunc: validateOp(), + Type: schema.TypeString, + Required: true, + Description: "The operator associated with the rule clause. Available options are in, endsWith, startsWith, matches, contains, lessThan, lessThanOrEqual, greaterThanOrEqual, before, after, segmentMatch, semVerEqual, semVerLessThan, and semVerGreaterThan", + ValidateDiagFunc: validateOp(), }, VALUES: { Type: schema.TypeList, @@ -47,14 +47,14 @@ func clauseSchema() *schema.Schema { Default: STRING_CLAUSE_VALUE, Optional: true, Description: "The type for each of the clause's values. Available types are boolean, string, and number. 
If omitted, value_type defaults to string", - ValidateFunc: validation.StringInSlice( + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice( []string{ BOOL_CLAUSE_VALUE, STRING_CLAUSE_VALUE, NUMBER_CLAUSE_VALUE, }, false, - ), + )), }, NEGATE: { Type: schema.TypeBool, diff --git a/launchdarkly/config.go b/launchdarkly/config.go index a69c0f9b..8b6ac883 100644 --- a/launchdarkly/config.go +++ b/launchdarkly/config.go @@ -4,17 +4,24 @@ import ( "context" "errors" "fmt" + "log" + "math" "net/http" + "strconv" "time" + retryablehttp "github.com/hashicorp/go-retryablehttp" ldapi "github.com/launchdarkly/api-client-go/v7" ) -// The version string gets updated at build time using -ldflags +//nolint:staticcheck // The version string gets updated at build time using -ldflags var version = "unreleased" const ( - APIVersion = "20191212" + APIVersion = "20191212" + MAX_RETRIES = 8 + RETRY_WAIT_MIN = 200 * time.Millisecond + RETRY_WAIT_MAX = 2000 * time.Millisecond ) // Client is used by the provider to access the ld API. @@ -35,6 +42,7 @@ func newClient(token string, apiHost string, oauth bool) (*Client, error) { cfg.Host = apiHost cfg.DefaultHeader = make(map[string]string) cfg.UserAgent = fmt.Sprintf("launchdarkly-terraform-provider/%s", version) + cfg.HTTPClient = newRetryableClient() cfg.AddDefaultHeader("LD-API-Version", APIVersion) @@ -47,15 +55,61 @@ func newClient(token string, apiHost string, oauth bool) (*Client, error) { } // TODO: remove this once we get the go client reset endpoint fixed - fallbackClient := http.Client{ - Timeout: time.Duration(5 * time.Second), - } + fallbackClient := newRetryableClient() + fallbackClient.Timeout = time.Duration(5 * time.Second) return &Client{ apiKey: token, apiHost: apiHost, ld: ldapi.NewAPIClient(cfg), ctx: ctx, - fallbackClient: &fallbackClient, + fallbackClient: fallbackClient, }, nil } + +func newRetryableClient() *http.Client { + retryClient := retryablehttp.NewClient() + retryClient.RetryWaitMin = RETRY_WAIT_MIN + retryClient.RetryWaitMax = RETRY_WAIT_MAX + retryClient.Backoff = backOff + retryClient.CheckRetry = retryPolicy + retryClient.RetryMax = MAX_RETRIES + retryClient.ErrorHandler = retryablehttp.PassthroughErrorHandler + + return retryClient.StandardClient() +} + +func backOff(min, max time.Duration, attemptNum int, resp *http.Response) time.Duration { + if resp != nil && resp.StatusCode == http.StatusTooManyRequests { + sleepStr := resp.Header.Get("X-RateLimit-Reset") + if sleep, err := strconv.ParseInt(sleepStr, 10, 64); err == nil { + resetTime := time.Unix(0, sleep*int64(time.Millisecond)) + sleepDuration := time.Until(resetTime) + + // We have observed situations where LD-s retry header results in a negative sleep duration. 
In this case, + // multiply the duration by -1 and add jitter + if sleepDuration <= 0 { + log.Printf("[DEBUG] received a negative rate limit retry duration of %s.", sleepDuration) + sleepDuration = -1 * sleepDuration + } + + return sleepDuration + getRandomSleepDuration(sleepDuration) + } + } + + backoffTime := math.Pow(2, float64(attemptNum)) * float64(min) + sleep := time.Duration(backoffTime) + if float64(sleep) != backoffTime || sleep > max { + sleep = max + } + return sleep +} + +func retryPolicy(ctx context.Context, resp *http.Response, err error) (bool, error) { + retry, retryErr := retryablehttp.DefaultRetryPolicy(ctx, resp, err) + if !retry && retryErr == nil && err == nil && resp.StatusCode == http.StatusConflict { + return true, nil + } + + return retry, retryErr +} diff --git a/launchdarkly/config_test.go b/launchdarkly/config_test.go new file mode 100644 index 00000000..a6d75962 --- /dev/null +++ b/launchdarkly/config_test.go @@ -0,0 +1,211 @@ +package launchdarkly + +import ( + "net/http" + "net/http/httptest" + "strconv" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestHandleRateLimits(t *testing.T) { + t.Run("no retries needed", func(t *testing.T) { + t.Parallel() + calls := 0 + + // create a test server + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + calls++ + w.WriteHeader(http.StatusOK) + })) + defer ts.Close() + + // create a client + client, err := newClient("token", ts.URL, false) + require.NoError(t, err) + + res, err := client.ld.GetConfig().HTTPClient.Get(ts.URL) + require.NoError(t, err) + assert.Equal(t, res.StatusCode, http.StatusOK) + assert.Equal(t, calls, 1) + }) + + t.Run("max retries exceeded", func(t *testing.T) { + t.Parallel() + calls := 0 + + // create a test server + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + calls++ + w.Header().Add("X-RateLimit-Reset", strconv.FormatInt(time.Now().Add(100*time.Millisecond).UnixNano()/int64(time.Millisecond), 10)) + w.WriteHeader(http.StatusTooManyRequests) + })) + defer ts.Close() + + // create a client + client, err := newClient("token", ts.URL, false) + require.NoError(t, err) + + res, err := client.ld.GetConfig().HTTPClient.Get(ts.URL) + require.NoError(t, err) + assert.Equal(t, res.StatusCode, http.StatusTooManyRequests) + assert.Equal(t, calls, MAX_RETRIES+1) + }) + + t.Run("retry resolved with header", func(t *testing.T) { + t.Parallel() + calls := 0 + + // create a test server + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + calls++ + + if calls == 3 { + w.WriteHeader(http.StatusOK) + return + } + + w.Header().Add("X-RateLimit-Reset", strconv.FormatInt(time.Now().Add(100*time.Millisecond).UnixNano()/int64(time.Millisecond), 10)) + w.WriteHeader(http.StatusTooManyRequests) + })) + defer ts.Close() + + // create a client + client, err := newClient("token", ts.URL, false) + require.NoError(t, err) + + res, err := client.ld.GetConfig().HTTPClient.Get(ts.URL) + require.NoError(t, err) + assert.Equal(t, res.StatusCode, http.StatusOK) + assert.Equal(t, 3, calls) + }) + + t.Run("retry resolved with negative header", func(t *testing.T) { + t.Parallel() + calls := 0 + + // create a test server + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + calls++ + + if calls == 3 { + w.WriteHeader(http.StatusOK) + return + } + + w.Header().Add("X-RateLimit-Reset", 
strconv.FormatInt(time.Now().Add(-100*time.Millisecond).UnixNano()/int64(time.Millisecond), 10)) + w.WriteHeader(http.StatusTooManyRequests) + })) + defer ts.Close() + + // create a client + client, err := newClient("token", ts.URL, false) + require.NoError(t, err) + + res, err := client.ld.GetConfig().HTTPClient.Get(ts.URL) + require.NoError(t, err) + assert.Equal(t, res.StatusCode, http.StatusOK) + assert.Equal(t, 3, calls) + }) + + t.Run("retry resolved without header", func(t *testing.T) { + t.Parallel() + calls := 0 + + // create a test server + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + calls++ + + if calls == 3 { + w.WriteHeader(http.StatusOK) + return + } + + w.WriteHeader(http.StatusTooManyRequests) + })) + defer ts.Close() + + // create a client + client, err := newClient("token", ts.URL, false) + require.NoError(t, err) + + res, err := client.ld.GetConfig().HTTPClient.Get(ts.URL) + require.NoError(t, err) + assert.Equal(t, res.StatusCode, http.StatusOK) + assert.Equal(t, 3, calls) + }) +} + +func TestHandleConflicts(t *testing.T) { + t.Run("no retries needed", func(t *testing.T) { + t.Parallel() + calls := 0 + + // create a test server + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + calls++ + w.WriteHeader(http.StatusOK) + })) + defer ts.Close() + + // create a client + client, err := newClient("token", ts.URL, false) + require.NoError(t, err) + + res, err := client.ld.GetConfig().HTTPClient.Get(ts.URL) + require.NoError(t, err) + assert.Equal(t, res.StatusCode, http.StatusOK) + assert.Equal(t, calls, 1) + }) + + t.Run("max retries exceeded", func(t *testing.T) { + t.Parallel() + calls := 0 + + // create a test server + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + calls++ + w.WriteHeader(http.StatusConflict) + })) + defer ts.Close() + + // create a client + client, err := newClient("token", ts.URL, false) + require.NoError(t, err) + + res, err := client.ld.GetConfig().HTTPClient.Get(ts.URL) + require.NoError(t, err) + assert.Equal(t, res.StatusCode, http.StatusConflict) + assert.Equal(t, calls, MAX_RETRIES+1) + }) + + t.Run("conflict resolved", func(t *testing.T) { + t.Parallel() + calls := 0 + + // create a test server + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + calls++ + + if calls == 3 { + w.WriteHeader(http.StatusOK) + return + } + + w.WriteHeader(http.StatusConflict) + })) + defer ts.Close() + + // create a client + client, err := newClient("token", ts.URL, false) + require.NoError(t, err) + + res, err := client.ld.GetConfig().HTTPClient.Get(ts.URL) + require.NoError(t, err) + assert.Equal(t, res.StatusCode, http.StatusOK) + assert.Equal(t, 3, calls) + }) +} diff --git a/launchdarkly/custom_properties_helper.go b/launchdarkly/custom_properties_helper.go index 1abbc32f..75bc4c2d 100644 --- a/launchdarkly/custom_properties_helper.go +++ b/launchdarkly/custom_properties_helper.go @@ -22,21 +22,23 @@ func customPropertiesSchema() *schema.Schema { Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ KEY: { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringLenBetween(1, CUSTOM_PROPERTY_CHAR_LIMIT), + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringLenBetween(1, CUSTOM_PROPERTY_CHAR_LIMIT)), }, NAME: { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringLenBetween(1, 
CUSTOM_PROPERTY_CHAR_LIMIT), + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringLenBetween(1, CUSTOM_PROPERTY_CHAR_LIMIT)), }, VALUE: { Type: schema.TypeList, Required: true, MaxItems: CUSTOM_PROPERTY_ITEM_LIMIT, Elem: &schema.Schema{ - Type: schema.TypeString, + Type: schema.TypeString, + // Can't use validation.ToDiagFunc converted validators on TypeList at the moment + // https://github.com/hashicorp/terraform-plugin-sdk/issues/734 ValidateFunc: validation.StringLenBetween(1, CUSTOM_PROPERTY_CHAR_LIMIT), }, }, diff --git a/launchdarkly/data_source_launchdarkly_audit_log_subscription.go b/launchdarkly/data_source_launchdarkly_audit_log_subscription.go new file mode 100644 index 00000000..be4e7bfa --- /dev/null +++ b/launchdarkly/data_source_launchdarkly_audit_log_subscription.go @@ -0,0 +1,25 @@ +package launchdarkly + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func dataSourceAuditLogSubscription() *schema.Resource { + schemaMap := auditLogSubscriptionSchema(true) + schemaMap[ID] = &schema.Schema{ + Type: schema.TypeString, + Required: true, + Description: "The audit log subscription ID", + } + return &schema.Resource{ + ReadContext: dataSourceAuditLogSubscriptionRead, + Schema: schemaMap, + } +} + +func dataSourceAuditLogSubscriptionRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + return auditLogSubscriptionRead(ctx, d, metaRaw, true) +} diff --git a/launchdarkly/data_source_launchdarkly_audit_log_subscription_test.go b/launchdarkly/data_source_launchdarkly_audit_log_subscription_test.go new file mode 100644 index 00000000..01dc454a --- /dev/null +++ b/launchdarkly/data_source_launchdarkly_audit_log_subscription_test.go @@ -0,0 +1,118 @@ +package launchdarkly + +import ( + "fmt" + "os" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + ldapi "github.com/launchdarkly/api-client-go/v7" + "github.com/stretchr/testify/require" +) + +const ( + testAccDataSourceAuditLogSubscriptionBasic = ` +data "launchdarkly_audit_log_subscription" "test" { + id = "%s" + integration_key = "%s" +} +` + + testAccDataSourceAuditLogSubscriptionExists = ` +data "launchdarkly_audit_log_subscription" "test" { + id = "%s" + integration_key = "%s" + } + ` +) + +func testAccDataSourceAuditLogSubscriptionCreate(client *Client, integrationKey string, subscriptionBody ldapi.SubscriptionPost) (*ldapi.Integration, error) { + statementResources := []string{"proj/*"} + statementActions := []string{"*"} + statements := []ldapi.StatementPost{{ + Effect: "allow", + Resources: &statementResources, + Actions: &statementActions, + }} + subscriptionBody.Statements = &statements + + sub, _, err := client.ld.IntegrationAuditLogSubscriptionsApi.CreateSubscription(client.ctx, integrationKey).SubscriptionPost(subscriptionBody).Execute() + if err != nil { + return nil, fmt.Errorf("failed to create integration subscription for test: %v", handleLdapiErr(err)) + } + return &sub, nil +} + +func testAccDataSourceAuditLogSubscriptionDelete(client *Client, integrationKey, id string) error { + _, err := client.ld.IntegrationAuditLogSubscriptionsApi.DeleteSubscription(client.ctx, integrationKey, id).Execute() + + if err != nil { + return fmt.Errorf("failed to delete integration with ID %q: %s", id, handleLdapiErr(err)) + } + return nil +} + +func 
TestAccDataSourceAuditLogSubscription_noMatchReturnsError(t *testing.T) { + accTest := os.Getenv("TF_ACC") + if accTest == "" { + t.SkipNow() + } + id := "fake-id" + integrationKey := "msteams" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccDataSourceAuditLogSubscriptionBasic, id, integrationKey), + ExpectError: regexp.MustCompile(`Error: failed to get integration with ID "fake-id": 404 Not Found`), + }, + }, + }) +} + +func TestAccDataSourceAuditLogSubscription_exists(t *testing.T) { + accTest := os.Getenv("TF_ACC") + if accTest == "" { + t.SkipNow() + } + + integrationKey := "datadog" + client, err := newClient(os.Getenv(LAUNCHDARKLY_ACCESS_TOKEN), os.Getenv(LAUNCHDARKLY_API_HOST), false) + require.NoError(t, err) + + subscriptionBody := ldapi.SubscriptionPost{ + Name: "test subscription", + Config: map[string]interface{}{ + "apiKey": "thisisasecretkey", + "hostURL": "https://api.datadoghq.com", + }, + } + sub, err := testAccDataSourceAuditLogSubscriptionCreate(client, integrationKey, subscriptionBody) + require.NoError(t, err) + + defer func() { + err := testAccDataSourceAuditLogSubscriptionDelete(client, integrationKey, *sub.Id) + require.NoError(t, err) + }() + + resourceName := "data.launchdarkly_audit_log_subscription.test" + resource.Test(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccDataSourceAuditLogSubscriptionExists, *sub.Id, integrationKey), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, "id"), + resource.TestCheckResourceAttr(resourceName, "id", *sub.Id), + ), + }, + }, + }) +} diff --git a/launchdarkly/data_source_launchdarkly_environment.go b/launchdarkly/data_source_launchdarkly_environment.go index ae9d88bd..18ade538 100644 --- a/launchdarkly/data_source_launchdarkly_environment.go +++ b/launchdarkly/data_source_launchdarkly_environment.go @@ -1,20 +1,25 @@ package launchdarkly -import "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +import ( + "context" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) func dataSourceEnvironment() *schema.Resource { envSchema := dataSourceEnvironmentSchema(false) envSchema[PROJECT_KEY] = &schema.Schema{ - Type: schema.TypeString, - Required: true, - ValidateFunc: validateKey(), + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validateKey(), } return &schema.Resource{ - Read: dataSourceEnvironmentRead, - Schema: envSchema, + ReadContext: dataSourceEnvironmentRead, + Schema: envSchema, } } -func dataSourceEnvironmentRead(d *schema.ResourceData, meta interface{}) error { - return environmentRead(d, meta, true) +func dataSourceEnvironmentRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + return environmentRead(ctx, d, meta, true) } diff --git a/launchdarkly/data_source_launchdarkly_environment_test.go b/launchdarkly/data_source_launchdarkly_environment_test.go index ece7017f..3b36dc6d 100644 --- a/launchdarkly/data_source_launchdarkly_environment_test.go +++ b/launchdarkly/data_source_launchdarkly_environment_test.go @@ -118,16 +118,16 @@ func TestAccDataSourceEnv_exists(t *testing.T) { { Config: fmt.Sprintf(testAccDataSourceEnvironment, envKey, projectKey), Check: 
resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttrSet(resourceName, "key"), - resource.TestCheckResourceAttrSet(resourceName, "name"), - resource.TestCheckResourceAttrSet(resourceName, "color"), - resource.TestCheckResourceAttr(resourceName, "key", env.Key), - resource.TestCheckResourceAttr(resourceName, "name", env.Name), - resource.TestCheckResourceAttr(resourceName, "color", env.Color), + resource.TestCheckResourceAttrSet(resourceName, KEY), + resource.TestCheckResourceAttrSet(resourceName, NAME), + resource.TestCheckResourceAttrSet(resourceName, COLOR), + resource.TestCheckResourceAttr(resourceName, KEY, env.Key), + resource.TestCheckResourceAttr(resourceName, NAME, env.Name), + resource.TestCheckResourceAttr(resourceName, COLOR, env.Color), resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), - resource.TestCheckResourceAttr(resourceName, "mobile_key", env.MobileKey), - resource.TestCheckResourceAttr(resourceName, "default_ttl", "0"), - resource.TestCheckResourceAttr(resourceName, "id", projectKey+"/"+env.Key), + resource.TestCheckResourceAttr(resourceName, MOBILE_KEY, env.MobileKey), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TTL, "0"), + resource.TestCheckResourceAttr(resourceName, ID, projectKey+"/"+env.Key), ), }, }, diff --git a/launchdarkly/data_source_launchdarkly_feature_flag.go b/launchdarkly/data_source_launchdarkly_feature_flag.go index 66c4bac2..764b135d 100644 --- a/launchdarkly/data_source_launchdarkly_feature_flag.go +++ b/launchdarkly/data_source_launchdarkly_feature_flag.go @@ -1,8 +1,10 @@ package launchdarkly import ( + "context" "fmt" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -20,11 +22,11 @@ func dataSourceFeatureFlag() *schema.Resource { BOOL_VARIATION, STRING_VARIATION, NUMBER_VARIATION, JSON_VARIATION), } return &schema.Resource{ - Read: dataSourceFeatureFlagRead, - Schema: schemaMap, + ReadContext: dataSourceFeatureFlagRead, + Schema: schemaMap, } } -func dataSourceFeatureFlagRead(d *schema.ResourceData, raw interface{}) error { - return featureFlagRead(d, raw, true) +func dataSourceFeatureFlagRead(ctx context.Context, d *schema.ResourceData, raw interface{}) diag.Diagnostics { + return featureFlagRead(ctx, d, raw, true) } diff --git a/launchdarkly/data_source_launchdarkly_feature_flag_environment.go b/launchdarkly/data_source_launchdarkly_feature_flag_environment.go index d019c924..521aea09 100644 --- a/launchdarkly/data_source_launchdarkly_feature_flag_environment.go +++ b/launchdarkly/data_source_launchdarkly_feature_flag_environment.go @@ -1,14 +1,19 @@ package launchdarkly -import "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +import ( + "context" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) func dataSourceFeatureFlagEnvironment() *schema.Resource { return &schema.Resource{ - Read: dataSourceFeatureFlagEnvironmentRead, - Schema: baseFeatureFlagEnvironmentSchema(true), + ReadContext: dataSourceFeatureFlagEnvironmentRead, + Schema: baseFeatureFlagEnvironmentSchema(true), } } -func dataSourceFeatureFlagEnvironmentRead(d *schema.ResourceData, meta interface{}) error { - return featureFlagEnvironmentRead(d, meta, true) +func dataSourceFeatureFlagEnvironmentRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + return featureFlagEnvironmentRead(ctx, d, meta, true) } diff --git 
a/launchdarkly/data_source_launchdarkly_feature_flag_environment_test.go b/launchdarkly/data_source_launchdarkly_feature_flag_environment_test.go index b5ce2f78..3309ab3b 100644 --- a/launchdarkly/data_source_launchdarkly_feature_flag_environment_test.go +++ b/launchdarkly/data_source_launchdarkly_feature_flag_environment_test.go @@ -2,7 +2,6 @@ package launchdarkly import ( "fmt" - "net/http" "os" "testing" @@ -41,30 +40,21 @@ func testAccDataSourceFeatureFlagEnvironmentScaffold(client *Client, projectKey, patch := ldapi.NewPatchWithComment(envConfigPatches) patch.SetComment("Terraform feature flag env data source test") - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.FeatureFlagsApi.PatchFeatureFlag(client.ctx, projectKey, flagKey).PatchWithComment(*patch).Execute() - }) - }) + _, _, err = client.ld.FeatureFlagsApi.PatchFeatureFlag(client.ctx, projectKey, flagKey).PatchWithComment(*patch).Execute() + if err != nil { // delete project if anything fails because otherwise we will see a // 409 error later and have to clean it up manually _ = testAccDataSourceProjectDelete(client, projectKey) return nil, fmt.Errorf("failed to create feature flag env config: %s", err.Error()) } - flagRaw, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.FeatureFlagsApi.GetFeatureFlag(client.ctx, projectKey, flagKey).Execute() - }) + flag, _, err := client.ld.FeatureFlagsApi.GetFeatureFlag(client.ctx, projectKey, flagKey).Execute() + if err != nil { _ = testAccDataSourceProjectDelete(client, projectKey) return nil, fmt.Errorf("failed to get feature flag: %s", err.Error()) } - flag, ok := flagRaw.(ldapi.FeatureFlag) - if !ok { - _ = testAccDataSourceProjectDelete(client, projectKey) - return nil, fmt.Errorf("failed to create feature flag env config") - } return &flag, nil } @@ -161,17 +151,17 @@ func TestAccDataSourceFeatureFlagEnvironment_exists(t *testing.T) { { Config: fmt.Sprintf(testAccDataSourceFeatureFlagEnvironment, envKey, flagId), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttrSet(resourceName, "flag_id"), - resource.TestCheckResourceAttr(resourceName, "env_key", envKey), - resource.TestCheckResourceAttr(resourceName, "on", fmt.Sprint(thisConfig.On)), - resource.TestCheckResourceAttr(resourceName, "track_events", fmt.Sprint(thisConfig.TrackEvents)), + resource.TestCheckResourceAttrSet(resourceName, FLAG_ID), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, envKey), + resource.TestCheckResourceAttr(resourceName, ON, fmt.Sprint(thisConfig.On)), + resource.TestCheckResourceAttr(resourceName, TRACK_EVENTS, fmt.Sprint(thisConfig.TrackEvents)), resource.TestCheckResourceAttr(resourceName, "rules.0.variation", fmt.Sprint(*thisConfig.Rules[0].Variation)), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.attribute", thisConfig.Rules[0].Clauses[0].Attribute), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.op", thisConfig.Rules[0].Clauses[0].Op), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.values.0", fmt.Sprint(thisConfig.Rules[0].Clauses[0].Values[0])), resource.TestCheckResourceAttr(resourceName, "prerequisites.0.flag_key", thisConfig.Prerequisites[0].Key), resource.TestCheckResourceAttr(resourceName, "prerequisites.0.variation", fmt.Sprint(thisConfig.Prerequisites[0].Variation)), - resource.TestCheckResourceAttr(resourceName, "off_variation", 
fmt.Sprint(*thisConfig.OffVariation)), + resource.TestCheckResourceAttr(resourceName, OFF_VARIATION, fmt.Sprint(*thisConfig.OffVariation)), resource.TestCheckResourceAttr(resourceName, "targets.0.values.#", fmt.Sprint(len(thisConfig.Targets[0].Values))), resource.TestCheckResourceAttr(resourceName, "targets.0.variation", "1"), ), @@ -179,10 +169,10 @@ func TestAccDataSourceFeatureFlagEnvironment_exists(t *testing.T) { { Config: fmt.Sprintf(testAccDataSourceFeatureFlagEnvironment, "production", flagId), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttrSet(resourceName, "flag_id"), - resource.TestCheckResourceAttr(resourceName, "env_key", "production"), - resource.TestCheckResourceAttr(resourceName, "on", fmt.Sprint(otherConfig.On)), - resource.TestCheckResourceAttr(resourceName, "track_events", fmt.Sprint(otherConfig.TrackEvents)), + resource.TestCheckResourceAttrSet(resourceName, FLAG_ID), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "production"), + resource.TestCheckResourceAttr(resourceName, ON, fmt.Sprint(otherConfig.On)), + resource.TestCheckResourceAttr(resourceName, TRACK_EVENTS, fmt.Sprint(otherConfig.TrackEvents)), resource.TestCheckResourceAttr(resourceName, "rules.#", fmt.Sprint(len(otherConfig.Rules))), resource.TestCheckResourceAttr(resourceName, "prerequisites.#", fmt.Sprint(len(otherConfig.Prerequisites))), resource.TestCheckResourceAttr(resourceName, "targets.#", fmt.Sprint(len(otherConfig.Targets))), diff --git a/launchdarkly/data_source_launchdarkly_feature_flag_test.go b/launchdarkly/data_source_launchdarkly_feature_flag_test.go index 5371d023..74b0984c 100644 --- a/launchdarkly/data_source_launchdarkly_feature_flag_test.go +++ b/launchdarkly/data_source_launchdarkly_feature_flag_test.go @@ -100,17 +100,18 @@ func TestAccDataSourceFeatureFlag_exists(t *testing.T) { { Config: fmt.Sprintf(testAccDataSourceFeatureFlag, flagKey, projectKey), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttrSet(resourceName, "key"), - resource.TestCheckResourceAttrSet(resourceName, "name"), - resource.TestCheckResourceAttrSet(resourceName, "project_key"), - resource.TestCheckResourceAttr(resourceName, "key", flag.Key), - resource.TestCheckResourceAttr(resourceName, "name", flag.Name), - resource.TestCheckResourceAttr(resourceName, "description", *flag.Description), - resource.TestCheckResourceAttr(resourceName, "temporary", "true"), + resource.TestCheckResourceAttrSet(resourceName, KEY), + resource.TestCheckResourceAttrSet(resourceName, NAME), + resource.TestCheckResourceAttrSet(resourceName, PROJECT_KEY), + resource.TestCheckResourceAttr(resourceName, KEY, flag.Key), + resource.TestCheckResourceAttr(resourceName, NAME, flag.Name), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, *flag.Description), + resource.TestCheckResourceAttr(resourceName, TEMPORARY, "true"), resource.TestCheckResourceAttr(resourceName, "variations.#", "2"), resource.TestCheckResourceAttr(resourceName, "variations.0.value", "true"), resource.TestCheckResourceAttr(resourceName, "variations.1.value", "false"), - resource.TestCheckResourceAttr(resourceName, "id", projectKey+"/"+flag.Key), + resource.TestCheckResourceAttr(resourceName, ID, projectKey+"/"+flag.Key), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_environment_id", "true"), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_mobile_key", "false"), ), 
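Aside (not part of the patch): the hunks above and below all follow one recurring terraform-plugin-sdk v2 migration pattern — data source read functions move from `Read: func(d, meta) error` to `ReadContext: func(ctx, d, meta) diag.Diagnostics`, and plain SDK validators are wrapped with `validation.ToDiagFunc` so they can be supplied as `ValidateDiagFunc` (except on TypeList elements, per hashicorp/terraform-plugin-sdk#734, as noted earlier). Below is a minimal sketch of that pattern for reference only; `dataSourceExample`, its `"key"` attribute, and the 64-character limit are made up for illustration, and canonical import paths are used.

package launchdarkly

import (
	"context"

	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)

// dataSourceExample is a hypothetical data source illustrating the SDK v2 style
// used throughout this change set: ReadContext returning diag.Diagnostics, and
// ValidateDiagFunc produced by wrapping a classic validator with ToDiagFunc.
func dataSourceExample() *schema.Resource {
	return &schema.Resource{
		ReadContext: dataSourceExampleRead,
		Schema: map[string]*schema.Schema{
			"key": {
				Type:             schema.TypeString,
				Required:         true,
				ValidateDiagFunc: validation.ToDiagFunc(validation.StringLenBetween(1, 64)),
			},
		},
	}
}

// dataSourceExampleRead uses the context-aware signature; errors are converted
// into diag.Diagnostics instead of being returned as plain errors.
func dataSourceExampleRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
	var diags diag.Diagnostics
	d.SetId(d.Get("key").(string))
	return diags
}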
diff --git a/launchdarkly/data_source_launchdarkly_flag_trigger.go b/launchdarkly/data_source_launchdarkly_flag_trigger.go new file mode 100644 index 00000000..052e82a7 --- /dev/null +++ b/launchdarkly/data_source_launchdarkly_flag_trigger.go @@ -0,0 +1,25 @@ +package launchdarkly + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func dataSourceFlagTrigger() *schema.Resource { + schemaMap := baseFlagTriggerSchema(true) + schemaMap[ID] = &schema.Schema{ + Type: schema.TypeString, + Required: true, + Description: "The flag trigger resource ID. This can be found on your trigger URL - please see docs for more info", + } + return &schema.Resource{ + ReadContext: dataSourceFlagTriggerRead, + Schema: schemaMap, + } +} + +func dataSourceFlagTriggerRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + return flagTriggerRead(ctx, d, metaRaw, true) +} diff --git a/launchdarkly/data_source_launchdarkly_flag_trigger_test.go b/launchdarkly/data_source_launchdarkly_flag_trigger_test.go new file mode 100644 index 00000000..a16e1d54 --- /dev/null +++ b/launchdarkly/data_source_launchdarkly_flag_trigger_test.go @@ -0,0 +1,115 @@ +package launchdarkly + +import ( + "fmt" + "os" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + ldapi "github.com/launchdarkly/api-client-go/v7" + "github.com/stretchr/testify/require" +) + +const ( + testAccDataSourceFlagTrigger = ` +data "launchdarkly_flag_trigger" "test" { + project_key = "%s" + env_key = "production" + flag_key = "%s" + id = "%s" +} +` +) + +func testAccDataSourceFlagTriggerScaffold(client *Client, projectKey, flagKey string, triggerBody *ldapi.TriggerPost) (*ldapi.TriggerWorkflowRep, error) { + _, err := testAccDataSourceFeatureFlagScaffold(client, projectKey, *ldapi.NewFeatureFlagBody("Trigger Test", flagKey)) + if err != nil { + return nil, err + } + trigger, _, err := client.ld.FlagTriggersApi.CreateTriggerWorkflow(client.ctx, projectKey, "production", flagKey).TriggerPost(*triggerBody).Execute() + if err != nil { + return nil, err + } + return &trigger, nil +} + +func TestAccDataSourceFlagTrigger_noMatchReturnsError(t *testing.T) { + id := "nonexistent-id" + accTest := os.Getenv("TF_ACC") + if accTest == "" { + t.SkipNow() + } + client, err := newClient(os.Getenv(LAUNCHDARKLY_ACCESS_TOKEN), os.Getenv(LAUNCHDARKLY_API_HOST), false) + require.NoError(t, err) + projectKey := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + flagKey := "trigger-test" + _, err = testAccDataSourceFeatureFlagScaffold(client, projectKey, *ldapi.NewFeatureFlagBody("Trigger Test", flagKey)) + require.NoError(t, err) + + defer func() { + err := testAccDataSourceProjectDelete(client, projectKey) + require.NoError(t, err) + }() + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccDataSourceFlagTrigger, projectKey, flagKey, id), + // the integration key will not appear here since it is not set on the data source + ExpectError: regexp.MustCompile(`Error: failed to get trigger with ID `), + }, + }, + }) +} + +func TestAccDataSourceFlagTrigger_exists(t *testing.T) { + accTest := os.Getenv("TF_ACC") + if accTest == "" { + t.SkipNow() + } + + client, err := 
newClient(os.Getenv(LAUNCHDARKLY_ACCESS_TOKEN), os.Getenv(LAUNCHDARKLY_API_HOST), false) + require.NoError(t, err) + projectKey := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + flagKey := "trigger-test" + instructions := []map[string]interface{}{{"kind": "turnFlagOff"}} + post := ldapi.NewTriggerPost("datadog") + post.Instructions = &instructions + trigger, err := testAccDataSourceFlagTriggerScaffold(client, projectKey, flagKey, post) + require.NoError(t, err) + + defer func() { + err := testAccDataSourceProjectDelete(client, projectKey) + require.NoError(t, err) + }() + + resourceName := "data.launchdarkly_flag_trigger.test" + resource.Test(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccDataSourceFlagTrigger, projectKey, flagKey, *trigger.Id), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "id", *trigger.Id), + resource.TestCheckResourceAttrSet(resourceName, "maintainer_id"), + resource.TestCheckResourceAttrSet(resourceName, "enabled"), + resource.TestCheckResourceAttr(resourceName, "instructions.0.kind", "turnFlagOff"), + resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), + resource.TestCheckResourceAttr(resourceName, "env_key", "production"), + resource.TestCheckResourceAttr(resourceName, "flag_key", flagKey), + resource.TestCheckResourceAttr(resourceName, "integration_key", *trigger.IntegrationKey), + ), + }, + }, + }) + +} diff --git a/launchdarkly/data_source_launchdarkly_metric.go b/launchdarkly/data_source_launchdarkly_metric.go new file mode 100644 index 00000000..b7868555 --- /dev/null +++ b/launchdarkly/data_source_launchdarkly_metric.go @@ -0,0 +1,20 @@ +package launchdarkly + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func dataSourceMetric() *schema.Resource { + schemaMap := baseMetricSchema(true) + return &schema.Resource{ + ReadContext: dataSourceMetricRead, + Schema: schemaMap, + } +} + +func dataSourceMetricRead(ctx context.Context, d *schema.ResourceData, raw interface{}) diag.Diagnostics { + return metricRead(ctx, d, raw, true) +} diff --git a/launchdarkly/data_source_launchdarkly_metric_test.go b/launchdarkly/data_source_launchdarkly_metric_test.go new file mode 100644 index 00000000..316a7fab --- /dev/null +++ b/launchdarkly/data_source_launchdarkly_metric_test.go @@ -0,0 +1,133 @@ +package launchdarkly + +import ( + "fmt" + "os" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + ldapi "github.com/launchdarkly/api-client-go/v7" + "github.com/stretchr/testify/require" +) + +const ( + testAccDataSourceMetric = ` +data "launchdarkly_metric" "testing" { + key = "%s" + project_key = "%s" +} +` +) + +func testAccDataSourceMetricScaffold(client *Client, projectKey string, metricBody ldapi.MetricPost) (*ldapi.MetricRep, error) { + projectBody := ldapi.ProjectPost{ + Name: "Metric Test Project", + Key: projectKey, + } + project, err := testAccDataSourceProjectCreate(client, projectBody) + if err != nil { + return nil, err + } + + metric, _, err := client.ld.MetricsApi.PostMetric(client.ctx, project.Key).MetricPost(metricBody).Execute() + if err != nil { + return nil, err + } + + return &metric, nil +} + +func 
TestAccDataSourceMetric_noMatchReturnsError(t *testing.T) { + accTest := os.Getenv("TF_ACC") + if accTest == "" { + t.SkipNow() + } + client, err := newClient(os.Getenv(LAUNCHDARKLY_ACCESS_TOKEN), os.Getenv(LAUNCHDARKLY_API_HOST), false) + require.NoError(t, err) + projectKey := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + projectBody := ldapi.ProjectPost{ + Name: "Terraform Metric Test Project", + Key: projectKey, + } + project, err := testAccDataSourceProjectCreate(client, projectBody) + require.NoError(t, err) + + defer func() { + err := testAccDataSourceProjectDelete(client, projectKey) + require.NoError(t, err) + }() + + metricKey := "nonexistent-metric" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccDataSourceMetric, metricKey, project.Key), + ExpectError: regexp.MustCompile("Error: 404 Not Found"), + }, + }, + }) +} + +func TestAccDataSourceMetric_exists(t *testing.T) { + accTest := os.Getenv("TF_ACC") + if accTest == "" { + t.SkipNow() + } + + projectKey := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + client, err := newClient(os.Getenv(LAUNCHDARKLY_ACCESS_TOKEN), os.Getenv(LAUNCHDARKLY_API_HOST), false) + require.NoError(t, err) + + metricName := "Metric Data Source Test" + metricKey := "metric-ds-testing" + metricUrlKind := "substring" + metricUrlSubstring := "foo" + metricBody := ldapi.MetricPost{ + Name: &metricName, + Key: metricKey, + Kind: "pageview", + Urls: &[]ldapi.UrlPost{{ + Kind: &metricUrlKind, + Substring: &metricUrlSubstring, + }}, + Description: ldapi.PtrString("a metric to test the terraform metric data source"), + } + metric, err := testAccDataSourceMetricScaffold(client, projectKey, metricBody) + require.NoError(t, err) + + defer func() { + err := testAccDataSourceProjectDelete(client, projectKey) + require.NoError(t, err) + }() + + resourceName := "data.launchdarkly_metric.testing" + resource.Test(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccDataSourceMetric, metricKey, projectKey), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, KEY), + resource.TestCheckResourceAttrSet(resourceName, NAME), + resource.TestCheckResourceAttrSet(resourceName, PROJECT_KEY), + resource.TestCheckResourceAttr(resourceName, KEY, metric.Key), + resource.TestCheckResourceAttr(resourceName, NAME, metric.Name), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, *metric.Description), + resource.TestCheckResourceAttr(resourceName, ID, projectKey+"/"+metric.Key), + resource.TestCheckResourceAttr(resourceName, KIND, metric.Kind), + resource.TestCheckResourceAttr(resourceName, "urls.0.kind", metricUrlKind), + resource.TestCheckResourceAttr(resourceName, "urls.0.substring", metricUrlSubstring), + ), + }, + }, + }) +} diff --git a/launchdarkly/data_source_launchdarkly_project.go b/launchdarkly/data_source_launchdarkly_project.go index 3d410b4b..d1d8f24e 100644 --- a/launchdarkly/data_source_launchdarkly_project.go +++ b/launchdarkly/data_source_launchdarkly_project.go @@ -1,12 +1,15 @@ package launchdarkly import ( + "context" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) func dataSourceProject() *schema.Resource { return &schema.Resource{ - Read: 
dataSourceProjectRead, + ReadContext: dataSourceProjectRead, Schema: map[string]*schema.Schema{ KEY: { @@ -55,6 +58,6 @@ func dataSourceProject() *schema.Resource { } } -func dataSourceProjectRead(d *schema.ResourceData, meta interface{}) error { - return projectRead(d, meta, true) +func dataSourceProjectRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + return projectRead(ctx, d, meta, true) } diff --git a/launchdarkly/data_source_launchdarkly_project_test.go b/launchdarkly/data_source_launchdarkly_project_test.go index d6bcbd85..3eb7b091 100644 --- a/launchdarkly/data_source_launchdarkly_project_test.go +++ b/launchdarkly/data_source_launchdarkly_project_test.go @@ -100,11 +100,11 @@ func TestAccDataSourceProject_exists(t *testing.T) { { Config: fmt.Sprintf(testAccProjectExists, projectKey), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttrSet(resourceName, "key"), - resource.TestCheckResourceAttrSet(resourceName, "name"), - resource.TestCheckResourceAttr(resourceName, "key", project.Key), - resource.TestCheckResourceAttr(resourceName, "name", project.Name), - resource.TestCheckResourceAttr(resourceName, "id", project.Id), + resource.TestCheckResourceAttrSet(resourceName, KEY), + resource.TestCheckResourceAttrSet(resourceName, NAME), + resource.TestCheckResourceAttr(resourceName, KEY, project.Key), + resource.TestCheckResourceAttr(resourceName, NAME, project.Name), + resource.TestCheckResourceAttr(resourceName, ID, project.Id), resource.TestCheckResourceAttr(resourceName, "tags.#", "1"), // TODO: remove deprecated client_side_availability attribute tests pending next major release resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_environment_id", "false"), diff --git a/launchdarkly/data_source_launchdarkly_relay_proxy_configuration.go b/launchdarkly/data_source_launchdarkly_relay_proxy_configuration.go new file mode 100644 index 00000000..e6e0307a --- /dev/null +++ b/launchdarkly/data_source_launchdarkly_relay_proxy_configuration.go @@ -0,0 +1,39 @@ +package launchdarkly + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func dataSourceRelayProxyConfig() *schema.Resource { + return &schema.Resource{ + ReadContext: dataSourceRelayProxyRead, + + Schema: map[string]*schema.Schema{ + ID: { + Type: schema.TypeString, + Required: true, + Description: "The Relay Proxy configuration's unique 24 character ID", + }, + NAME: { + Type: schema.TypeString, + Description: "A human-friendly name for the Relay Proxy configuration", + Computed: true, + }, + POLICY: policyStatementsSchema(policyStatementSchemaOptions{required: false}), + DISPLAY_KEY: { + Type: schema.TypeString, + Computed: true, + Description: "The last four characters of the full_key.", + }, + }, + } +} + +func dataSourceRelayProxyRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { + id := d.Get(ID).(string) + d.SetId(id) + return relayProxyConfigRead(ctx, d, m, true) +} diff --git a/launchdarkly/data_source_launchdarkly_relay_proxy_configuration_test.go b/launchdarkly/data_source_launchdarkly_relay_proxy_configuration_test.go new file mode 100644 index 00000000..35b37094 --- /dev/null +++ b/launchdarkly/data_source_launchdarkly_relay_proxy_configuration_test.go @@ -0,0 +1,140 @@ +package launchdarkly + +import ( + "fmt" + "os" + "regexp" + "testing" + + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + ldapi "github.com/launchdarkly/api-client-go/v7" + "github.com/stretchr/testify/require" +) + +const ( + testAccDataSourceRelayProxyConfig = ` +data "launchdarkly_relay_proxy_configuration" "test" { + id = "%s" +} +` +) + +func TestAccDataSourceRelayProxyConfig_noMatchReturnsError(t *testing.T) { + accTest := os.Getenv("TF_ACC") + if accTest == "" { + t.SkipNow() + } + + invalidID := "31e801b0f65c6216806bd53b" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccDataSourceRelayProxyConfig, invalidID), + ExpectError: regexp.MustCompile(fmt.Sprintf("Relay Proxy configuration with id %q not found", invalidID)), + }, + }, + }) +} + +func TestAccDataSourceRelayProxyConfig_exists(t *testing.T) { + accTest := os.Getenv("TF_ACC") + if accTest == "" { + t.SkipNow() + } + client, err := newClient(os.Getenv(LAUNCHDARKLY_ACCESS_TOKEN), os.Getenv(LAUNCHDARKLY_API_HOST), false) + require.NoError(t, err) + + name := "test config" + resourceSpec := "proj/*:env/*" + policy := []ldapi.StatementRep{{ + Resources: &([]string{resourceSpec}), + Actions: &([]string{"*"}), + Effect: "allow", + }} + + post := ldapi.NewRelayAutoConfigPost(name, policy) + config, _, err := client.ld.RelayProxyConfigurationsApi.PostRelayAutoConfig(client.ctx).RelayAutoConfigPost(*post).Execute() + require.NoError(t, err) + + defer testAccDeleteRelayProxyConfig(t, client, config.Id) + + resourceName := "data.launchdarkly_relay_proxy_configuration.test" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccDataSourceRelayProxyConfig, config.Id), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, NAME, name), + resource.TestCheckResourceAttr(resourceName, DISPLAY_KEY, config.DisplayKey), + resource.TestCheckResourceAttr(resourceName, "policy.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.effect", "allow"), + resource.TestCheckResourceAttr(resourceName, "policy.0.resources.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.resources.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.resources.0", resourceSpec), + resource.TestCheckResourceAttr(resourceName, "policy.0.actions.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.actions.0", "*"), + ), + }, + }, + }) +} + +func TestAccDataSourceRelayProxyConfig_NotResource(t *testing.T) { + accTest := os.Getenv("TF_ACC") + if accTest == "" { + t.SkipNow() + } + client, err := newClient(os.Getenv(LAUNCHDARKLY_ACCESS_TOKEN), os.Getenv(LAUNCHDARKLY_API_HOST), false) + require.NoError(t, err) + + name := "test config" + resourceSpec := "proj/*:env/*" + policy := []ldapi.StatementRep{{ + NotResources: &([]string{resourceSpec}), + Actions: &([]string{"*"}), + Effect: "allow", + }} + + post := ldapi.NewRelayAutoConfigPost(name, policy) + config, _, err := client.ld.RelayProxyConfigurationsApi.PostRelayAutoConfig(client.ctx).RelayAutoConfigPost(*post).Execute() + require.NoError(t, err) + + defer testAccDeleteRelayProxyConfig(t, client, config.Id) + + resourceName := "data.launchdarkly_relay_proxy_configuration.test" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + 
Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccDataSourceRelayProxyConfig, config.Id), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, NAME, name), + resource.TestCheckResourceAttr(resourceName, DISPLAY_KEY, config.DisplayKey), + resource.TestCheckResourceAttr(resourceName, "policy.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.effect", "allow"), + resource.TestCheckResourceAttr(resourceName, "policy.0.not_resources.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.not_resources.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.not_resources.0", resourceSpec), + resource.TestCheckResourceAttr(resourceName, "policy.0.actions.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.actions.0", "*"), + ), + }, + }, + }) +} + +func testAccDeleteRelayProxyConfig(t *testing.T, client *Client, id string) { + _, err := client.ld.RelayProxyConfigurationsApi.DeleteRelayAutoConfig(client.ctx, id).Execute() + require.NoError(t, err) +} diff --git a/launchdarkly/data_source_launchdarkly_segment.go b/launchdarkly/data_source_launchdarkly_segment.go index 90b594ef..c80562d2 100644 --- a/launchdarkly/data_source_launchdarkly_segment.go +++ b/launchdarkly/data_source_launchdarkly_segment.go @@ -1,26 +1,31 @@ package launchdarkly -import "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +import ( + "context" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) func dataSourceSegment() *schema.Resource { schemaMap := baseSegmentSchema() schemaMap[PROJECT_KEY] = &schema.Schema{ - Type: schema.TypeString, - Required: true, - ValidateFunc: validateKey(), - Description: "The segment's project key.", + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validateKey(), + Description: "The segment's project key.", } schemaMap[ENV_KEY] = &schema.Schema{ - Type: schema.TypeString, - Required: true, - ValidateFunc: validateKey(), - Description: "The segment's environment key.", + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validateKey(), + Description: "The segment's environment key.", } schemaMap[KEY] = &schema.Schema{ - Type: schema.TypeString, - Required: true, - ValidateFunc: validateKey(), - Description: "The unique key that references the segment.", + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validateKey(), + Description: "The unique key that references the segment.", } schemaMap[NAME] = &schema.Schema{ Type: schema.TypeString, @@ -28,11 +33,11 @@ func dataSourceSegment() *schema.Resource { Description: "The human-friendly name for the segment.", } return &schema.Resource{ - Read: dataSourceSegmentRead, - Schema: schemaMap, + ReadContext: dataSourceSegmentRead, + Schema: schemaMap, } } -func dataSourceSegmentRead(d *schema.ResourceData, raw interface{}) error { - return segmentRead(d, raw, true) +func dataSourceSegmentRead(ctx context.Context, d *schema.ResourceData, raw interface{}) diag.Diagnostics { + return segmentRead(ctx, d, raw, true) } diff --git a/launchdarkly/data_source_launchdarkly_segment_test.go b/launchdarkly/data_source_launchdarkly_segment_test.go index 37b4db8c..c5c0189a 100644 --- a/launchdarkly/data_source_launchdarkly_segment_test.go +++ b/launchdarkly/data_source_launchdarkly_segment_test.go @@ -2,7 +2,6 @@ package launchdarkly import ( "fmt" - "net/http" "os" "regexp" "testing" @@ -46,9 +45,8 @@ func 
testAccDataSourceSegmentCreate(client *Client, projectKey, segmentKey strin Description: ldapi.PtrString("test description"), Tags: &[]string{"terraform"}, } - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.SegmentsApi.PostSegment(client.ctx, project.Key, envKey).SegmentBody(segmentBody).Execute() - }) + _, _, err = client.ld.SegmentsApi.PostSegment(client.ctx, project.Key, envKey).SegmentBody(segmentBody).Execute() + if err != nil { return nil, fmt.Errorf("failed to create segment %q in project %q: %s", segmentKey, projectKey, handleLdapiErr(err)) } @@ -60,19 +58,13 @@ func testAccDataSourceSegmentCreate(client *Client, projectKey, segmentKey strin patchReplace("/rules", properties.Rules), }, } - rawSegment, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.SegmentsApi.PatchSegment(client.ctx, projectKey, envKey, segmentKey).PatchWithComment(patch).Execute() - }) - }) + segment, _, err := client.ld.SegmentsApi.PatchSegment(client.ctx, projectKey, envKey, segmentKey).PatchWithComment(patch).Execute() + if err != nil { return nil, fmt.Errorf("failed to update segment %q in project %q: %s", segmentKey, projectKey, handleLdapiErr(err)) } - if segment, ok := rawSegment.(ldapi.UserSegment); ok { - return &segment, nil - } - return nil, fmt.Errorf("failed to create segment %q in project %q: %s", segmentKey, projectKey, handleLdapiErr(err)) + return &segment, nil } func TestAccDataSourceSegment_noMatchReturnsError(t *testing.T) { @@ -151,12 +143,12 @@ func TestAccDataSourceSegment_exists(t *testing.T) { { Config: fmt.Sprintf(testAccDataSourceSegment, segmentKey, projectKey), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttrSet(resourceName, "key"), - resource.TestCheckResourceAttr(resourceName, "name", segment.Name), - resource.TestCheckResourceAttr(resourceName, "key", segment.Key), - resource.TestCheckResourceAttr(resourceName, "id", projectKey+"/test/"+segmentKey), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), + resource.TestCheckResourceAttrSet(resourceName, KEY), + resource.TestCheckResourceAttr(resourceName, NAME, segment.Name), + resource.TestCheckResourceAttr(resourceName, KEY, segment.Key), + resource.TestCheckResourceAttr(resourceName, ID, projectKey+"/test/"+segmentKey), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.attribute", "name"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.op", "startsWith"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.values.#", "1"), @@ -165,7 +157,7 @@ func TestAccDataSourceSegment_exists(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "excluded.#", "1"), resource.TestCheckResourceAttr(resourceName, "excluded.0", "some_bad@email.com"), resource.TestCheckResourceAttr(resourceName, "tags.#", "1"), - resource.TestCheckResourceAttrSet(resourceName, "creation_date"), + resource.TestCheckResourceAttrSet(resourceName, CREATION_DATE), ), }, }, diff --git a/launchdarkly/data_source_launchdarkly_team_member.go b/launchdarkly/data_source_launchdarkly_team_member.go index 821d88ba..a70d58de 100644 --- a/launchdarkly/data_source_launchdarkly_team_member.go +++ 
b/launchdarkly/data_source_launchdarkly_team_member.go @@ -1,41 +1,51 @@ package launchdarkly import ( + "context" "fmt" - "net/http" + "net/url" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ldapi "github.com/launchdarkly/api-client-go/v7" ) +func memberSchema() map[string]*schema.Schema { + return map[string]*schema.Schema{ + EMAIL: { + Type: schema.TypeString, + Required: true, + }, + ID: { + Type: schema.TypeString, + Computed: true, + Optional: true, + }, + FIRST_NAME: { + Type: schema.TypeString, + Computed: true, + }, + LAST_NAME: { + Type: schema.TypeString, + Computed: true, + }, + ROLE: { + Type: schema.TypeString, + Computed: true, + }, + CUSTOM_ROLES: { + Type: schema.TypeSet, + Set: schema.HashString, + Elem: &schema.Schema{Type: schema.TypeString}, + Computed: true, + }, + } +} + func dataSourceTeamMember() *schema.Resource { return &schema.Resource{ - Read: dataSourceTeamMemberRead, - - Schema: map[string]*schema.Schema{ - EMAIL: { - Type: schema.TypeString, - Required: true, - }, - FIRST_NAME: { - Type: schema.TypeString, - Computed: true, - }, - LAST_NAME: { - Type: schema.TypeString, - Computed: true, - }, - ROLE: { - Type: schema.TypeString, - Computed: true, - }, - CUSTOM_ROLES: { - Type: schema.TypeSet, - Set: schema.HashString, - Elem: &schema.Schema{Type: schema.TypeString}, - Computed: true, - }, - }, + ReadContext: dataSourceTeamMemberRead, + Schema: memberSchema(), } } @@ -43,27 +53,25 @@ func getTeamMemberByEmail(client *Client, memberEmail string) (*ldapi.Member, er // this should be the max limit allowed when the member-list-max-limit flag is on teamMemberLimit := int64(1000) - membersRaw, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.AccountMembersApi.GetMembers(client.ctx).Limit(teamMemberLimit).Execute() - }) + // After changing this to query by member email, we shouldn't need the limit and recursion on requests, but leaving it in just to be extra safe + members, _, err := client.ld.AccountMembersApi.GetMembers(client.ctx).Filter(fmt.Sprintf("query:%s", url.QueryEscape(memberEmail))).Execute() + if err != nil { return nil, fmt.Errorf("failed to read team member with email: %s: %v", memberEmail, handleLdapiErr(err)) } - members := membersRaw.(ldapi.Members) totalMemberCount := int(*members.TotalCount) memberItems := members.Items membersPulled := len(memberItems) for membersPulled < totalMemberCount { offset := int64(membersPulled) - newRawMembers, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.AccountMembersApi.GetMembers(client.ctx).Limit(teamMemberLimit).Offset(offset).Execute() - }) + newMembers, _, err := client.ld.AccountMembersApi.GetMembers(client.ctx).Limit(teamMemberLimit).Offset(offset).Filter(fmt.Sprintf("query:%s", url.QueryEscape(memberEmail))).Execute() + if err != nil { return nil, fmt.Errorf("failed to read team member with email: %s: %v", memberEmail, handleLdapiErr(err)) } - newMembers := newRawMembers.(ldapi.Members) + memberItems = append(memberItems, newMembers.Items...) 
membersPulled = len(memberItems) } @@ -77,12 +85,13 @@ func getTeamMemberByEmail(client *Client, memberEmail string) (*ldapi.Member, er } -func dataSourceTeamMemberRead(d *schema.ResourceData, meta interface{}) error { +func dataSourceTeamMemberRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + var diags diag.Diagnostics client := meta.(*Client) memberEmail := d.Get(EMAIL).(string) member, err := getTeamMemberByEmail(client, memberEmail) if err != nil { - return err + return diag.FromErr(err) } d.SetId(member.Id) _ = d.Set(EMAIL, member.Email) @@ -91,8 +100,8 @@ func dataSourceTeamMemberRead(d *schema.ResourceData, meta interface{}) error { _ = d.Set(ROLE, member.Role) err = d.Set(CUSTOM_ROLES, member.CustomRoles) if err != nil { - return fmt.Errorf("failed to set custom roles on team member with email %q: %v", member.Email, err) + return diag.Errorf("failed to set custom roles on team member with email %q: %v", member.Email, err) } - return nil + return diags } diff --git a/launchdarkly/data_source_launchdarkly_team_member_test.go b/launchdarkly/data_source_launchdarkly_team_member_test.go index 0cd29d03..ff74d947 100644 --- a/launchdarkly/data_source_launchdarkly_team_member_test.go +++ b/launchdarkly/data_source_launchdarkly_team_member_test.go @@ -70,7 +70,7 @@ func TestAccDataSourceTeamMember_exists(t *testing.T) { teamMembers := make([]ldapi.Member, 0, teamMemberCount) for i := 0; i < teamMemberCount; i++ { - randomEmail := fmt.Sprintf("%s@example.com", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum)) + randomEmail := fmt.Sprintf("%s@example.com", acctest.RandStringFromCharSet(10, "abcdefghijklmnopqrstuvwxyz012346789+")) member, err := testAccDataSourceTeamMemberCreate(client, randomEmail) require.NoError(t, err) teamMembers = append(teamMembers, *member) @@ -87,11 +87,11 @@ func TestAccDataSourceTeamMember_exists(t *testing.T) { { Config: testAccDataSourceTeamMemberConfig(testMember.Email), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttrSet(resourceName, "email"), - resource.TestCheckResourceAttr(resourceName, "email", testMember.Email), - resource.TestCheckResourceAttr(resourceName, "first_name", *testMember.FirstName), - resource.TestCheckResourceAttr(resourceName, "last_name", *testMember.LastName), - resource.TestCheckResourceAttr(resourceName, "id", testMember.Id), + resource.TestCheckResourceAttrSet(resourceName, EMAIL), + resource.TestCheckResourceAttr(resourceName, EMAIL, testMember.Email), + resource.TestCheckResourceAttr(resourceName, FIRST_NAME, *testMember.FirstName), + resource.TestCheckResourceAttr(resourceName, LAST_NAME, *testMember.LastName), + resource.TestCheckResourceAttr(resourceName, ID, testMember.Id), ), }, }, diff --git a/launchdarkly/data_source_launchdarkly_team_members.go b/launchdarkly/data_source_launchdarkly_team_members.go new file mode 100644 index 00000000..8600e76f --- /dev/null +++ b/launchdarkly/data_source_launchdarkly_team_members.go @@ -0,0 +1,142 @@ +package launchdarkly + +import ( + "context" + "crypto/sha1" + "encoding/base64" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + ldapi "github.com/launchdarkly/api-client-go/v7" +) + +func dataSourceTeamMembers() *schema.Resource { + return &schema.Resource{ + ReadContext: dataSourceTeamMembersRead, + Schema: map[string]*schema.Schema{ + EMAILS: { + Type: schema.TypeList, + Required: true, + Elem: 
&schema.Schema{ + Type: schema.TypeString, + }, + }, + IGNORE_MISSING: { + Type: schema.TypeBool, + Optional: true, + Default: false, + }, + TEAM_MEMBERS: { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: memberSchema(), + }, + }, + }, + } +} + +func dataSourceTeamMembersRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := meta.(*Client) + var members []ldapi.Member + expectedCount := 0 + ignoreMissing := d.Get(IGNORE_MISSING).(bool) + + // Get our members + // There are tradeoffs to be had here + // We've decided to get all the members and filter in code for now, in order to not scale the amount of requests with team_member list size + if emails, ok := d.Get(EMAILS).([]interface{}); ok && len(emails) > 0 { + expectedCount = len(emails) + allMembers, err := getAllTeamMembers(client) + if err != nil { + return diag.FromErr(err) + } + for _, memberEmail := range emails { + var member ldapi.Member + memberFound := false + for _, foundMember := range allMembers { + if foundMember.Email == memberEmail { + member = foundMember + memberFound = true + break + } + } + if !memberFound { + if ignoreMissing { + continue + } + return diag.Errorf("No team member found for email: %s", memberEmail) + } + members = append(members, member) + } + } + + if !ignoreMissing && len(members) != expectedCount { + return diag.Errorf("unexpected number of users returned (%d != %d)", len(members), expectedCount) + } + + // Build our member list + ids := make([]string, 0, len(members)) + memberList := make([]map[string]interface{}, 0, len(members)) + for _, m := range members { + member := make(map[string]interface{}) + member[ID] = m.Id + member[EMAIL] = m.Email + member[FIRST_NAME] = m.FirstName + member[LAST_NAME] = m.LastName + member[ROLE] = m.Role + member[CUSTOM_ROLES] = m.CustomRoles + memberList = append(memberList, member) + ids = append(ids, m.Id) + } + + // Build an ID out of a hash of all the team members ids + h := sha1.New() + if _, err := h.Write([]byte(strings.Join(ids, "-"))); err != nil { + return diag.Errorf("unable to compute hash for IDs: %v", err) + } + d.SetId("team_members#" + base64.URLEncoding.EncodeToString(h.Sum(nil))) + + err := d.Set(TEAM_MEMBERS, memberList) + + if err != nil { + return diag.FromErr(err) + } + + return diags +} + +func getAllTeamMembers(client *Client) ([]ldapi.Member, error) { + // this should be the max limit allowed when the member-list-max-limit flag is on + teamMemberLimit := int64(1000) + + // After changing this to query by member email, we shouldn't need the limit and recursion on requests, but leaving it in just to be extra safe + members, _, err := client.ld.AccountMembersApi.GetMembers(client.ctx).Limit(teamMemberLimit).Execute() + + if err != nil { + return nil, fmt.Errorf("failed to read team members: %v", handleLdapiErr(err)) + } + + totalMemberCount := int(*members.TotalCount) + + memberItems := members.Items + membersPulled := len(memberItems) + for membersPulled < totalMemberCount { + offset := int64(membersPulled) + newMembers, _, err := client.ld.AccountMembersApi.GetMembers(client.ctx).Limit(teamMemberLimit).Offset(offset).Execute() + + if err != nil { + return nil, fmt.Errorf("failed to read team members: %v", handleLdapiErr(err)) + } + + memberItems = append(memberItems, newMembers.Items...) 
+ membersPulled = len(memberItems) + } + + return memberItems, nil + +} diff --git a/launchdarkly/data_source_launchdarkly_team_members_test.go b/launchdarkly/data_source_launchdarkly_team_members_test.go new file mode 100644 index 00000000..3e6042f2 --- /dev/null +++ b/launchdarkly/data_source_launchdarkly_team_members_test.go @@ -0,0 +1,121 @@ +package launchdarkly + +import ( + "fmt" + "os" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + ldapi "github.com/launchdarkly/api-client-go/v7" + "github.com/stretchr/testify/require" +) + +func testAccDataSourceTeamMembersConfig(emails string) string { + return fmt.Sprintf(` +data "launchdarkly_team_members" "test" { + emails = %s + ignore_missing = false +} +`, emails) +} + +func testAccDataSourceTeamMembersConfigIgnoreMissing(emails string) string { + return fmt.Sprintf(` +data "launchdarkly_team_members" "test" { + emails = %s + ignore_missing = true +} +`, emails) +} + +func TestAccDataSourceTeamMembers_noMatchReturnsError(t *testing.T) { + emails := `["does-not-exist@example.com"]` + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceTeamMembersConfig(emails), + ExpectError: regexp.MustCompile(`Error: No team member found for email: does-not-exist@example.com`), + }, + }, + }) +} + +func TestAccDataSourceTeamMembers_noMatchReturnsNoErrorIfIgnoreMissing(t *testing.T) { + emails := `["does-not-exist@example.com"]` + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceTeamMembersConfigIgnoreMissing(emails), + }, + }, + }) +} + +func TestAccDataSourceTeamMembers_exists(t *testing.T) { + accTest := os.Getenv("TF_ACC") + if accTest == "" { + t.SkipNow() + } + + // Populate account with dummy team members to ensure pagination is working + teamMemberCount := 15 + client, err := newClient(os.Getenv(LAUNCHDARKLY_ACCESS_TOKEN), os.Getenv(LAUNCHDARKLY_API_HOST), false) + require.NoError(t, err) + + teamMembers := make([]ldapi.Member, 0, teamMemberCount) + for i := 0; i < teamMemberCount; i++ { + randomEmail := fmt.Sprintf("%s@example.com", acctest.RandStringFromCharSet(10, "abcdefghijklmnopqrstuvwxyz012346789+")) + member, err := testAccDataSourceTeamMemberCreate(client, randomEmail) + require.NoError(t, err) + teamMembers = append(teamMembers, *member) + } + + resourceName := "data.launchdarkly_team_members.test" + testMember := teamMembers[teamMemberCount-1] + testMember2 := teamMembers[teamMemberCount-2] + testMember3 := teamMembers[teamMemberCount-3] + resource.Test(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceTeamMembersConfig(fmt.Sprintf(`["%s","%s","%s"]`, testMember.Email, testMember2.Email, testMember3.Email)), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, IGNORE_MISSING), + resource.TestCheckResourceAttr(resourceName, "team_members.#", "3"), + resource.TestCheckResourceAttr(resourceName, "team_members.0.email", testMember.Email), + resource.TestCheckResourceAttr(resourceName, "team_members.0.first_name", *testMember.FirstName), + resource.TestCheckResourceAttr(resourceName, 
"team_members.0.last_name", *testMember.LastName), + resource.TestCheckResourceAttr(resourceName, "team_members.0.id", testMember.Id), + resource.TestCheckResourceAttr(resourceName, "team_members.0.role", testMember.Role), + resource.TestCheckResourceAttr(resourceName, "team_members.1.email", testMember2.Email), + resource.TestCheckResourceAttr(resourceName, "team_members.1.first_name", *testMember2.FirstName), + resource.TestCheckResourceAttr(resourceName, "team_members.1.last_name", *testMember2.LastName), + resource.TestCheckResourceAttr(resourceName, "team_members.1.id", testMember2.Id), + resource.TestCheckResourceAttr(resourceName, "team_members.1.role", testMember2.Role), + resource.TestCheckResourceAttr(resourceName, "team_members.2.email", testMember3.Email), + resource.TestCheckResourceAttr(resourceName, "team_members.2.first_name", *testMember3.FirstName), + resource.TestCheckResourceAttr(resourceName, "team_members.2.last_name", *testMember3.LastName), + resource.TestCheckResourceAttr(resourceName, "team_members.2.id", testMember3.Id), + resource.TestCheckResourceAttr(resourceName, "team_members.2.role", testMember3.Role), + ), + }, + }, + }) + for _, member := range teamMembers { + err := testAccDataSourceTeamMemberDelete(client, member.Id) + require.NoError(t, err) + } +} diff --git a/launchdarkly/data_source_launchdarkly_webhook.go b/launchdarkly/data_source_launchdarkly_webhook.go index 5472071e..91e13b7e 100644 --- a/launchdarkly/data_source_launchdarkly_webhook.go +++ b/launchdarkly/data_source_launchdarkly_webhook.go @@ -1,6 +1,9 @@ package launchdarkly import ( + "context" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -20,11 +23,11 @@ func dataSourceWebhook() *schema.Resource { Description: "The ID of the webhook", } return &schema.Resource{ - Read: dataSourceWebhookRead, - Schema: schemaMap, + ReadContext: dataSourceWebhookRead, + Schema: schemaMap, } } -func dataSourceWebhookRead(d *schema.ResourceData, meta interface{}) error { - return webhookRead(d, meta, true) +func dataSourceWebhookRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + return webhookRead(ctx, d, meta, true) } diff --git a/launchdarkly/data_source_launchdarkly_webhook_test.go b/launchdarkly/data_source_launchdarkly_webhook_test.go index 5d177c0f..af7e0b80 100644 --- a/launchdarkly/data_source_launchdarkly_webhook_test.go +++ b/launchdarkly/data_source_launchdarkly_webhook_test.go @@ -2,7 +2,6 @@ package launchdarkly import ( "fmt" - "net/http" "os" "regexp" "testing" @@ -22,6 +21,8 @@ data "launchdarkly_webhook" "test" { ) func testAccDataSourceWebhookCreate(client *Client, webhookName string) (*ldapi.Webhook, error) { + statementResources := []string{"proj/*"} + statementActions := []string{"turnFlagOn"} webhookBody := ldapi.WebhookPost{ Url: "https://www.example.com", Sign: false, @@ -30,30 +31,24 @@ func testAccDataSourceWebhookCreate(client *Client, webhookName string) (*ldapi. 
Tags: &[]string{"terraform"}, Statements: &[]ldapi.StatementPost{ { - Resources: []string{"proj/*"}, - Actions: []string{"turnFlagOn"}, + Resources: &statementResources, + Actions: &statementActions, Effect: "allow", }, }, } - webhookRaw, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.WebhooksApi.PostWebhook(client.ctx).WebhookPost(webhookBody).Execute() - }) + webhook, _, err := client.ld.WebhooksApi.PostWebhook(client.ctx).WebhookPost(webhookBody).Execute() + if err != nil { return nil, fmt.Errorf("failed to create webhook with name %q: %s", webhookName, handleLdapiErr(err)) } - if webhook, ok := webhookRaw.(ldapi.Webhook); ok { - return &webhook, nil - } - return nil, fmt.Errorf("failed to create webhook") + return &webhook, nil } func testAccDataSourceWebhookDelete(client *Client, webhookId string) error { - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - res, err := client.ld.WebhooksApi.DeleteWebhook(client.ctx, webhookId).Execute() - return nil, res, err - }) + _, err := client.ld.WebhooksApi.DeleteWebhook(client.ctx, webhookId).Execute() + if err != nil { return fmt.Errorf("failed to delete webhook with id %q: %s", webhookId, handleLdapiErr(err)) } @@ -107,17 +102,17 @@ func TestAccDataSourceWebhook_exists(t *testing.T) { { Config: fmt.Sprintf(testAccDataSourceWebhook, webhook.Id), Check: resource.ComposeAggregateTestCheckFunc( - resource.TestCheckResourceAttrSet(resourceName, "id"), - resource.TestCheckResourceAttr(resourceName, "id", webhook.Id), - resource.TestCheckResourceAttr(resourceName, "name", webhookName), - resource.TestCheckResourceAttr(resourceName, "url", webhook.Url), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttrSet(resourceName, ID), + resource.TestCheckResourceAttr(resourceName, ID, webhook.Id), + resource.TestCheckResourceAttr(resourceName, NAME, webhookName), + resource.TestCheckResourceAttr(resourceName, URL, webhook.Url), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "tags.#", "1"), resource.TestCheckResourceAttr(resourceName, "statements.#", "1"), resource.TestCheckResourceAttr(resourceName, "statements.0.resources.0", "proj/*"), resource.TestCheckResourceAttr(resourceName, "statements.0.actions.0", "turnFlagOn"), resource.TestCheckResourceAttr(resourceName, "statements.0.effect", "allow"), - resource.TestCheckResourceAttr(resourceName, "secret", ""), // since we set Sign to false + resource.TestCheckResourceAttr(resourceName, SECRET, ""), // since we set Sign to false ), }, diff --git a/launchdarkly/environments_helper.go b/launchdarkly/environments_helper.go index 2703fb4c..1425cf49 100644 --- a/launchdarkly/environments_helper.go +++ b/launchdarkly/environments_helper.go @@ -1,10 +1,11 @@ package launchdarkly import ( + "context" "fmt" "log" - "net/http" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ldapi "github.com/launchdarkly/api-client-go/v7" @@ -20,8 +21,8 @@ func baseEnvironmentSchema(forProject bool) map[string]*schema.Schema { Required: true, Description: "A project-unique key for the new environment", // Don't force new if the environment schema will be nested in a project - ForceNew: !forProject, - ValidateFunc: validateKey(), + ForceNew: !forProject, + ValidateDiagFunc: validateKey(), }, API_KEY: { Type: 
schema.TypeString, @@ -43,8 +44,8 @@ func baseEnvironmentSchema(forProject bool) map[string]*schema.Schema { Optional: true, Default: 0, // Default TTL should be between 0 and 60 minutes: https://docs.launchdarkly.com/docs/environments - Description: "The TTL for the environment. This must be between 0 and 60 minutes. The TTL setting only applies to environments using the PHP SDK", - ValidateFunc: validation.IntBetween(0, 60), + Description: "The TTL for the environment. This must be between 0 and 60 minutes. The TTL setting only applies to environments using the PHP SDK", + ValidateDiagFunc: validation.ToDiagFunc(validation.IntBetween(0, 60)), }, SECURE_MODE: { Default: false, @@ -222,24 +223,27 @@ func rawEnvironmentConfigsToKeyList(rawEnvs []interface{}) []string { return keys } -func environmentRead(d *schema.ResourceData, meta interface{}, isDataSource bool) error { +func environmentRead(ctx context.Context, d *schema.ResourceData, meta interface{}, isDataSource bool) diag.Diagnostics { + var diags diag.Diagnostics client := meta.(*Client) projectKey := d.Get(PROJECT_KEY).(string) key := d.Get(KEY).(string) - envRaw, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.EnvironmentsApi.GetEnvironment(client.ctx, projectKey, key).Execute() - }) + env, res, err := client.ld.EnvironmentsApi.GetEnvironment(client.ctx, projectKey, key).Execute() + if isStatusNotFound(res) && !isDataSource { log.Printf("[WARN] failed to find environment with key %q in project %q, removing from state", key, projectKey) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] failed to find environment with key %q in project %q, removing from state", key, projectKey), + }) d.SetId("") - return nil + return diags } if err != nil { - return fmt.Errorf("failed to get environment with key %q for project key: %q: %v", key, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to get environment with key %q for project key: %q: %v", key, projectKey, handleLdapiErr(err)) } - env := envRaw.(ldapi.Environment) d.SetId(projectKey + "/" + key) _ = d.Set(KEY, env.Key) _ = d.Set(NAME, env.Name) @@ -257,9 +261,9 @@ func environmentRead(d *schema.ResourceData, meta interface{}, isDataSource bool if env.ApprovalSettings != nil { err = d.Set(APPROVAL_SETTINGS, approvalSettingsToResourceData(*env.ApprovalSettings)) if err != nil { - return err + return diag.FromErr(err) } } - return nil + return diags } diff --git a/launchdarkly/fallthrough_helper.go b/launchdarkly/fallthrough_helper.go index 7b866a7c..d7ad044a 100644 --- a/launchdarkly/fallthrough_helper.go +++ b/launchdarkly/fallthrough_helper.go @@ -25,10 +25,10 @@ func fallthroughSchema(forDataSource bool) *schema.Schema { Description: "Group percentage rollout by a custom attribute. 
This argument is only valid if rollout_weights is also specified", }, VARIATION: { - Type: schema.TypeInt, - Optional: true, - Description: "The integer variation index to serve in case of fallthrough", - ValidateFunc: validation.IntAtLeast(0), + Type: schema.TypeInt, + Optional: true, + Description: "The integer variation index to serve in case of fallthrough", + ValidateDiagFunc: validation.ToDiagFunc(validation.IntAtLeast(0)), }, }, }, diff --git a/launchdarkly/feature_flag_environment_helper.go b/launchdarkly/feature_flag_environment_helper.go index 21dbd0ab..42913200 100644 --- a/launchdarkly/feature_flag_environment_helper.go +++ b/launchdarkly/feature_flag_environment_helper.go @@ -1,11 +1,13 @@ package launchdarkly import ( + "context" "fmt" "log" "net/http" "strings" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ldapi "github.com/launchdarkly/api-client-go/v7" @@ -14,18 +16,18 @@ import ( func baseFeatureFlagEnvironmentSchema(forDataSource bool) map[string]*schema.Schema { return map[string]*schema.Schema{ FLAG_ID: { - Type: schema.TypeString, - Required: true, - Description: "The global feature flag's unique id in the format `/`", - ForceNew: true, - ValidateFunc: validateFlagID, + Type: schema.TypeString, + Required: true, + Description: "The global feature flag's unique id in the format `/`", + ForceNew: true, + ValidateDiagFunc: validation.ToDiagFunc(validateFlagID), }, ENV_KEY: { - Type: schema.TypeString, - Required: true, - Description: "The LaunchDarkly environment key", - ForceNew: true, - ValidateFunc: validateKey(), + Type: schema.TypeString, + Required: true, + Description: "The LaunchDarkly environment key", + ForceNew: true, + ValidateDiagFunc: validateKey(), }, ON: { Type: schema.TypeBool, @@ -44,49 +46,54 @@ func baseFeatureFlagEnvironmentSchema(forDataSource bool) map[string]*schema.Sch Default: false, }, OFF_VARIATION: { - Type: schema.TypeInt, - Required: !forDataSource, - Optional: forDataSource, - Description: "The index of the variation to serve if targeting is disabled", - ValidateFunc: validation.IntAtLeast(0), + Type: schema.TypeInt, + Required: !forDataSource, + Optional: forDataSource, + Description: "The index of the variation to serve if targeting is disabled", + ValidateDiagFunc: validation.ToDiagFunc(validation.IntAtLeast(0)), }, } } // get FeatureFlagEnvironment uses a query parameter to get the ldapi.FeatureFlag with only a single environment. 
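+// For example, with hypothetical keys (for illustration only): +// flag, res, err := getFeatureFlagEnvironment(client, "example-project", "example-flag", "production") +// The raw *http.Response is returned alongside the typed flag so callers can check isStatusNotFound(res) when the flag is missing.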
func getFeatureFlagEnvironment(client *Client, projectKey, flagKey, environmentKey string) (ldapi.FeatureFlag, *http.Response, error) { - flagRaw, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.FeatureFlagsApi.GetFeatureFlag(client.ctx, projectKey, flagKey).Env(environmentKey).Execute() - }) - flag := flagRaw.(ldapi.FeatureFlag) - return flag, res, err + return client.ld.FeatureFlagsApi.GetFeatureFlag(client.ctx, projectKey, flagKey).Env(environmentKey).Execute() } -func featureFlagEnvironmentRead(d *schema.ResourceData, raw interface{}, isDataSource bool) error { +func featureFlagEnvironmentRead(ctx context.Context, d *schema.ResourceData, raw interface{}, isDataSource bool) diag.Diagnostics { + var diags diag.Diagnostics client := raw.(*Client) flagId := d.Get(FLAG_ID).(string) projectKey, flagKey, err := flagIdToKeys(flagId) if err != nil { - return err + return diag.FromErr(err) } envKey := d.Get(ENV_KEY).(string) flag, res, err := getFeatureFlagEnvironment(client, projectKey, flagKey, envKey) if isStatusNotFound(res) && !isDataSource { log.Printf("[WARN] failed to find flag %q in project %q, removing from state", flagKey, projectKey) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] failed to find flag %q in project %q, removing from state", flagKey, projectKey), + }) d.SetId("") - return nil + return diags } if err != nil { - return fmt.Errorf("failed to get flag %q of project %q: %s", flagKey, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to get flag %q of project %q: %s", flagKey, projectKey, handleLdapiErr(err)) } environment, ok := flag.Environments[envKey] if !ok { log.Printf("[WARN] failed to find environment %q for flag %q, removing from state", envKey, flagKey) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] failed to find environment %q for flag %q, removing from state", envKey, flagKey), + }) d.SetId("") - return nil + return diags } if isDataSource { @@ -101,29 +108,29 @@ func featureFlagEnvironmentRead(d *schema.ResourceData, raw interface{}, isDataS rules, err := rulesToResourceData(environment.Rules) if err != nil { - return fmt.Errorf("failed to read rules on flag with key %q: %v", flagKey, err) + return diag.Errorf("failed to read rules on flag with key %q: %v", flagKey, err) } err = d.Set(RULES, rules) if err != nil { - return fmt.Errorf("failed to set rules on flag with key %q: %v", flagKey, err) + return diag.Errorf("failed to set rules on flag with key %q: %v", flagKey, err) } err = d.Set(TARGETS, targetsToResourceData(environment.Targets)) if err != nil { - return fmt.Errorf("failed to set targets on flag with key %q: %v", flagKey, err) + return diag.Errorf("failed to set targets on flag with key %q: %v", flagKey, err) } err = d.Set(FALLTHROUGH, fallthroughToResourceData(environment.Fallthrough)) if err != nil { - return fmt.Errorf("failed to set flag fallthrough on flag with key %q: %v", flagKey, err) + return diag.Errorf("failed to set flag fallthrough on flag with key %q: %v", flagKey, err) } err = d.Set(OFF_VARIATION, environment.OffVariation) if err != nil { - return fmt.Errorf("failed to set off_variation on flag with key %q: %v", flagKey, err) + return diag.Errorf("failed to set off_variation on flag with key %q: %v", flagKey, err) } - return nil + return diags } func patchFlagEnvPath(d *schema.ResourceData, op string) string { diff --git a/launchdarkly/feature_flags_helper.go 
b/launchdarkly/feature_flags_helper.go index 2122d5b4..8f170630 100644 --- a/launchdarkly/feature_flags_helper.go +++ b/launchdarkly/feature_flags_helper.go @@ -1,11 +1,12 @@ package launchdarkly import ( + "context" "fmt" "log" - "net/http" "strings" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ldapi "github.com/launchdarkly/api-client-go/v7" @@ -14,25 +15,25 @@ import ( func baseFeatureFlagSchema() map[string]*schema.Schema { return map[string]*schema.Schema{ PROJECT_KEY: { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: "The LaunchDarkly project key", - ValidateFunc: validateKey(), + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The LaunchDarkly project key", + ValidateDiagFunc: validateKey(), }, KEY: { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validateKey(), - Description: "A unique key that will be used to reference the flag in your code", + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateDiagFunc: validateKey(), + Description: "A unique key that will be used to reference the flag in your code", }, MAINTAINER_ID: { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: "The LaunchDarkly id of the user who will maintain the flag. If not set, the API will automatically apply the member associated with your Terraform API key or the most recently set maintainer", - ValidateFunc: validateID(), + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: "The LaunchDarkly id of the user who will maintain the flag. If not set, the API will automatically apply the member associated with your Terraform API key or the most recently set maintainer", + ValidateDiagFunc: validateID(), }, DESCRIPTION: { Type: schema.TypeString, @@ -86,16 +87,16 @@ func baseFeatureFlagSchema() map[string]*schema.Schema { Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ ON_VARIATION: { - Type: schema.TypeInt, - Required: true, - Description: "The index of the variation served when the flag is on for new environments", - ValidateFunc: validation.IntAtLeast(0), + Type: schema.TypeInt, + Required: true, + Description: "The index of the variation served when the flag is on for new environments", + ValidateDiagFunc: validation.ToDiagFunc(validation.IntAtLeast(0)), }, OFF_VARIATION: { - Type: schema.TypeInt, - Required: true, - Description: "The index of the variation served when the flag is off for new environments", - ValidateFunc: validation.IntAtLeast(0), + Type: schema.TypeInt, + Required: true, + Description: "The index of the variation served when the flag is off for new environments", + ValidateDiagFunc: validation.ToDiagFunc(validation.IntAtLeast(0)), }, }, }, @@ -109,23 +110,27 @@ func baseFeatureFlagSchema() map[string]*schema.Schema { } } -func featureFlagRead(d *schema.ResourceData, raw interface{}, isDataSource bool) error { +func featureFlagRead(ctx context.Context, d *schema.ResourceData, raw interface{}, isDataSource bool) diag.Diagnostics { + var diags diag.Diagnostics client := raw.(*Client) projectKey := d.Get(PROJECT_KEY).(string) key := d.Get(KEY).(string) - flagRaw, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.FeatureFlagsApi.GetFeatureFlag(client.ctx, projectKey, key).Execute() - }) - flag := flagRaw.(ldapi.FeatureFlag) 
+ flag, res, err := client.ld.FeatureFlagsApi.GetFeatureFlag(client.ctx, projectKey, key).Execute() + if isStatusNotFound(res) && !isDataSource { + // TODO: Can probably get rid of all of these WARN logs? log.Printf("[WARN] feature flag %q in project %q not found, removing from state", key, projectKey) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] feature flag %q in project %q not found, removing from state", key, projectKey), + }) d.SetId("") - return nil + return diags } if err != nil { - return fmt.Errorf("failed to get flag %q of project %q: %s", key, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to get flag %q of project %q: %s", key, projectKey, handleLdapiErr(err)) } transformedCustomProperties := customPropertiesToResourceData(flag.CustomProperties) @@ -152,30 +157,30 @@ func featureFlagRead(d *schema.ResourceData, raw interface{}, isDataSource bool) variationType, err := variationsToVariationType(flag.Variations) if err != nil { - return fmt.Errorf("failed to determine variation type on flag with key %q: %v", flag.Key, err) + return diag.Errorf("failed to determine variation type on flag with key %q: %v", flag.Key, err) } err = d.Set(VARIATION_TYPE, variationType) if err != nil { - return fmt.Errorf("failed to set variation type on flag with key %q: %v", flag.Key, err) + return diag.Errorf("failed to set variation type on flag with key %q: %v", flag.Key, err) } parsedVariations, err := variationsToResourceData(flag.Variations, variationType) if err != nil { - return fmt.Errorf("failed to parse variations on flag with key %q: %v", flag.Key, err) + return diag.Errorf("failed to parse variations on flag with key %q: %v", flag.Key, err) } err = d.Set(VARIATIONS, parsedVariations) if err != nil { - return fmt.Errorf("failed to set variations on flag with key %q: %v", flag.Key, err) + return diag.Errorf("failed to set variations on flag with key %q: %v", flag.Key, err) } err = d.Set(TAGS, flag.Tags) if err != nil { - return fmt.Errorf("failed to set tags on flag with key %q: %v", flag.Key, err) + return diag.Errorf("failed to set tags on flag with key %q: %v", flag.Key, err) } err = d.Set(CUSTOM_PROPERTIES, transformedCustomProperties) if err != nil { - return fmt.Errorf("failed to set custom properties on flag with key %q: %v", flag.Key, err) + return diag.Errorf("failed to set custom properties on flag with key %q: %v", flag.Key, err) } var defaults []map[string]interface{} @@ -193,7 +198,7 @@ func featureFlagRead(d *schema.ResourceData, raw interface{}, isDataSource bool) _ = d.Set(DEFAULTS, defaults) d.SetId(projectKey + "/" + key) - return nil + return diags } func flagIdToKeys(id string) (projectKey string, flagKey string, err error) { @@ -206,12 +211,10 @@ func flagIdToKeys(id string) (projectKey string, flagKey string, err error) { } func getProjectDefaultCSAandIncludeInSnippet(client *Client, projectKey string) (ldapi.ClientSideAvailability, bool, error) { - rawProject, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.ProjectsApi.GetProject(client.ctx, projectKey).Execute() - }) + project, _, err := client.ld.ProjectsApi.GetProject(client.ctx, projectKey).Execute() if err != nil { return ldapi.ClientSideAvailability{}, false, err } - project := rawProject.(ldapi.Project) + return *project.DefaultClientSideAvailability, project.IncludeInSnippetByDefault, nil } diff --git a/launchdarkly/flag_trigger_helper.go b/launchdarkly/flag_trigger_helper.go new file mode 100644 
index 00000000..f1dd3ec1 --- /dev/null +++ b/launchdarkly/flag_trigger_helper.go @@ -0,0 +1,148 @@ +package launchdarkly + +import ( + "context" + "fmt" + "log" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" +) + +func baseFlagTriggerSchema(isDataSource bool) map[string]*schema.Schema { + return map[string]*schema.Schema{ + PROJECT_KEY: { + Type: schema.TypeString, + Required: true, + Description: "The LaunchDarkly project key", + ForceNew: true, + ValidateDiagFunc: validateKey(), + }, + ENV_KEY: { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The LaunchDarkly environment key", + }, + FLAG_KEY: { + Type: schema.TypeString, + Required: true, + Description: "The key of the feature flag the trigger acts upon", + ForceNew: true, + ValidateDiagFunc: validateKey(), + }, + INTEGRATION_KEY: { + Type: schema.TypeString, + Required: !isDataSource, + Optional: isDataSource, + Description: "The unique identifier of the integration you intend to set your trigger up with. \"generic-trigger\" should be used for integrations not explicitly supported.", + ForceNew: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"generic-trigger", "datadog", "dynatrace", "honeycomb", "new-relic-apm", "signalfx"}, false)), + }, + INSTRUCTIONS: { + Type: schema.TypeList, + Required: !isDataSource, + Optional: isDataSource, + Description: "Instructions containing the action to perform when triggering. Currently supported flag actions are \"turnFlagOn\" and \"turnFlagOff\".", + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + KIND: { + Type: schema.TypeString, + Required: true, + Description: "The action to perform when triggering. Currently supported flag actions are \"turnFlagOn\" and \"turnFlagOff\".", + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"turnFlagOn", "turnFlagOff"}, false)), + }, + }, + }, + }, + TRIGGER_URL: { + Type: schema.TypeString, + Computed: true, + Description: "The unique trigger URL", + Sensitive: true, + }, + MAINTAINER_ID: { + Type: schema.TypeString, + Computed: true, + Description: "The LaunchDarkly ID of the member who maintains the trigger. The API will automatically apply the member associated with your Terraform API key or the most recently-set maintainer", + }, + ENABLED: { + Type: schema.TypeBool, + Required: !isDataSource, + Optional: isDataSource, + Description: "Whether the trigger is currently active or not. 
This property defaults to true upon creation", + }, + } +} + +func flagTriggerRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}, isDataSource bool) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) + integrationKey := d.Get(INTEGRATION_KEY).(string) + projectKey := d.Get(PROJECT_KEY).(string) + envKey := d.Get(ENV_KEY).(string) + flagKey := d.Get(FLAG_KEY).(string) + + var triggerId string + if isDataSource { + triggerId = d.Get(ID).(string) + } else { + triggerId = d.Id() + } + + trigger, res, err := client.ld.FlagTriggersApi.GetTriggerWorkflowById(client.ctx, projectKey, flagKey, envKey, triggerId).Execute() + // if the trigger does not exist it simply return an empty trigger object + if (isStatusNotFound(res) || trigger.Id == nil) && !isDataSource { + log.Printf("[WARN] failed to find %s trigger with ID %s, removing from state if present", integrationKey, triggerId) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] failed to find %s trigger with ID %s, removing from state if present", integrationKey, triggerId), + }) + d.SetId("") + return diags + } + if err != nil || trigger.Id == nil { + return diag.Errorf("failed to get %s trigger with ID %s", integrationKey, triggerId) + } + + if isDataSource { + d.SetId(*trigger.Id) + } + _ = d.Set(PROJECT_KEY, projectKey) + _ = d.Set(ENV_KEY, envKey) + _ = d.Set(FLAG_KEY, flagKey) + _ = d.Set(INTEGRATION_KEY, *trigger.IntegrationKey) + _ = d.Set(INSTRUCTIONS, *trigger.Instructions) + _ = d.Set(MAINTAINER_ID, trigger.MaintainerId) + _ = d.Set(ENABLED, trigger.Enabled) + // NOTE: we do not want to set the trigger url at any point past the create as it will always be obscured + + return diags +} + +func instructionsFromResourceData(d *schema.ResourceData, method string) []map[string]interface{} { + rawInstructions := d.Get(INSTRUCTIONS).([]interface{}) + var instructions []map[string]interface{} + switch method { + case "POST": + for _, v := range rawInstructions { + instructions = append(instructions, v.(map[string]interface{})) + } + case "PATCH": + if d.HasChange(INSTRUCTIONS) { + for _, v := range rawInstructions { + oldInstruction := v.(map[string]interface{}) + value := oldInstruction[KIND] + instructions = append(instructions, map[string]interface{}{ + KIND: "replaceTriggerActionInstructions", + VALUE: []map[string]interface{}{{ + KIND: value, + }, + }}) + } + } + } + return instructions +} diff --git a/launchdarkly/helper.go b/launchdarkly/helper.go index 390583f5..4cb3f6f4 100644 --- a/launchdarkly/helper.go +++ b/launchdarkly/helper.go @@ -2,59 +2,13 @@ package launchdarkly import ( "fmt" - "log" "math/rand" "net/http" - "strconv" "time" ldapi "github.com/launchdarkly/api-client-go/v7" ) -const ( - MAX_409_RETRIES = 5 - MAX_429_RETRIES = 20 -) - -func handleRateLimit(apiCall func() (interface{}, *http.Response, error)) (interface{}, *http.Response, error) { - obj, res, err := apiCall() - for retryCount := 0; res != nil && res.StatusCode == http.StatusTooManyRequests && retryCount < MAX_429_RETRIES; retryCount++ { - log.Println("[DEBUG] received a 429 Too Many Requests error. retrying") - resetStr := res.Header.Get("X-RateLimit-Reset") - resetInt, parseErr := strconv.ParseInt(resetStr, 10, 64) - if parseErr != nil { - log.Println("[DEBUG] could not parse X-RateLimit-Reset header. 
Sleeping for a random interval.") - randomRetrySleep() - } else { - resetTime := time.Unix(0, resetInt*int64(time.Millisecond)) - sleepDuration := time.Until(resetTime) - - // We have observed situations where LD-s retry header results in a negative sleep duration. In this case, - // multiply the duration by -1 and add jitter - if sleepDuration <= 0 { - log.Printf("[DEBUG] received a negative rate limit retry duration of %s.", sleepDuration) - sleepDuration = -1 * sleepDuration - } - sleepDurationWithJitter := sleepDuration + getRandomSleepDuration(sleepDuration) - log.Println("[DEBUG] sleeping", sleepDurationWithJitter) - time.Sleep(sleepDurationWithJitter) - } - obj, res, err = apiCall() - } - return obj, res, err - -} - -func handleNoConflict(apiCall func() (interface{}, *http.Response, error)) (interface{}, *http.Response, error) { - obj, res, err := apiCall() - for retryCount := 0; res != nil && res.StatusCode == http.StatusConflict && retryCount < MAX_409_RETRIES; retryCount++ { - log.Println("[DEBUG] received a 409 conflict. retrying") - randomRetrySleep() - obj, res, err = apiCall() - } - return obj, res, err -} - var randomRetrySleepSeeded = false // getRandomSleepDuration returns a duration between [0, maxDuration) @@ -66,12 +20,6 @@ func getRandomSleepDuration(maxDuration time.Duration) time.Duration { return time.Duration(n) } -// Sleep for a random interval between 200ms and 500ms -func randomRetrySleep() { - duration := 200*time.Millisecond + getRandomSleepDuration(300*time.Millisecond) - time.Sleep(duration) -} - func ptr(v interface{}) *interface{} { return &v } func intPtr(i int) *int { @@ -131,3 +79,11 @@ func stringSliceToInterfaceSlice(input []string) []interface{} { } return o } + +func interfaceSliceToStringSlice(input []interface{}) []string { + o := make([]string, 0, len(input)) + for _, v := range input { + o = append(o, v.(string)) + } + return o +} diff --git a/launchdarkly/helper_test.go b/launchdarkly/helper_test.go deleted file mode 100644 index 6ba99704..00000000 --- a/launchdarkly/helper_test.go +++ /dev/null @@ -1,125 +0,0 @@ -package launchdarkly - -import ( - "errors" - "net/http" - "strconv" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestHandleNoConflict(t *testing.T) { - t.Run("no retries needed", func(t *testing.T) { - t.Parallel() - calls := 0 - _, res, err := handleNoConflict(func() (interface{}, *http.Response, error) { - calls++ - return nil, &http.Response{StatusCode: http.StatusOK}, nil - }) - require.NoError(t, err) - assert.Equal(t, 1, calls) - assert.Equal(t, res.StatusCode, http.StatusOK) - }) - t.Run("max retries exceeded", func(t *testing.T) { - t.Parallel() - calls := 0 - _, res, err := handleNoConflict(func() (interface{}, *http.Response, error) { - calls++ - return nil, &http.Response{StatusCode: http.StatusConflict}, errors.New("Conflict") - }) - require.Error(t, err) - assert.Equal(t, 6, calls) - assert.Equal(t, res.StatusCode, http.StatusConflict) - }) - t.Run("conflict resolved", func(t *testing.T) { - t.Parallel() - calls := 0 - _, res, err := handleNoConflict(func() (interface{}, *http.Response, error) { - calls++ - if calls == 3 { - return nil, &http.Response{StatusCode: http.StatusOK}, nil - } - return nil, &http.Response{StatusCode: http.StatusConflict}, nil - }) - require.NoError(t, err) - assert.Equal(t, 3, calls) - assert.Equal(t, res.StatusCode, http.StatusOK) - }) -} - -func TestHandleRateLimit(t *testing.T) { - t.Run("no retries needed", 
func(t *testing.T) { - t.Parallel() - calls := 0 - _, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - calls++ - return nil, &http.Response{StatusCode: http.StatusOK}, nil - }) - require.NoError(t, err) - assert.Equal(t, 1, calls) - assert.Equal(t, res.StatusCode, http.StatusOK) - }) - t.Run("max retries exceeded", func(t *testing.T) { - t.Parallel() - calls := 0 - _, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - calls++ - res := &http.Response{StatusCode: http.StatusTooManyRequests, Header: http.Header{}} - res.Header.Set("X-RateLimit-Reset", strconv.FormatInt(time.Now().Add(100*time.Millisecond).UnixNano()/int64(time.Millisecond), 10)) - return nil, res, errors.New("Rate limit exceeded") - }) - require.Error(t, err) - assert.Equal(t, MAX_429_RETRIES+1, calls) - assert.Equal(t, res.StatusCode, http.StatusTooManyRequests) - }) - t.Run("retry resolved with header", func(t *testing.T) { - t.Parallel() - calls := 0 - _, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - calls++ - if calls == 3 { - return nil, &http.Response{StatusCode: http.StatusOK}, nil - } - res := &http.Response{StatusCode: http.StatusTooManyRequests, Header: http.Header{}} - res.Header.Set("X-RateLimit-Reset", strconv.FormatInt(time.Now().Add(100*time.Millisecond).UnixNano()/int64(time.Millisecond), 10)) - return nil, res, errors.New("Rate limit exceeded") - }) - require.NoError(t, err) - assert.Equal(t, 3, calls) - assert.Equal(t, res.StatusCode, http.StatusOK) - }) - t.Run("retry resolved with negative header", func(t *testing.T) { - t.Parallel() - calls := 0 - _, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - calls++ - if calls == 3 { - return nil, &http.Response{StatusCode: http.StatusOK}, nil - } - res := &http.Response{StatusCode: http.StatusTooManyRequests, Header: http.Header{}} - res.Header.Set("X-RateLimit-Reset", strconv.FormatInt(time.Now().Add(-100*time.Millisecond).UnixNano()/int64(time.Millisecond), 10)) - return nil, res, errors.New("Rate limit exceeded") - }) - require.NoError(t, err) - assert.Equal(t, 3, calls) - assert.Equal(t, res.StatusCode, http.StatusOK) - }) - t.Run("retry resolved without header", func(t *testing.T) { - t.Parallel() - calls := 0 - _, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - calls++ - if calls == 3 { - return nil, &http.Response{StatusCode: http.StatusOK}, nil - } - res := &http.Response{StatusCode: http.StatusTooManyRequests, Header: http.Header{}} - return nil, res, errors.New("Rate limit exceeded") - }) - require.NoError(t, err) - assert.Equal(t, 3, calls) - assert.Equal(t, res.StatusCode, http.StatusOK) - }) -} diff --git a/launchdarkly/keys.go b/launchdarkly/keys.go index 80c0b8b9..16861126 100644 --- a/launchdarkly/keys.go +++ b/launchdarkly/keys.go @@ -1,90 +1,108 @@ package launchdarkly +// keys used in terraform files referencing keys in launchdarkly resource objects. +// The name of each constant is the same as its value. const ( - // keys used in terraform files referencing keys in launchdarkly resource objects. - // The name of each constant is the same as its value. 
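+// New constants should be added in alphabetical order; the //gofmts:sort directive at the top of the const block below marks the list as sorted.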
- PROJECT_KEY = "project_key" - ENV_KEY = "env_key" - KEY = "key" - FLAG_ID = "flag_id" - NAME = "name" - TAGS = "tags" - ENVIRONMENTS = "environments" + //gofmts:sort + ACTIONS = "actions" API_KEY = "api_key" - MOBILE_KEY = "mobile_key" + APPROVAL_SETTINGS = "approval_settings" + ARCHIVED = "archived" + ATTRIBUTE = "attribute" + BUCKET_BY = "bucket_by" + CAN_APPLY_DECLINED_CHANGES = "can_apply_declined_changes" + CAN_REVIEW_OWN_REQUEST = "can_review_own_request" + CLAUSES = "clauses" + CLIENT_SIDE_AVAILABILITY = "client_side_availability" CLIENT_SIDE_ID = "client_side_id" COLOR = "color" - DEFAULT_TTL = "default_ttl" - SECURE_MODE = "secure_mode" - DEFAULT_TRACK_EVENTS = "default_track_events" - REQUIRE_COMMENTS = "require_comments" + CONFIG = "config" CONFIRM_CHANGES = "confirm_changes" - DESCRIPTION = "description" - MAINTAINER_ID = "maintainer_id" - VARIATION_TYPE = "variation_type" - VARIATIONS = "variations" - TEMPORARY = "temporary" - INCLUDE_IN_SNIPPET = "include_in_snippet" - VALUE = "value" - URL = "url" - SECRET = "secret" - ENABLED = "enabled" - ON = "on" - RESOURCES = "resources" - NOT_RESOURCES = "not_resources" - ACTIONS = "actions" - NOT_ACTIONS = "not_actions" - EFFECT = "effect" - POLICY = "policy" - STATEMENTS = "statements" - POLICY_STATEMENTS = "policy_statements" - INLINE_ROLES = "inline_roles" - EXCLUDED = "excluded" - INCLUDED = "included" CREATION_DATE = "creation_date" CUSTOM_PROPERTIES = "custom_properties" - EMAIL = "email" - FIRST_NAME = "first_name" - LAST_NAME = "last_name" - ROLE = "role" CUSTOM_ROLES = "custom_roles" - RULES = "rules" - ATTRIBUTE = "attribute" - OP = "op" - VALUES = "values" - VALUE_TYPE = "value_type" - NEGATE = "negate" - CLAUSES = "clauses" - WEIGHT = "weight" - BUCKET_BY = "bucket_by" - ROLLOUT_WEIGHTS = "rollout_weights" - VARIATION = "variation" - TARGETS = "targets" - PREREQUISITES = "prerequisites" - FLAG_KEY = "flag_key" - TRACK_EVENTS = "track_events" - FALLTHROUGH = "fallthrough" - KIND = "kind" - CONFIG = "config" - DEFAULT_ON_VARIATION = "default_on_variation" - DEFAULT_OFF_VARIATION = "default_off_variation" DEFAULTS = "defaults" - ON_VARIATION = "on_variation" - OFF_VARIATION = "off_variation" - SERVICE_TOKEN = "service_token" DEFAULT_API_VERSION = "default_api_version" - TOKEN = "token" + DEFAULT_CLIENT_SIDE_AVAILABILITY = "default_client_side_availability" + DEFAULT_OFF_VARIATION = "default_off_variation" + DEFAULT_ON_VARIATION = "default_on_variation" + DEFAULT_TRACK_EVENTS = "default_track_events" + DEFAULT_TTL = "default_ttl" + DESCRIPTION = "description" + DISPLAY_KEY = "display_key" + EFFECT = "effect" + EMAIL = "email" + EMAILS = "emails" + ENABLED = "enabled" + ENVIRONMENTS = "environments" + ENV_KEY = "env_key" + EVENT_KEY = "event_key" + EXCLUDED = "excluded" EXPIRE = "expire" + FALLTHROUGH = "fallthrough" + FIRST_NAME = "first_name" + FLAG_ID = "flag_id" + FLAG_KEY = "flag_key" + FULL_KEY = "full_key" ID = "id" - CLIENT_SIDE_AVAILABILITY = "client_side_availability" - DEFAULT_CLIENT_SIDE_AVAILABILITY = "default_client_side_availability" - ARCHIVED = "archived" - APPROVAL_SETTINGS = "approval_settings" - REQUIRED = "required" - CAN_REVIEW_OWN_REQUEST = "can_review_own_request" + IGNORE_MISSING = "ignore_missing" + INCLUDED = "included" + INCLUDE_IN_SNIPPET = "include_in_snippet" + INLINE_ROLES = "inline_roles" + INSTRUCTIONS = "instructions" + INTEGRATION_KEY = "integration_key" + IS_ACTIVE = "is_active" + IS_NUMERIC = "is_numeric" + KEY = "key" + KIND = "kind" + LAST_NAME = "last_name" + MAINTAINER_ID = 
"maintainer_id" MIN_NUM_APPROVALS = "min_num_approvals" - CAN_APPLY_DECLINED_CHANGES = "can_apply_declined_changes" + MOBILE_KEY = "mobile_key" + NAME = "name" + NEGATE = "negate" + NOT_ACTIONS = "not_actions" + NOT_RESOURCES = "not_resources" + OFF_VARIATION = "off_variation" + ON = "on" + ON_VARIATION = "on_variation" + OP = "op" + PATTERN = "pattern" + POLICY = "policy" + POLICY_STATEMENTS = "policy_statements" + PREREQUISITES = "prerequisites" + PROJECT_KEY = "project_key" + REQUIRED = "required" REQUIRED_APPROVAL_TAGS = "required_approval_tags" + REQUIRE_COMMENTS = "require_comments" + RESOURCES = "resources" + ROLE = "role" + ROLLOUT_WEIGHTS = "rollout_weights" + RULES = "rules" + SECRET = "secret" + SECURE_MODE = "secure_mode" + SELECTOR = "selector" + SERVICE_TOKEN = "service_token" + STATEMENTS = "statements" + SUBSTRING = "substring" + SUCCESS_CRITERIA = "success_criteria" + TAGS = "tags" + TARGETS = "targets" + TEAM_MEMBERS = "team_members" + TEMPORARY = "temporary" + TOKEN = "token" + TRACK_EVENTS = "track_events" + TRIGGER_URL = "trigger_url" + UNIT = "unit" + URL = "url" + URLS = "urls" USING_ENVIRONMENT_ID = "using_environment_id" USING_MOBILE_KEY = "using_mobile_key" + VALUE = "value" + VALUES = "values" + VALUE_TYPE = "value_type" + VARIATION = "variation" + VARIATIONS = "variations" + VARIATION_TYPE = "variation_type" + WEIGHT = "weight" ) diff --git a/launchdarkly/metrics_helper.go b/launchdarkly/metrics_helper.go new file mode 100644 index 00000000..15c9b4aa --- /dev/null +++ b/launchdarkly/metrics_helper.go @@ -0,0 +1,250 @@ +package launchdarkly + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/go-cty/cty" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + + ldapi "github.com/launchdarkly/api-client-go/v7" +) + +func baseMetricSchema(isDataSource bool) map[string]*schema.Schema { + return map[string]*schema.Schema{ + PROJECT_KEY: { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The LaunchDarkly project key", + ValidateDiagFunc: validateKey(), + }, + KEY: { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateDiagFunc: validateKey(), + Description: "A unique key that will be used to reference the metric in your code", + }, + NAME: { + Type: schema.TypeString, + Required: !isDataSource, + Optional: isDataSource, + Description: "A human-readable name for your metric", + }, + KIND: { + Type: schema.TypeString, + Required: !isDataSource, + Optional: isDataSource, + Description: "The metric type -available choices are 'pageview', 'click', and 'custom'", + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"pageview", "click", "custom"}, false)), + ForceNew: true, + }, + MAINTAINER_ID: { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: "The LaunchDarkly ID of the user who will maintain the metric. 
If not set, the API will automatically apply the member associated with your Terraform API key or the most recently-set maintainer", + ValidateDiagFunc: validateID(), + }, + DESCRIPTION: { + Type: schema.TypeString, + Optional: true, + Description: "A short description of what the metric will be used for", + }, + TAGS: tagsSchema(), + IS_ACTIVE: { + Type: schema.TypeBool, + Optional: true, + Description: "Whether the metric is active", + Default: false, + }, + IS_NUMERIC: { + Type: schema.TypeBool, + Optional: true, + Description: "Whether the metric is numeric", + Default: false, + }, + UNIT: { + Type: schema.TypeString, + Optional: true, + Description: "The unit for your metric (if numeric metric)", + }, + SELECTOR: { + Type: schema.TypeString, + Optional: true, + Description: "The CSS selector for your metric (if click metric)", + }, + EVENT_KEY: { + Type: schema.TypeString, + Optional: true, + Description: "The event key for your metric (if custom metric)", + }, + SUCCESS_CRITERIA: { + Type: schema.TypeString, + Optional: true, + Description: "The success criteria for your metric (if numeric metric)", + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"HigherThanBaseline", "LowerThanBaseline"}, false)), + }, + URLS: { + Type: schema.TypeList, + Optional: true, + Description: "List of nested `url` blocks describing URLs that you want to track the metric on", + Elem: &schema.Resource{ + Schema: metricUrlSchema(), + }, + }, + } +} + +func metricRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}, isDataSource bool) diag.Diagnostics { + client := metaRaw.(*Client) + + // Warning or errors can be collected in a slice type + var diags diag.Diagnostics + + projectKey := d.Get(PROJECT_KEY).(string) + key := d.Get(KEY).(string) + + metric, res, err := client.ld.MetricsApi.GetMetric(client.ctx, projectKey, key).Execute() + + if isStatusNotFound(res) && !isDataSource { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Metric not found", + Detail: fmt.Sprintf("[WARN] metric %q in project %q not found, removing from state", key, projectKey), + }) + d.SetId("") + return diags + } + if err != nil { + return diag.FromErr(err) + } + + _ = d.Set(KEY, metric.Key) + _ = d.Set(NAME, metric.Name) + _ = d.Set(DESCRIPTION, metric.Description) + _ = d.Set(TAGS, metric.Tags) + _ = d.Set(KIND, metric.Kind) + _ = d.Set(IS_ACTIVE, metric.IsActive) + _ = d.Set(IS_NUMERIC, metric.IsNumeric) + _ = d.Set(SELECTOR, metric.Selector) + _ = d.Set(URLS, metric.Urls) + _ = d.Set(UNIT, metric.Unit) + _ = d.Set(EVENT_KEY, metric.EventKey) + _ = d.Set(SUCCESS_CRITERIA, metric.SuccessCriteria) + + d.SetId(projectKey + "/" + key) + + return diags +} + +func metricUrlSchema() map[string]*schema.Schema { + return map[string]*schema.Schema{ + KIND: { + Type: schema.TypeString, + Required: true, + Description: "The url type - available choices are 'exact', 'canonical', 'substring' and 'regex'", + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"exact", "canonical", "substring", "regex"}, false)), + }, + URL: { + Type: schema.TypeString, + Optional: true, + Description: "The exact or canonical URL", + }, + SUBSTRING: { + Type: schema.TypeString, + Optional: true, + Description: "The URL substring", + }, + PATTERN: { + Type: schema.TypeString, + Optional: true, + Description: "The URL-matching regex", + }, + } +} + +func metricUrlsFromResourceData(d *schema.ResourceData) []ldapi.UrlPost { + schemaUrlList := d.Get(URLS).([]interface{}) + 
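// Convert each configured url block into a typed ldapi.UrlPost; each entry is mapped individually by metricUrlPostFromResourceData below. +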
urls := make([]ldapi.UrlPost, len(schemaUrlList)) + for i, url := range schemaUrlList { + urls[i] = metricUrlPostFromResourceData(url) + } + return urls +} + +func metricUrlPostFromResourceData(urlData interface{}) ldapi.UrlPost { + urlMap := urlData.(map[string]interface{}) + kind := urlMap[KIND].(string) + url := urlMap[URL].(string) + substring := urlMap[SUBSTRING].(string) + pattern := urlMap[PATTERN].(string) + urlPost := ldapi.UrlPost{ + Kind: &kind, + Url: &url, + Substring: &substring, + Pattern: &pattern, + } + return urlPost +} + +func metricIdToKeys(id string) (projectKey string, flagKey string, err error) { + if strings.Count(id, "/") != 1 { + return "", "", fmt.Errorf("found unexpected metric id format: %q expected format: 'project_key/metric_key'", id) + } + parts := strings.SplitN(id, "/", 2) + projectKey, flagKey = parts[0], parts[1] + return projectKey, flagKey, nil +} + +// Checks each of the URL config entries to make sure that the required field for each kind is set +// If it isn't return true which breaks out of a forEach down the line +func checkUrlConfigValues(key cty.Value, val cty.Value) bool { + urlKind := val.GetAttr("kind").AsString() + substringNull := val.GetAttr("substring").IsNull() + urlNull := val.GetAttr("url").IsNull() + patternNull := val.GetAttr("pattern").IsNull() + switch urlKind { + case "canonical": + // Ensure required value is set + if urlNull { + return true + } + // Disallow keys specific to other 'kind' values - these updates are ignored by the backend and lead to misleading plans being generated + if !patternNull || !substringNull { + return true + } + case "exact": + // Ensure required value is set + if urlNull { + return true + } + // Disallow keys specific to other 'kind' values - these updates are ignored by the backend and lead to misleading plans being generated + if !patternNull || !substringNull { + return true + } + case "substring": + // Ensure required value is set + if substringNull { + return true + } + // Disallow keys specific to other 'kind' values - these updates are ignored by the backend and lead to misleading plans being generated + if !patternNull || !urlNull { + return true + } + case "pattern": + // Ensure required value is set + if patternNull { + return true + } + // Disallow keys specific to other 'kind' values - these updates are ignored by the backend and lead to misleading plans being generated + if !substringNull || !urlNull { + return true + } + } + return false +} diff --git a/launchdarkly/policies_helper.go b/launchdarkly/policies_helper.go index 5105427e..1e4ae8c3 100644 --- a/launchdarkly/policies_helper.go +++ b/launchdarkly/policies_helper.go @@ -53,20 +53,24 @@ func policiesFromResourceData(d *schema.ResourceData) []ldapi.StatementPost { func policyFromResourceData(val interface{}) ldapi.StatementPost { policyMap := val.(map[string]interface{}) - p := ldapi.StatementPost{ - Resources: []string{}, - Actions: []string{}, - Effect: policyMap[EFFECT].(string), - } + statementResources := []string{} + statementActions := []string{} + for _, r := range policyMap[RESOURCES].([]interface{}) { - p.Resources = append(p.Resources, r.(string)) + statementResources = append(statementResources, r.(string)) } for _, a := range policyMap[ACTIONS].([]interface{}) { - p.Actions = append(p.Actions, a.(string)) + statementActions = append(statementActions, a.(string)) } - sort.Strings(p.Actions) - sort.Strings(p.Resources) + sort.Strings(statementActions) + sort.Strings(statementResources) + + p := ldapi.StatementPost{ + 
Resources: &statementResources, + Actions: &statementActions, + Effect: policyMap[EFFECT].(string), + } return p } @@ -83,8 +87,23 @@ func policiesToResourceData(policies []ldapi.Statement) interface{} { return transformed } +// https://godoc.org/github.com/hashicorp/terraform/helper/schema#SchemaSetFunc +type hashStatement struct { + Resources []string + Actions []string + Effect string +} + // https://godoc.org/github.com/hashicorp/terraform/helper/schema#SchemaSetFunc func policyHash(val interface{}) int { - policy := policyFromResourceData(val) + rawPolicy := policyFromResourceData(val) + // since this function runs once for each sub-field (unclear why) + // it was creating 3 different hash indices per policy since it was hashing the + // pointer addresses rather than the values themselves + policy := hashStatement{ + Resources: *rawPolicy.Resources, + Actions: *rawPolicy.Actions, + Effect: rawPolicy.Effect, + } return schema.HashString(fmt.Sprintf("%v", policy)) } diff --git a/launchdarkly/policy_statements_helper.go b/launchdarkly/policy_statements_helper.go index 36b6a50b..55f6655c 100644 --- a/launchdarkly/policy_statements_helper.go +++ b/launchdarkly/policy_statements_helper.go @@ -15,12 +15,14 @@ type policyStatementSchemaOptions struct { deprecated string description string conflictsWith []string + required bool } func policyStatementsSchema(options policyStatementSchemaOptions) *schema.Schema { - return &schema.Schema{ + schema := &schema.Schema{ Type: schema.TypeList, - Optional: true, + Optional: !options.required, + Required: options.required, MinItems: 1, Description: options.description, Deprecated: options.deprecated, @@ -64,13 +66,14 @@ func policyStatementsSchema(options policyStatementSchemaOptions) *schema.Schema MinItems: 1, }, EFFECT: { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.StringInSlice([]string{"allow", "deny"}, false), + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"allow", "deny"}, false)), }, }, }, } + return schema } func validatePolicyStatement(statement map[string]interface{}) error { @@ -97,40 +100,41 @@ func policyStatementsFromResourceData(schemaStatements []interface{}) ([]ldapi.S statements := make([]ldapi.StatementPost, 0, len(schemaStatements)) for _, stmt := range schemaStatements { statement := stmt.(map[string]interface{}) - err := validatePolicyStatement(statement) + s, err := policyStatementFromResourceData(statement) if err != nil { return statements, err } - s := policyStatementFromResourceData(statement) statements = append(statements, s) } return statements, nil } -func policyStatementFromResourceData(statement map[string]interface{}) ldapi.StatementPost { +func policyStatementFromResourceData(statement map[string]interface{}) (ldapi.StatementPost, error) { + err := validatePolicyStatement(statement) + if err != nil { + return ldapi.StatementPost{}, err + } ret := ldapi.StatementPost{ Effect: statement[EFFECT].(string), } - for _, r := range statement[RESOURCES].([]interface{}) { - ret.Resources = append(ret.Resources, r.(string)) + resources := interfaceSliceToStringSlice(statement[RESOURCES].([]interface{})) + if len(resources) > 0 { + ret.SetResources(resources) } - for _, a := range statement[ACTIONS].([]interface{}) { - ret.Actions = append(ret.Actions, a.(string)) + notResources := interfaceSliceToStringSlice(statement[NOT_RESOURCES].([]interface{})) + if len(notResources) > 0 { + ret.SetNotResources(notResources) 
} - // optional fields - rawNotResources := statement[NOT_RESOURCES].([]interface{}) - var notResources []string - for _, n := range rawNotResources { - notResources = append(notResources, n.(string)) - ret.NotResources = ¬Resources + actions := interfaceSliceToStringSlice(statement[ACTIONS].([]interface{})) + if len(actions) > 0 { + ret.SetActions(actions) } - rawNotActions := statement[NOT_ACTIONS].([]interface{}) - var notActions []string - for _, n := range rawNotActions { - notActions = append(notActions, n.(string)) - ret.NotActions = ¬Actions + notActions := interfaceSliceToStringSlice(statement[NOT_ACTIONS].([]interface{})) + if len(notActions) > 0 { + ret.SetNotActions(notActions) } - return ret + + return ret, nil } func policyStatementsToResourceData(statements []ldapi.StatementRep) []interface{} { @@ -140,18 +144,10 @@ func policyStatementsToResourceData(statements []ldapi.StatementRep) []interface EFFECT: s.Effect, } if s.Resources != nil && len(*s.Resources) > 0 { - var resources []interface{} - for _, v := range *s.Resources { - resources = append(resources, v) - } - t[RESOURCES] = resources + t[RESOURCES] = stringSliceToInterfaceSlice(*s.Resources) } if s.NotResources != nil && len(*s.NotResources) > 0 { - var notResources []interface{} - for _, v := range *s.NotResources { - notResources = append(notResources, v) - } - t[NOT_RESOURCES] = notResources + t[NOT_RESOURCES] = stringSliceToInterfaceSlice(*s.NotResources) } if s.Actions != nil && len(*s.Actions) > 0 { t[ACTIONS] = stringSliceToInterfaceSlice(*s.Actions) @@ -167,13 +163,17 @@ func policyStatementsToResourceData(statements []ldapi.StatementRep) []interface func statementsToStatementReps(policies []ldapi.Statement) []ldapi.StatementRep { statements := make([]ldapi.StatementRep, 0, len(policies)) for _, p := range policies { - rep := ldapi.StatementRep{ - Resources: p.Resources, - Actions: p.Actions, - NotResources: p.NotResources, - NotActions: p.NotActions, - Effect: p.Effect, - } + rep := ldapi.StatementRep(p) + statements = append(statements, rep) + } + return statements +} + +// The relay proxy config api requires a statementRep in the POST body +func statementPostsToStatementReps(policies []ldapi.StatementPost) []ldapi.StatementRep { + statements := make([]ldapi.StatementRep, 0, len(policies)) + for _, p := range policies { + rep := ldapi.StatementRep(p) statements = append(statements, rep) } return statements diff --git a/launchdarkly/policy_statements_helper_test.go b/launchdarkly/policy_statements_helper_test.go index 2efddfe1..b898cd6a 100644 --- a/launchdarkly/policy_statements_helper_test.go +++ b/launchdarkly/policy_statements_helper_test.go @@ -10,6 +10,12 @@ import ( ) func TestPolicyStatementsRoundTripConversion(t *testing.T) { + statementResources := []string{"proj/*"} + statementActions := []string{"*"} + statementPostResources1 := []string{"proj/*:env/*;qa_*"} + statementPostResources2 := []string{"proj/*:env/*;qa_*:/flag/*"} + statementPostActions := []string{"*"} + testCases := []struct { name string policyStatements map[string]interface{} @@ -28,8 +34,8 @@ func TestPolicyStatementsRoundTripConversion(t *testing.T) { }, expected: []ldapi.StatementPost{ { - Resources: []string{"proj/*"}, - Actions: []string{"*"}, + Resources: &statementResources, + Actions: &statementActions, Effect: "allow", }, }, @@ -52,19 +58,19 @@ func TestPolicyStatementsRoundTripConversion(t *testing.T) { }, expected: []ldapi.StatementPost{ { - Resources: []string{"proj/*:env/*;qa_*"}, - Actions: []string{"*"}, + Resources: 
&statementPostResources1, + Actions: &statementPostActions, Effect: "allow", }, { - Resources: []string{"proj/*:env/*;qa_*:/flag/*"}, - Actions: []string{"*"}, + Resources: &statementPostResources2, + Actions: &statementPostActions, Effect: "allow", }, }, }, { - name: "not_resource example", + name: "not_resources example", policyStatements: map[string]interface{}{ POLICY_STATEMENTS: []interface{}{ map[string]interface{}{ @@ -77,11 +83,30 @@ func TestPolicyStatementsRoundTripConversion(t *testing.T) { expected: []ldapi.StatementPost{ { NotResources: strArrayPtr([]string{"proj/*:env/production:flag/*"}), - Actions: []string{"*"}, + Actions: &statementPostActions, Effect: "allow", }, }, }, + { + name: "not_actions example", + policyStatements: map[string]interface{}{ + POLICY_STATEMENTS: []interface{}{ + map[string]interface{}{ + RESOURCES: []interface{}{"proj/*:env/production:flag/*"}, + NOT_ACTIONS: []interface{}{"*"}, + EFFECT: "allow", + }, + }, + }, + expected: []ldapi.StatementPost{ + { + Resources: strArrayPtr([]string{"proj/*:env/production:flag/*"}), + NotActions: &statementPostActions, + Effect: "allow", + }, + }, + }, } for _, tc := range testCases { @@ -170,13 +195,7 @@ func statementPostsToStatements(posts []ldapi.StatementPost) []ldapi.Statement { var statements []ldapi.Statement for _, p := range posts { p := p - statement := ldapi.Statement{ - Resources: &p.Resources, - NotResources: p.NotResources, - Actions: &p.Actions, - NotActions: p.NotActions, - Effect: p.Effect, - } + statement := ldapi.Statement(p) statements = append(statements, statement) } return statements diff --git a/launchdarkly/prerequisite_helper.go b/launchdarkly/prerequisite_helper.go index 682da0a0..f4a71079 100644 --- a/launchdarkly/prerequisite_helper.go +++ b/launchdarkly/prerequisite_helper.go @@ -16,17 +16,17 @@ func prerequisitesSchema() *schema.Schema { Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ FLAG_KEY: { - Type: schema.TypeString, - Required: true, - Description: "The prerequisite feature flag's key", - ValidateFunc: validateKey(), + Type: schema.TypeString, + Required: true, + Description: "The prerequisite feature flag's key", + ValidateDiagFunc: validateKey(), }, VARIATION: { - Type: schema.TypeInt, - Elem: &schema.Schema{Type: schema.TypeInt}, - Required: true, - Description: "The index of the prerequisite feature flag's variation to target", - ValidateFunc: validation.IntAtLeast(0), + Type: schema.TypeInt, + Elem: &schema.Schema{Type: schema.TypeInt}, + Required: true, + Description: "The index of the prerequisite feature flag's variation to target", + ValidateDiagFunc: validation.ToDiagFunc(validation.IntAtLeast(0)), }, }, }, diff --git a/launchdarkly/project_helper.go b/launchdarkly/project_helper.go index abcbfb81..88108b3e 100644 --- a/launchdarkly/project_helper.go +++ b/launchdarkly/project_helper.go @@ -1,32 +1,35 @@ package launchdarkly import ( + "context" "fmt" "log" - "net/http" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - ldapi "github.com/launchdarkly/api-client-go/v7" ) -func projectRead(d *schema.ResourceData, meta interface{}, isDataSource bool) error { +func projectRead(ctx context.Context, d *schema.ResourceData, meta interface{}, isDataSource bool) diag.Diagnostics { + var diags diag.Diagnostics client := meta.(*Client) projectKey := d.Get(KEY).(string) - rawProject, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return 
client.ld.ProjectsApi.GetProject(client.ctx, projectKey).Execute() - }) + project, res, err := client.ld.ProjectsApi.GetProject(client.ctx, projectKey).Execute() + // return nil error for resource reads but 404 for data source reads if isStatusNotFound(res) && !isDataSource { log.Printf("[WARN] failed to find project with key %q, removing from state if present", projectKey) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] failed to find project with key %q, removing from state if present", projectKey), + }) d.SetId("") - return nil + return diags } if err != nil { - return fmt.Errorf("failed to get project with key %q: %v", projectKey, err) + return diag.Errorf("failed to get project with key %q: %v", projectKey, err) } - project := rawProject.(ldapi.Project) defaultCSA := *project.DefaultClientSideAvailability clientSideAvailability := []map[string]interface{}{{ "using_environment_id": defaultCSA.UsingEnvironmentId, @@ -37,7 +40,7 @@ func projectRead(d *schema.ResourceData, meta interface{}, isDataSource bool) er d.SetId(project.Id) err = d.Set(CLIENT_SIDE_AVAILABILITY, clientSideAvailability) if err != nil { - return fmt.Errorf("could not set client_side_availability on project with key %q: %v", project.Key, err) + return diag.Errorf("could not set client_side_availability on project with key %q: %v", project.Key, err) } } _ = d.Set(KEY, project.Key) @@ -75,24 +78,29 @@ func projectRead(d *schema.ResourceData, meta interface{}, isDataSource bool) er err = d.Set(ENVIRONMENTS, environments) if err != nil { - return fmt.Errorf("could not set environments on project with key %q: %v", project.Key, err) + return diag.Errorf("could not set environments on project with key %q: %v", project.Key, err) + } + + err = d.Set(INCLUDE_IN_SNIPPET, project.IncludeInSnippetByDefault) + if err != nil { + return diag.Errorf("could not set include_in_snippet on project with key %q: %v", project.Key, err) } err = d.Set(INCLUDE_IN_SNIPPET, project.IncludeInSnippetByDefault) if err != nil { - return fmt.Errorf("could not set include_in_snippet on project with key %q: %v", project.Key, err) + return diag.Errorf("could not set include_in_snippet on project with key %q: %v", project.Key, err) } } err = d.Set(TAGS, project.Tags) if err != nil { - return fmt.Errorf("could not set tags on project with key %q: %v", project.Key, err) + return diag.Errorf("could not set tags on project with key %q: %v", project.Key, err) } err = d.Set(DEFAULT_CLIENT_SIDE_AVAILABILITY, clientSideAvailability) if err != nil { - return fmt.Errorf("could not set default_client_side_availability on project with key %q: %v", project.Key, err) + return diag.Errorf("could not set default_client_side_availability on project with key %q: %v", project.Key, err) } - return nil + return diags } diff --git a/launchdarkly/provider.go b/launchdarkly/provider.go index 9cf1b6d7..f718b456 100644 --- a/launchdarkly/provider.go +++ b/launchdarkly/provider.go @@ -1,10 +1,11 @@ package launchdarkly import ( - "fmt" + "context" "net/url" "strings" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -46,31 +47,41 @@ func Provider() *schema.Provider { }, }, ResourcesMap: map[string]*schema.Resource{ - "launchdarkly_project": resourceProject(), - "launchdarkly_environment": resourceEnvironment(), - "launchdarkly_feature_flag": resourceFeatureFlag(), - "launchdarkly_webhook": resourceWebhook(), - "launchdarkly_custom_role": 
resourceCustomRole(), - "launchdarkly_segment": resourceSegment(), - "launchdarkly_team_member": resourceTeamMember(), - "launchdarkly_feature_flag_environment": resourceFeatureFlagEnvironment(), - "launchdarkly_destination": resourceDestination(), - "launchdarkly_access_token": resourceAccessToken(), + "launchdarkly_project": resourceProject(), + "launchdarkly_environment": resourceEnvironment(), + "launchdarkly_feature_flag": resourceFeatureFlag(), + "launchdarkly_webhook": resourceWebhook(), + "launchdarkly_custom_role": resourceCustomRole(), + "launchdarkly_segment": resourceSegment(), + "launchdarkly_team_member": resourceTeamMember(), + "launchdarkly_feature_flag_environment": resourceFeatureFlagEnvironment(), + "launchdarkly_destination": resourceDestination(), + "launchdarkly_access_token": resourceAccessToken(), + "launchdarkly_flag_trigger": resourceFlagTrigger(), + "launchdarkly_audit_log_subscription": resourceAuditLogSubscription(), + "launchdarkly_relay_proxy_configuration": resourceRelayProxyConfig(), + "launchdarkly_metric": resourceMetric(), }, DataSourcesMap: map[string]*schema.Resource{ - "launchdarkly_team_member": dataSourceTeamMember(), - "launchdarkly_project": dataSourceProject(), - "launchdarkly_environment": dataSourceEnvironment(), - "launchdarkly_feature_flag": dataSourceFeatureFlag(), - "launchdarkly_feature_flag_environment": dataSourceFeatureFlagEnvironment(), - "launchdarkly_webhook": dataSourceWebhook(), - "launchdarkly_segment": dataSourceSegment(), + "launchdarkly_team_member": dataSourceTeamMember(), + "launchdarkly_team_members": dataSourceTeamMembers(), + "launchdarkly_project": dataSourceProject(), + "launchdarkly_environment": dataSourceEnvironment(), + "launchdarkly_feature_flag": dataSourceFeatureFlag(), + "launchdarkly_feature_flag_environment": dataSourceFeatureFlagEnvironment(), + "launchdarkly_webhook": dataSourceWebhook(), + "launchdarkly_segment": dataSourceSegment(), + "launchdarkly_flag_trigger": dataSourceFlagTrigger(), + "launchdarkly_audit_log_subscription": dataSourceAuditLogSubscription(), + "launchdarkly_relay_proxy_configuration": dataSourceRelayProxyConfig(), + "launchdarkly_metric": dataSourceMetric(), }, - ConfigureFunc: providerConfigure, + ConfigureContextFunc: providerConfigure, } } -func providerConfigure(d *schema.ResourceData) (interface{}, error) { +func providerConfigure(ctx context.Context, d *schema.ResourceData) (interface{}, diag.Diagnostics) { + var diags diag.Diagnostics host := d.Get(api_host).(string) if strings.HasPrefix(host, "http") { u, _ := url.Parse(host) @@ -80,12 +91,20 @@ func providerConfigure(d *schema.ResourceData) (interface{}, error) { oauthToken := d.Get(oauth_token).(string) if oauthToken == "" && accessToken == "" { - return nil, fmt.Errorf("either an %q or %q must be specified", access_token, oauth_token) + return nil, diag.Errorf("either an %q or %q must be specified", access_token, oauth_token) } if oauthToken != "" { - return newClient(oauthToken, host, true) + client, err := newClient(oauthToken, host, true) + if err != nil { + return client, diag.FromErr(err) + } + return client, diags } - return newClient(accessToken, host, false) + client, err := newClient(accessToken, host, false) + if err != nil { + return client, diag.FromErr(err) + } + return client, diags } diff --git a/launchdarkly/resource_launchdarkly_access_token.go b/launchdarkly/resource_launchdarkly_access_token.go index 34df7b7e..7c7feb82 100644 --- a/launchdarkly/resource_launchdarkly_access_token.go +++ 
b/launchdarkly/resource_launchdarkly_access_token.go @@ -2,6 +2,7 @@ package launchdarkly import ( "bytes" + "context" "encoding/json" "fmt" "io" @@ -9,6 +10,7 @@ import ( "net/http" "strings" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" @@ -27,11 +29,11 @@ func resourceAccessToken() *schema.Resource { conflictsWith: []string{ROLE, CUSTOM_ROLES, INLINE_ROLES}, }) return &schema.Resource{ - Create: resourceAccessTokenCreate, - Read: resourceAccessTokenRead, - Update: resourceAccessTokenUpdate, - Delete: resourceAccessTokenDelete, - Exists: resourceAccessTokenExists, + CreateContext: resourceAccessTokenCreate, + ReadContext: resourceAccessTokenRead, + UpdateContext: resourceAccessTokenUpdate, + DeleteContext: resourceAccessTokenDelete, + Exists: resourceAccessTokenExists, Schema: map[string]*schema.Schema{ NAME: { @@ -40,11 +42,11 @@ func resourceAccessToken() *schema.Resource { Optional: true, }, ROLE: { - Type: schema.TypeString, - Description: `The default built-in role for the token. Available options are "reader", "writer", and "admin"`, - Optional: true, - ValidateFunc: validation.StringInSlice([]string{"reader", "writer", "admin"}, false), - ConflictsWith: []string{CUSTOM_ROLES, POLICY_STATEMENTS}, + Type: schema.TypeString, + Description: `The default built-in role for the token. Available options are "reader", "writer", and "admin"`, + Optional: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"reader", "writer", "admin"}, false)), + ConflictsWith: []string{CUSTOM_ROLES, POLICY_STATEMENTS}, }, CUSTOM_ROLES: { Type: schema.TypeSet, @@ -64,12 +66,12 @@ func resourceAccessToken() *schema.Resource { Default: false, }, DEFAULT_API_VERSION: { - Type: schema.TypeInt, - Description: "The default API version for this token", - Optional: true, - ForceNew: true, - Computed: true, - ValidateFunc: validateAPIVersion, + Type: schema.TypeInt, + Description: "The default API version for this token", + Optional: true, + ForceNew: true, + Computed: true, + ValidateDiagFunc: validation.ToDiagFunc(validateAPIVersion), }, TOKEN: { Type: schema.TypeString, @@ -78,11 +80,11 @@ func resourceAccessToken() *schema.Resource { Sensitive: true, }, EXPIRE: { - Deprecated: "'expire' is deprecated and will be removed in the next major release of the LaunchDarkly provider", - Type: schema.TypeInt, - Description: "Replace the computed token secret with a new value. The expired secret will no longer be able to authorize usage of the LaunchDarkly API. Should be an expiration time for the current token secret, expressed as a Unix epoch time in milliseconds. Setting this to a negative value will expire the existing token immediately. To reset the token value again, change 'expire' to a new value. Setting this field at resource creation time WILL NOT set an expiration time for the token.", - Optional: true, - ValidateFunc: validation.NoZeroValues, + Deprecated: "'expire' is deprecated and will be removed in the next major release of the LaunchDarkly provider", + Type: schema.TypeInt, + Description: "Replace the computed token secret with a new value. The expired secret will no longer be able to authorize usage of the LaunchDarkly API. Should be an expiration time for the current token secret, expressed as a Unix epoch time in milliseconds. Setting this to a negative value will expire the existing token immediately. 
To reset the token value again, change 'expire' to a new value. Setting this field at resource creation time WILL NOT set an expiration time for the token.", + Optional: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.NoZeroValues), }, }, } @@ -119,10 +121,10 @@ func validateAccessTokenResource(d *schema.ResourceData) error { return nil } -func resourceAccessTokenCreate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceAccessTokenCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { err := validateAccessTokenResource(d) if err != nil { - return err + return diag.FromErr(err) } client := metaRaw.(*Client) @@ -156,34 +158,36 @@ func resourceAccessTokenCreate(d *schema.ResourceData, metaRaw interface{}) erro accessTokenBody.Role = ldapi.PtrString(accessTokenRole.(string)) } - tokenRaw, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.AccessTokensApi.PostToken(client.ctx).AccessTokenPost(accessTokenBody).Execute() - }) - token := tokenRaw.(ldapi.Token) + token, _, err := client.ld.AccessTokensApi.PostToken(client.ctx).AccessTokenPost(accessTokenBody).Execute() + if err != nil { - return fmt.Errorf("failed to create access token with name %q: %s", accessTokenName, handleLdapiErr(err)) + return diag.Errorf("failed to create access token with name %q: %s", accessTokenName, handleLdapiErr(err)) } _ = d.Set(TOKEN, token.Token) d.SetId(token.Id) - return resourceAccessTokenRead(d, metaRaw) + return resourceAccessTokenRead(ctx, d, metaRaw) } -func resourceAccessTokenRead(d *schema.ResourceData, metaRaw interface{}) error { +func resourceAccessTokenRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) accessTokenID := d.Id() - accessTokenRaw, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.AccessTokensApi.GetToken(client.ctx, accessTokenID).Execute() - }) - accessToken := accessTokenRaw.(ldapi.Token) + accessToken, res, err := client.ld.AccessTokensApi.GetToken(client.ctx, accessTokenID).Execute() + if isStatusNotFound(res) { log.Printf("[WARN] failed to find access token with id %q, removing from state", accessTokenID) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] failed to find access token with id %q, removing from state", accessTokenID), + }) d.SetId("") - return nil + return diags } if err != nil { - return fmt.Errorf("failed to get access token with id %q: %s", accessTokenID, handleLdapiErr(err)) + return diag.Errorf("failed to get access token with id %q: %s", accessTokenID, handleLdapiErr(err)) } _ = d.Set(NAME, accessToken.Name) @@ -193,7 +197,7 @@ func resourceAccessTokenRead(d *schema.ResourceData, metaRaw interface{}) error if accessToken.CustomRoleIds != nil && len(*accessToken.CustomRoleIds) > 0 { customRoleKeys, err := customRoleIDsToKeys(client, *accessToken.CustomRoleIds) if err != nil { - return err + return diag.FromErr(err) } _ = d.Set(CUSTOM_ROLES, customRoleKeys) } @@ -209,17 +213,17 @@ func resourceAccessTokenRead(d *schema.ResourceData, metaRaw interface{}) error err = d.Set(INLINE_ROLES, policyStatementsToResourceData(*policies)) } if err != nil { - return fmt.Errorf("could not set policy on access token with id %q: %v", accessTokenID, err) + return diag.Errorf("could not set policy on access token with id %q: %v", accessTokenID, err) } } - return nil + return diags } -func 
resourceAccessTokenUpdate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceAccessTokenUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { err := validateAccessTokenResource(d) if err != nil { - return err + return diag.FromErr(err) } client := metaRaw.(*Client) @@ -234,7 +238,7 @@ func resourceAccessTokenUpdate(d *schema.ResourceData, metaRaw interface{}) erro } customRoleIds, err := customRoleKeysToIDs(client, customRoleKeys) if err != nil { - return err + return diag.FromErr(err) } inlineRoles, _ := policyStatementsFromResourceData(d.Get(POLICY_STATEMENTS).([]interface{})) @@ -275,11 +279,9 @@ func resourceAccessTokenUpdate(d *schema.ResourceData, metaRaw interface{}) erro patch = append(patch, op) } - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.AccessTokensApi.PatchToken(client.ctx, accessTokenID).PatchOperation(patch).Execute() - }) + _, _, err = client.ld.AccessTokensApi.PatchToken(client.ctx, accessTokenID).PatchOperation(patch).Execute() if err != nil { - return fmt.Errorf("failed to update access token with id %q: %s", accessTokenID, handleLdapiErr(err)) + return diag.Errorf("failed to update access token with id %q: %s", accessTokenID, handleLdapiErr(err)) } // Reset the access token if the expire field has been updated @@ -290,29 +292,29 @@ func resourceAccessTokenUpdate(d *schema.ResourceData, metaRaw interface{}) erro if oldExpire != newExpire && newExpire != 0 { token, err := resetAccessToken(client, accessTokenID, newExpire) if err != nil { - return fmt.Errorf("failed to reset access token with id %q: %s", accessTokenID, handleLdapiErr(err)) + return diag.Errorf("failed to reset access token with id %q: %s", accessTokenID, handleLdapiErr(err)) } _ = d.Set(EXPIRE, newExpire) _ = d.Set(TOKEN, token.Token) } } - return resourceAccessTokenRead(d, metaRaw) + return resourceAccessTokenRead(ctx, d, metaRaw) } -func resourceAccessTokenDelete(d *schema.ResourceData, metaRaw interface{}) error { +func resourceAccessTokenDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) accessTokenID := d.Id() - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - res, err := client.ld.AccessTokensApi.DeleteToken(client.ctx, accessTokenID).Execute() - return nil, res, err - }) + _, err := client.ld.AccessTokensApi.DeleteToken(client.ctx, accessTokenID).Execute() + if err != nil { - return fmt.Errorf("failed to delete access token with id %q: %s", accessTokenID, handleLdapiErr(err)) + return diag.Errorf("failed to delete access token with id %q: %s", accessTokenID, handleLdapiErr(err)) } - return nil + return diags } func resourceAccessTokenExists(d *schema.ResourceData, metaRaw interface{}) (bool, error) { diff --git a/launchdarkly/resource_launchdarkly_access_token_test.go b/launchdarkly/resource_launchdarkly_access_token_test.go index cf14544d..3b2cd376 100644 --- a/launchdarkly/resource_launchdarkly_access_token_test.go +++ b/launchdarkly/resource_launchdarkly_access_token_test.go @@ -123,13 +123,13 @@ func TestAccAccessToken_Create(t *testing.T) { Config: fmt.Sprintf(testAccAccessTokenCreate, name), Check: resource.ComposeTestCheckFunc( testAccCheckAccessTokenExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Access token - "+name), - resource.TestCheckResourceAttr(resourceName, "role", "reader"), - resource.TestCheckResourceAttr(resourceName, 
"service_token", "false"), - resource.TestCheckResourceAttrSet(resourceName, "default_api_version"), - resource.TestCheckResourceAttrSet(resourceName, "token"), - resource.TestCheckNoResourceAttr(resourceName, "policy"), - resource.TestCheckNoResourceAttr(resourceName, "custom_roles"), + resource.TestCheckResourceAttr(resourceName, NAME, "Access token - "+name), + resource.TestCheckResourceAttr(resourceName, ROLE, "reader"), + resource.TestCheckResourceAttr(resourceName, SERVICE_TOKEN, "false"), + resource.TestCheckResourceAttrSet(resourceName, DEFAULT_API_VERSION), + resource.TestCheckResourceAttrSet(resourceName, TOKEN), + resource.TestCheckNoResourceAttr(resourceName, POLICY), + resource.TestCheckNoResourceAttr(resourceName, CUSTOM_ROLES), ), }, }, @@ -149,13 +149,13 @@ func TestAccAccessToken_CreateWithCustomRole(t *testing.T) { Config: fmt.Sprintf(testAccAccessTokenCreateWithCustomRole, name, name, name), Check: resource.ComposeTestCheckFunc( testAccCheckAccessTokenExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Access token - "+name), + resource.TestCheckResourceAttr(resourceName, NAME, "Access token - "+name), resource.TestCheckResourceAttr(resourceName, "custom_roles.#", "1"), - resource.TestCheckResourceAttr(resourceName, "service_token", "false"), - resource.TestCheckResourceAttrSet(resourceName, "default_api_version"), - resource.TestCheckResourceAttrSet(resourceName, "token"), - resource.TestCheckNoResourceAttr(resourceName, "policy"), - resource.TestCheckNoResourceAttr(resourceName, "role"), + resource.TestCheckResourceAttr(resourceName, SERVICE_TOKEN, "false"), + resource.TestCheckResourceAttrSet(resourceName, DEFAULT_API_VERSION), + resource.TestCheckResourceAttrSet(resourceName, TOKEN), + resource.TestCheckNoResourceAttr(resourceName, POLICY), + resource.TestCheckNoResourceAttr(resourceName, ROLE), ), }, }, @@ -175,13 +175,13 @@ func TestAccAccessToken_CreateWithImmutableParams(t *testing.T) { Config: fmt.Sprintf(testAccAccessTokenCreateWithImmutableParams, name), Check: resource.ComposeTestCheckFunc( testAccCheckAccessTokenExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Access token - "+name), - resource.TestCheckResourceAttr(resourceName, "role", "reader"), - resource.TestCheckResourceAttr(resourceName, "service_token", "true"), - resource.TestCheckResourceAttr(resourceName, "default_api_version", "20160426"), - resource.TestCheckResourceAttrSet(resourceName, "token"), - resource.TestCheckNoResourceAttr(resourceName, "policy"), - resource.TestCheckNoResourceAttr(resourceName, "custom_roles"), + resource.TestCheckResourceAttr(resourceName, NAME, "Access token - "+name), + resource.TestCheckResourceAttr(resourceName, ROLE, "reader"), + resource.TestCheckResourceAttr(resourceName, SERVICE_TOKEN, "true"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_API_VERSION, "20160426"), + resource.TestCheckResourceAttrSet(resourceName, TOKEN), + resource.TestCheckNoResourceAttr(resourceName, POLICY), + resource.TestCheckNoResourceAttr(resourceName, CUSTOM_ROLES), ), }, }, @@ -201,18 +201,18 @@ func TestAccAccessToken_CreateWithInlineRoles(t *testing.T) { Config: fmt.Sprintf(testAccAccessTokenCreateWithInlineRoles, name), Check: resource.ComposeTestCheckFunc( testAccCheckAccessTokenExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Access token - "+name), + resource.TestCheckResourceAttr(resourceName, NAME, "Access token - "+name), resource.TestCheckResourceAttr(resourceName, "inline_roles.#", 
"1"), resource.TestCheckResourceAttr(resourceName, "inline_roles.0.actions.#", "1"), resource.TestCheckResourceAttr(resourceName, "inline_roles.0.actions.0", "*"), resource.TestCheckResourceAttr(resourceName, "inline_roles.0.resources.#", "1"), resource.TestCheckResourceAttr(resourceName, "inline_roles.0.resources.0", "proj/*:env/staging"), resource.TestCheckResourceAttr(resourceName, "inline_roles.0.effect", "allow"), - resource.TestCheckResourceAttr(resourceName, "service_token", "false"), - resource.TestCheckResourceAttrSet(resourceName, "default_api_version"), - resource.TestCheckResourceAttrSet(resourceName, "token"), - resource.TestCheckNoResourceAttr(resourceName, "role"), - resource.TestCheckNoResourceAttr(resourceName, "custom_roles"), + resource.TestCheckResourceAttr(resourceName, SERVICE_TOKEN, "false"), + resource.TestCheckResourceAttrSet(resourceName, DEFAULT_API_VERSION), + resource.TestCheckResourceAttrSet(resourceName, TOKEN), + resource.TestCheckNoResourceAttr(resourceName, ROLE), + resource.TestCheckNoResourceAttr(resourceName, CUSTOM_ROLES), ), }, }, @@ -232,18 +232,18 @@ func TestAccAccessToken_CreateWithPolicyStatements(t *testing.T) { Config: fmt.Sprintf(testAccAccessTokenCreateWithPolicyStatements, name), Check: resource.ComposeTestCheckFunc( testAccCheckAccessTokenExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Access token - "+name), + resource.TestCheckResourceAttr(resourceName, NAME, "Access token - "+name), resource.TestCheckResourceAttr(resourceName, "policy_statements.#", "1"), resource.TestCheckResourceAttr(resourceName, "policy_statements.0.actions.#", "1"), resource.TestCheckResourceAttr(resourceName, "policy_statements.0.actions.0", "*"), resource.TestCheckResourceAttr(resourceName, "policy_statements.0.resources.#", "1"), resource.TestCheckResourceAttr(resourceName, "policy_statements.0.resources.0", "proj/*:env/staging"), resource.TestCheckResourceAttr(resourceName, "policy_statements.0.effect", "allow"), - resource.TestCheckResourceAttr(resourceName, "service_token", "false"), - resource.TestCheckResourceAttrSet(resourceName, "default_api_version"), - resource.TestCheckResourceAttrSet(resourceName, "token"), - resource.TestCheckNoResourceAttr(resourceName, "role"), - resource.TestCheckNoResourceAttr(resourceName, "custom_roles"), + resource.TestCheckResourceAttr(resourceName, SERVICE_TOKEN, "false"), + resource.TestCheckResourceAttrSet(resourceName, DEFAULT_API_VERSION), + resource.TestCheckResourceAttrSet(resourceName, TOKEN), + resource.TestCheckNoResourceAttr(resourceName, ROLE), + resource.TestCheckNoResourceAttr(resourceName, CUSTOM_ROLES), ), }, }, @@ -269,7 +269,7 @@ func TestAccAccessToken_Update(t *testing.T) { Config: fmt.Sprintf(testAccAccessTokenUpdate, name), // update regular role to policy_statements roles Check: resource.ComposeTestCheckFunc( testAccCheckAccessTokenExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Updated - "+name), + resource.TestCheckResourceAttr(resourceName, NAME, "Updated - "+name), resource.TestCheckResourceAttr(resourceName, "inline_roles.#", "1"), resource.TestCheckResourceAttr(resourceName, "inline_roles.0.actions.#", "1"), resource.TestCheckResourceAttr(resourceName, "inline_roles.0.actions.0", "*"), @@ -303,7 +303,7 @@ func TestAccAccessToken_UpdateCustomRole(t *testing.T) { Config: fmt.Sprintf(testAccAccessTokenUpdateCustomRole, name, name, name, name, name), Check: resource.ComposeTestCheckFunc( testAccCheckAccessTokenExists(resourceName), - 
resource.TestCheckResourceAttr(resourceName, "name", "Updated - "+name), + resource.TestCheckResourceAttr(resourceName, NAME, "Updated - "+name), resource.TestCheckResourceAttr(resourceName, "custom_roles.#", "2"), resource.TestCheckResourceAttr(resourceName, "custom_roles.0", name), resource.TestCheckResourceAttr(resourceName, "custom_roles.1", name+"2"), diff --git a/launchdarkly/resource_launchdarkly_audit_log_subscription.go b/launchdarkly/resource_launchdarkly_audit_log_subscription.go new file mode 100644 index 00000000..b2da548f --- /dev/null +++ b/launchdarkly/resource_launchdarkly_audit_log_subscription.go @@ -0,0 +1,139 @@ +package launchdarkly + +import ( + "context" + "fmt" + "log" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + ldapi "github.com/launchdarkly/api-client-go/v7" +) + +func resourceAuditLogSubscription() *schema.Resource { + return &schema.Resource{ + CreateContext: resourceAuditLogSubscriptionCreate, + UpdateContext: resourceAuditLogSubscriptionUpdate, + DeleteContext: resourceAuditLogSubscriptionDelete, + ReadContext: resourceAuditLogSubscriptionRead, + Exists: resourceAuditLogSubscriptionExists, + + Importer: &schema.ResourceImporter{ + State: resourceAuditLogSubscriptionImport, + }, + + Schema: auditLogSubscriptionSchema(false), + } +} + +func resourceAuditLogSubscriptionCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + client := metaRaw.(*Client) + integrationKey := d.Get(INTEGRATION_KEY).(string) + name := d.Get(NAME).(string) + on := d.Get(ON).(bool) + tags := stringsFromSchemaSet(d.Get(TAGS).(*schema.Set)) + config, err := configFromResourceData(d) + if err != nil { + return diag.Errorf("failed to create %s integration with name %s: %v", integrationKey, name, err.Error()) + } + + statements, err := policyStatementsFromResourceData(d.Get(STATEMENTS).([]interface{})) + if err != nil { + return diag.Errorf("failed to create %s integration with name %s: %v", integrationKey, name, err.Error()) + } + + subscriptionBody := ldapi.SubscriptionPost{ + Name: name, + On: &on, + Tags: &tags, + Config: config, + Statements: &statements, + } + + sub, _, err := client.ld.IntegrationAuditLogSubscriptionsApi.CreateSubscription(client.ctx, integrationKey).SubscriptionPost(subscriptionBody).Execute() + + if err != nil { + return diag.Errorf("failed to create %s integration with name %s: %v", integrationKey, name, handleLdapiErr(err)) + } + d.SetId(*sub.Id) + return resourceAuditLogSubscriptionRead(ctx, d, metaRaw) +} + +func resourceAuditLogSubscriptionUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + client := metaRaw.(*Client) + integrationKey := d.Get(INTEGRATION_KEY).(string) + name := d.Get(NAME).(string) + tags := stringsFromResourceData(d, TAGS) + on := d.Get(ON).(bool) + config, err := configFromResourceData(d) + if err != nil { + return diag.FromErr(err) + } + id := d.Id() + + statements, err := policyStatementsFromResourceData(d.Get(STATEMENTS).([]interface{})) + if err != nil { + return diag.FromErr(err) + } + + patch := []ldapi.PatchOperation{ + patchReplace("/name", &name), + patchReplace("/tags", &tags), + patchReplace("/config", &config), + patchReplace("/on", &on), + patchReplace("/statements", &statements), + } + + _, _, err = client.ld.IntegrationAuditLogSubscriptionsApi.UpdateSubscription(client.ctx, integrationKey, id).PatchOperation(patch).Execute() + if err != 
nil { + return diag.Errorf("failed to update %q integration with name %q and ID %q: %s", integrationKey, name, id, handleLdapiErr(err)) + } + return resourceAuditLogSubscriptionRead(ctx, d, metaRaw) +} + +func resourceAuditLogSubscriptionDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + client := metaRaw.(*Client) + id := d.Id() + integrationKey := d.Get(INTEGRATION_KEY).(string) + + _, err := client.ld.IntegrationAuditLogSubscriptionsApi.DeleteSubscription(client.ctx, integrationKey, id).Execute() + + if err != nil { + return diag.Errorf("failed to delete integration with ID %q: %s", id, handleLdapiErr(err)) + } + return diag.Diagnostics{} +} + +func resourceAuditLogSubscriptionRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + return auditLogSubscriptionRead(ctx, d, metaRaw, false) +} + +func resourceAuditLogSubscriptionExists(d *schema.ResourceData, metaRaw interface{}) (bool, error) { + client := metaRaw.(*Client) + id := d.Id() + integrationKey := d.Get(INTEGRATION_KEY).(string) + + _, res, err := client.ld.IntegrationAuditLogSubscriptionsApi.GetSubscriptionByID(client.ctx, integrationKey, id).Execute() + if isStatusNotFound(res) { + log.Println("got 404 when getting integration. returning false.") + return false, nil + } + if err != nil { + return false, fmt.Errorf("failed to get integration with ID %q: %v", id, handleLdapiErr(err)) + } + return true, nil +} + +func resourceAuditLogSubscriptionImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + id := d.Id() + parts := strings.Split(d.Id(), "/") + if len(parts) != 2 { + return nil, fmt.Errorf("found unexpected id format for import: %q. expected format: 'integrationKey/integration_id'", id) + } + + integrationKey, integrationID := parts[0], parts[1] + _ = d.Set(INTEGRATION_KEY, integrationKey) + d.SetId(integrationID) + return []*schema.ResourceData{d}, nil +} diff --git a/launchdarkly/resource_launchdarkly_audit_log_subscription_test.go b/launchdarkly/resource_launchdarkly_audit_log_subscription_test.go new file mode 100644 index 00000000..dbdb4a2b --- /dev/null +++ b/launchdarkly/resource_launchdarkly_audit_log_subscription_test.go @@ -0,0 +1,263 @@ +package launchdarkly + +import ( + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +const ( + testAccAuditLogSubscriptionCreate = ` +resource "launchdarkly_audit_log_subscription" "%s_tf_test" { + integration_key = "%s" + name = "terraform test" + config = %s + tags = [ + "integrations", + "terraform" + ] + on = true + statements { + actions = ["*"] + effect = "deny" + resources = ["proj/*:env/*:flag/*"] + } +} +` + + testAccAuditLogSubscriptionUpdate = ` +resource "launchdarkly_audit_log_subscription" "%s_tf_test" { + integration_key = "%s" + name = "terraform test v2" + config = %s + on = false + tags = [ + "integrations" + ] + statements { + actions = ["*"] + effect = "allow" + resources = ["proj/*:env/production"] + } +} +` +) + +func TestAccAuditLogSubscription_CreateUpdateDatadog(t *testing.T) { + integrationKey := "datadog" + // omitting host_url = "https://api.datadoghq.com" to test the handling of attributes with default values + config := `{ + api_key = "thisisasecretkey" + } + ` + + resourceName := fmt.Sprintf("launchdarkly_audit_log_subscription.%s_tf_test", integrationKey) + resource.ParallelTest(t, resource.TestCase{ + PreCheck: 
func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccAuditLogSubscriptionCreate, integrationKey, integrationKey, config), + Check: resource.ComposeTestCheckFunc( + testAccCheckIntegrationExists(resourceName), + resource.TestCheckResourceAttrSet(resourceName, ID), + resource.TestCheckResourceAttr(resourceName, INTEGRATION_KEY, integrationKey), + resource.TestCheckResourceAttr(resourceName, NAME, "terraform test"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), + resource.TestCheckResourceAttr(resourceName, "config.api_key", "thisisasecretkey"), + // resource.TestCheckResourceAttr(resourceName, "config.host_url", "https://api.datadoghq.com"), + resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), + resource.TestCheckResourceAttr(resourceName, "tags.0", "integrations"), + resource.TestCheckResourceAttr(resourceName, "tags.1", "terraform"), + resource.TestCheckResourceAttr(resourceName, "statements.#", "1"), + resource.TestCheckResourceAttr(resourceName, "statements.0.actions.0", "*"), + resource.TestCheckResourceAttr(resourceName, "statements.0.resources.0", "proj/*:env/*:flag/*"), + resource.TestCheckResourceAttr(resourceName, "statements.0.effect", "deny"), + ), + }, + { + Config: fmt.Sprintf(testAccAuditLogSubscriptionUpdate, integrationKey, integrationKey, config), + Check: resource.ComposeTestCheckFunc( + testAccCheckIntegrationExists(resourceName), + resource.TestCheckResourceAttrSet(resourceName, ID), + resource.TestCheckResourceAttr(resourceName, INTEGRATION_KEY, integrationKey), + resource.TestCheckResourceAttr(resourceName, NAME, "terraform test v2"), + resource.TestCheckResourceAttr(resourceName, ON, "false"), + resource.TestCheckResourceAttr(resourceName, "tags.#", "1"), + resource.TestCheckResourceAttr(resourceName, "tags.0", "integrations"), + resource.TestCheckResourceAttr(resourceName, "statements.#", "1"), + resource.TestCheckResourceAttr(resourceName, "statements.0.actions.#", "1"), + resource.TestCheckResourceAttr(resourceName, "statements.0.actions.0", "*"), + resource.TestCheckResourceAttr(resourceName, "statements.0.effect", "allow"), + resource.TestCheckResourceAttr(resourceName, "statements.0.resources.0", "proj/*:env/production"), + ), + }, + }, + }) +} + +func TestAccAuditLogSubscription_CreateDynatrace(t *testing.T) { + integrationKey := "dynatrace" + config := `{ + api_token = "verysecrettoken" + url = "https://launchdarkly.appdynamics.com" + entity = "APPLICATION_METHOD" + } + ` + + resourceName := fmt.Sprintf("launchdarkly_audit_log_subscription.%s_tf_test", integrationKey) + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccAuditLogSubscriptionCreate, integrationKey, integrationKey, config), + Check: resource.ComposeTestCheckFunc( + testAccCheckIntegrationExists(resourceName), + resource.TestCheckResourceAttrSet(resourceName, ID), + resource.TestCheckResourceAttr(resourceName, INTEGRATION_KEY, integrationKey), + resource.TestCheckResourceAttr(resourceName, NAME, "terraform test"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), + resource.TestCheckResourceAttr(resourceName, "config.api_token", "verysecrettoken"), + resource.TestCheckResourceAttr(resourceName, "config.url", "https://launchdarkly.appdynamics.com"), + resource.TestCheckResourceAttr(resourceName, "config.entity", "APPLICATION_METHOD"), + 
resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), + resource.TestCheckResourceAttr(resourceName, "tags.0", "integrations"), + resource.TestCheckResourceAttr(resourceName, "tags.1", "terraform"), + resource.TestCheckResourceAttr(resourceName, "statements.#", "1"), + resource.TestCheckResourceAttr(resourceName, "statements.0.actions.0", "*"), + resource.TestCheckResourceAttr(resourceName, "statements.0.resources.0", "proj/*:env/*:flag/*"), + resource.TestCheckResourceAttr(resourceName, "statements.0.effect", "deny"), + ), + }, + }, + }) +} + +func TestAccAuditLogSubscription_CreateMSTeams(t *testing.T) { + integrationKey := "msteams" + config := `{ + url = "https://outlook.office.com/webhook/terraform-test" + } + ` + + resourceName := fmt.Sprintf("launchdarkly_audit_log_subscription.%s_tf_test", integrationKey) + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccAuditLogSubscriptionCreate, integrationKey, integrationKey, config), + Check: resource.ComposeTestCheckFunc( + testAccCheckIntegrationExists(resourceName), + resource.TestCheckResourceAttrSet(resourceName, ID), + resource.TestCheckResourceAttr(resourceName, INTEGRATION_KEY, integrationKey), + resource.TestCheckResourceAttr(resourceName, NAME, "terraform test"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), + resource.TestCheckResourceAttr(resourceName, "config.url", "https://outlook.office.com/webhook/terraform-test"), + resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), + resource.TestCheckResourceAttr(resourceName, "tags.0", "integrations"), + resource.TestCheckResourceAttr(resourceName, "tags.1", "terraform"), + resource.TestCheckResourceAttr(resourceName, "statements.#", "1"), + resource.TestCheckResourceAttr(resourceName, "statements.0.actions.0", "*"), + resource.TestCheckResourceAttr(resourceName, "statements.0.resources.0", "proj/*:env/*:flag/*"), + resource.TestCheckResourceAttr(resourceName, "statements.0.effect", "deny"), + ), + }, + }, + }) +} + +func TestAccAuditLogSubscription_CreateSplunk(t *testing.T) { + // splunk specifically needs to be converted to kebab case, so we need to handle it specially + integrationKey := "splunk" + config := `{ + base_url = "https://launchdarkly.splunk.com" + token = "averysecrettoken" + skip_ca_verification = true + } + ` + + resourceName := fmt.Sprintf("launchdarkly_audit_log_subscription.%s_tf_test", integrationKey) + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccAuditLogSubscriptionCreate, integrationKey, integrationKey, config), + Check: resource.ComposeTestCheckFunc( + testAccCheckIntegrationExists(resourceName), + resource.TestCheckResourceAttrSet(resourceName, ID), + resource.TestCheckResourceAttr(resourceName, INTEGRATION_KEY, integrationKey), + resource.TestCheckResourceAttr(resourceName, NAME, "terraform test"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), + resource.TestCheckResourceAttr(resourceName, "config.base_url", "https://launchdarkly.splunk.com"), + resource.TestCheckResourceAttr(resourceName, "config.token", "averysecrettoken"), + resource.TestCheckResourceAttr(resourceName, "config.skip_ca_verification", "true"), + resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), + resource.TestCheckResourceAttr(resourceName, "tags.0", "integrations"), + 
resource.TestCheckResourceAttr(resourceName, "tags.1", "terraform"), + resource.TestCheckResourceAttr(resourceName, "statements.#", "1"), + resource.TestCheckResourceAttr(resourceName, "statements.0.actions.0", "*"), + resource.TestCheckResourceAttr(resourceName, "statements.0.resources.0", "proj/*:env/*:flag/*"), + resource.TestCheckResourceAttr(resourceName, "statements.0.effect", "deny"), + ), + }, + }, + }) +} + +func TestAccAuditLogSubscription_WrongConfigReturnsError(t *testing.T) { + integrationKey := "honeycomb" + config := `{ + url = "https://bad-config.com/terraform-test" + } + ` + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccAuditLogSubscriptionCreate, integrationKey, integrationKey, config), + ExpectError: regexp.MustCompile(`Error: failed to create honeycomb integration with name terraform test: config variable url not valid for integration type honeycomb`), + }, + }, + }) +} + +func testAccCheckIntegrationExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("not found: %s", resourceName) + } + integrationKey, ok := rs.Primary.Attributes[INTEGRATION_KEY] + if !ok { + return fmt.Errorf("integration integrationKey not found: %s", resourceName) + } + integrationID, ok := rs.Primary.Attributes[ID] + if !ok { + return fmt.Errorf("integration not found: %s", resourceName) + } + client := testAccProvider.Meta().(*Client) + _, _, err := client.ld.IntegrationAuditLogSubscriptionsApi.GetSubscriptionByID(client.ctx, integrationKey, integrationID).Execute() + if err != nil { + return fmt.Errorf("error getting %s integration: %s", integrationKey, err) + } + + return nil + } +} diff --git a/launchdarkly/resource_launchdarkly_custom_role.go b/launchdarkly/resource_launchdarkly_custom_role.go index 6447372b..e40b3d89 100644 --- a/launchdarkly/resource_launchdarkly_custom_role.go +++ b/launchdarkly/resource_launchdarkly_custom_role.go @@ -1,10 +1,11 @@ package launchdarkly import ( + "context" "fmt" "log" - "net/http" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ldapi "github.com/launchdarkly/api-client-go/v7" @@ -12,11 +13,11 @@ import ( func resourceCustomRole() *schema.Resource { return &schema.Resource{ - Create: resourceCustomRoleCreate, - Read: resourceCustomRoleRead, - Update: resourceCustomRoleUpdate, - Delete: resourceCustomRoleDelete, - Exists: resourceCustomRoleExists, + CreateContext: resourceCustomRoleCreate, + ReadContext: resourceCustomRoleRead, + UpdateContext: resourceCustomRoleUpdate, + DeleteContext: resourceCustomRoleDelete, + Exists: resourceCustomRoleExists, Importer: &schema.ResourceImporter{ State: resourceCustomRoleImport, @@ -24,11 +25,11 @@ func resourceCustomRole() *schema.Resource { Schema: map[string]*schema.Schema{ KEY: { - Type: schema.TypeString, - Required: true, - Description: "A unique key that will be used to reference the custom role in your code", - ForceNew: true, - ValidateFunc: validateKey(), + Type: schema.TypeString, + Required: true, + Description: "A unique key that will be used to reference the custom role in your code", + ForceNew: true, + ValidateDiagFunc: validateKey(), }, NAME: { Type: schema.TypeString, @@ -46,7 +47,7 @@ func resourceCustomRole() *schema.Resource { } } -func 
resourceCustomRoleCreate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceCustomRoleCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) customRoleKey := d.Get(KEY).(string) customRoleName := d.Get(NAME).(string) @@ -54,7 +55,7 @@ func resourceCustomRoleCreate(d *schema.ResourceData, metaRaw interface{}) error customRolePolicies := policiesFromResourceData(d) policyStatements, err := policyStatementsFromResourceData(d.Get(POLICY_STATEMENTS).([]interface{})) if err != nil { - return err + return diag.FromErr(err) } if len(policyStatements) > 0 { customRolePolicies = policyStatements @@ -67,32 +68,35 @@ func resourceCustomRoleCreate(d *schema.ResourceData, metaRaw interface{}) error Policy: customRolePolicies, } - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.CustomRolesApi.PostCustomRole(client.ctx).CustomRolePost(customRoleBody).Execute() - }) + _, _, err = client.ld.CustomRolesApi.PostCustomRole(client.ctx).CustomRolePost(customRoleBody).Execute() + if err != nil { - return fmt.Errorf("failed to create custom role with name %q: %s", customRoleName, handleLdapiErr(err)) + return diag.Errorf("failed to create custom role with name %q: %s", customRoleName, handleLdapiErr(err)) } d.SetId(customRoleKey) - return resourceCustomRoleRead(d, metaRaw) + return resourceCustomRoleRead(ctx, d, metaRaw) } -func resourceCustomRoleRead(d *schema.ResourceData, metaRaw interface{}) error { +func resourceCustomRoleRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) customRoleID := d.Id() - customRoleRaw, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.CustomRolesApi.GetCustomRole(client.ctx, customRoleID).Execute() - }) - customRole := customRoleRaw.(ldapi.CustomRole) + customRole, res, err := client.ld.CustomRolesApi.GetCustomRole(client.ctx, customRoleID).Execute() + if isStatusNotFound(res) { log.Printf("[WARN] failed to find custom role with id %q, removing from state", customRoleID) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] failed to find custom role with id %q, removing from state", customRoleID), + }) d.SetId("") - return nil + return diags } if err != nil { - return fmt.Errorf("failed to get custom role with id %q: %s", customRoleID, handleLdapiErr(err)) + return diag.Errorf("failed to get custom role with id %q: %s", customRoleID, handleLdapiErr(err)) } _ = d.Set(KEY, customRole.Key) @@ -101,19 +105,25 @@ func resourceCustomRoleRead(d *schema.ResourceData, metaRaw interface{}) error { // Because "policy" is now deprecated in favor of "policy_statements", only set "policy" if it has // already been set by the user. 
+ // TODO: Somehow this seems to also add an empty policystatement of + // policy { + // + actions = [] + // + resources = [] + // } if _, ok := d.GetOk(POLICY); ok { - err = d.Set(POLICY, policiesToResourceData(customRole.Policy)) + policies := policiesToResourceData(customRole.Policy) + err = d.Set(POLICY, policies) } else { err = d.Set(POLICY_STATEMENTS, policyStatementsToResourceData(statementsToStatementReps(customRole.Policy))) } if err != nil { - return fmt.Errorf("could not set policy on custom role with id %q: %v", customRoleID, err) + return diag.Errorf("could not set policy on custom role with id %q: %v", customRoleID, err) } - return nil + return diags } -func resourceCustomRoleUpdate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceCustomRoleUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) customRoleKey := d.Get(KEY).(string) customRoleName := d.Get(NAME).(string) @@ -121,7 +131,7 @@ func resourceCustomRoleUpdate(d *schema.ResourceData, metaRaw interface{}) error customRolePolicies := policiesFromResourceData(d) policyStatements, err := policyStatementsFromResourceData(d.Get(POLICY_STATEMENTS).([]interface{})) if err != nil { - return err + return diag.FromErr(err) } if len(policyStatements) > 0 { customRolePolicies = policyStatements @@ -134,32 +144,27 @@ func resourceCustomRoleUpdate(d *schema.ResourceData, metaRaw interface{}) error patchReplace("/policy", &customRolePolicies), }} - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.CustomRolesApi.PatchCustomRole(client.ctx, customRoleKey).PatchWithComment(patch).Execute() - }) - }) + _, _, err = client.ld.CustomRolesApi.PatchCustomRole(client.ctx, customRoleKey).PatchWithComment(patch).Execute() if err != nil { - return fmt.Errorf("failed to update custom role with key %q: %s", customRoleKey, handleLdapiErr(err)) + return diag.Errorf("failed to update custom role with key %q: %s", customRoleKey, handleLdapiErr(err)) } - return resourceCustomRoleRead(d, metaRaw) + return resourceCustomRoleRead(ctx, d, metaRaw) } -func resourceCustomRoleDelete(d *schema.ResourceData, metaRaw interface{}) error { +func resourceCustomRoleDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) customRoleKey := d.Id() - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - res, err := client.ld.CustomRolesApi.DeleteCustomRole(client.ctx, customRoleKey).Execute() - return nil, res, err - }) + _, err := client.ld.CustomRolesApi.DeleteCustomRole(client.ctx, customRoleKey).Execute() if err != nil { - return fmt.Errorf("failed to delete custom role with key %q: %s", customRoleKey, handleLdapiErr(err)) + return diag.Errorf("failed to delete custom role with key %q: %s", customRoleKey, handleLdapiErr(err)) } - return nil + return diags } func resourceCustomRoleExists(d *schema.ResourceData, metaRaw interface{}) (bool, error) { diff --git a/launchdarkly/resource_launchdarkly_custom_role_test.go b/launchdarkly/resource_launchdarkly_custom_role_test.go index 06e1ec94..745e6f85 100644 --- a/launchdarkly/resource_launchdarkly_custom_role_test.go +++ b/launchdarkly/resource_launchdarkly_custom_role_test.go @@ -44,6 +44,18 @@ resource "launchdarkly_custom_role" "test" { resources = ["proj/*:env/staging"] } } +` + 
testAccCustomRoleCreateWithNotStatements = ` +resource "launchdarkly_custom_role" "test" { + key = "%s" + name = "Custom role - %s" + description = "Don't allow all actions on non-staging environments" + policy_statements { + not_actions = ["*"] + effect = "allow" + not_resources = ["proj/*:env/staging"] + } +} ` testAccCustomRoleUpdateWithStatements = ` resource "launchdarkly_custom_role" "test" { @@ -56,6 +68,18 @@ resource "launchdarkly_custom_role" "test" { resources = ["proj/*:env/production"] } } +` + testAccCustomRoleUpdateWithNotStatements = ` +resource "launchdarkly_custom_role" "test" { + key = "%s" + name = "Updated role - %s" + description= "Don't deny all actions on non production environments" + policy_statements { + not_actions = ["*"] + effect = "deny" + not_resources = ["proj/*:env/production"] + } +} ` ) @@ -73,9 +97,9 @@ func TestAccCustomRole_Create(t *testing.T) { Config: fmt.Sprintf(testAccCustomRoleCreate, key, name), Check: resource.ComposeTestCheckFunc( testAccCheckCustomRoleExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", key), - resource.TestCheckResourceAttr(resourceName, "name", "Custom role - "+name), - resource.TestCheckResourceAttr(resourceName, "description", "Deny all actions on production environments"), + resource.TestCheckResourceAttr(resourceName, KEY, key), + resource.TestCheckResourceAttr(resourceName, NAME, "Custom role - "+name), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "Deny all actions on production environments"), resource.TestCheckResourceAttr(resourceName, "policy.#", "1"), resource.TestCheckResourceAttr(resourceName, "policy.0.actions.#", "1"), resource.TestCheckResourceAttr(resourceName, "policy.0.actions.0", "*"), @@ -102,9 +126,9 @@ func TestAccCustomRole_CreateWithStatements(t *testing.T) { Config: fmt.Sprintf(testAccCustomRoleCreateWithStatements, key, name), Check: resource.ComposeTestCheckFunc( testAccCheckCustomRoleExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", key), - resource.TestCheckResourceAttr(resourceName, "name", "Custom role - "+name), - resource.TestCheckResourceAttr(resourceName, "description", "Allow all actions on staging environments"), + resource.TestCheckResourceAttr(resourceName, KEY, key), + resource.TestCheckResourceAttr(resourceName, NAME, "Custom role - "+name), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "Allow all actions on staging environments"), resource.TestCheckResourceAttr(resourceName, "policy.#", "0"), resource.TestCheckResourceAttr(resourceName, "policy_statements.#", "1"), resource.TestCheckResourceAttr(resourceName, "policy_statements.0.actions.#", "1"), @@ -123,6 +147,41 @@ func TestAccCustomRole_CreateWithStatements(t *testing.T) { }) } +func TestAccCustomRole_CreateWithNotStatements(t *testing.T) { + key := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + name := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + resourceName := "launchdarkly_custom_role.test" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccCustomRoleCreateWithNotStatements, key, name), + Check: resource.ComposeTestCheckFunc( + testAccCheckCustomRoleExists(resourceName), + resource.TestCheckResourceAttr(resourceName, KEY, key), + resource.TestCheckResourceAttr(resourceName, NAME, "Custom role - "+name), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "Don't allow all 
actions on non-staging environments"), + resource.TestCheckResourceAttr(resourceName, "policy.#", "0"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.0.not_actions.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.0.not_actions.0", "*"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.0.not_resources.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.0.not_resources.0", "proj/*:env/staging"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.0.effect", "allow"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func TestAccCustomRole_Update(t *testing.T) { key := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) name := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) @@ -143,9 +202,9 @@ func TestAccCustomRole_Update(t *testing.T) { Config: fmt.Sprintf(testAccCustomRoleUpdate, key, name), Check: resource.ComposeTestCheckFunc( testAccCheckCustomRoleExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", key), - resource.TestCheckResourceAttr(resourceName, "name", "Updated - "+name), - resource.TestCheckResourceAttr(resourceName, "description", ""), // should be empty after removal + resource.TestCheckResourceAttr(resourceName, KEY, key), + resource.TestCheckResourceAttr(resourceName, NAME, "Updated - "+name), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, ""), // should be empty after removal resource.TestCheckResourceAttr(resourceName, "policy.#", "1"), resource.TestCheckResourceAttr(resourceName, "policy.0.actions.#", "1"), resource.TestCheckResourceAttr(resourceName, "policy.0.actions.0", "*"), @@ -178,9 +237,9 @@ func TestAccCustomRole_UpdateWithStatements(t *testing.T) { Config: fmt.Sprintf(testAccCustomRoleUpdateWithStatements, key, name), Check: resource.ComposeTestCheckFunc( testAccCheckCustomRoleExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", key), - resource.TestCheckResourceAttr(resourceName, "name", "Updated role - "+name), - resource.TestCheckResourceAttr(resourceName, "description", "Deny all actions on production environments"), + resource.TestCheckResourceAttr(resourceName, KEY, key), + resource.TestCheckResourceAttr(resourceName, NAME, "Updated role - "+name), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "Deny all actions on production environments"), resource.TestCheckResourceAttr(resourceName, "policy.#", "0"), resource.TestCheckResourceAttr(resourceName, "policy_statements.#", "1"), resource.TestCheckResourceAttr(resourceName, "policy_statements.0.actions.#", "1"), @@ -194,6 +253,42 @@ func TestAccCustomRole_UpdateWithStatements(t *testing.T) { }) } +func TestAccCustomRole_UpdateWithNotStatements(t *testing.T) { + key := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + name := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + resourceName := "launchdarkly_custom_role.test" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: fmt.Sprintf(testAccCustomRoleCreateWithStatements, key, name), + Check: resource.ComposeTestCheckFunc( + testAccCheckCustomRoleExists(resourceName), + ), + }, + { + Config: fmt.Sprintf(testAccCustomRoleUpdateWithNotStatements, key, name), + Check: resource.ComposeTestCheckFunc( + 
testAccCheckCustomRoleExists(resourceName), + resource.TestCheckResourceAttr(resourceName, KEY, key), + resource.TestCheckResourceAttr(resourceName, NAME, "Updated role - "+name), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "Don't deny all actions on non production environments"), + resource.TestCheckResourceAttr(resourceName, "policy.#", "0"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.0.not_actions.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.0.not_actions.0", "*"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.0.not_resources.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.0.not_resources.0", "proj/*:env/production"), + resource.TestCheckResourceAttr(resourceName, "policy_statements.0.effect", "deny"), + ), + }, + }, + }) +} + func testAccCheckCustomRoleExists(resourceName string) resource.TestCheckFunc { return func(s *terraform.State) error { rs, ok := s.RootModule().Resources[resourceName] diff --git a/launchdarkly/resource_launchdarkly_destination.go b/launchdarkly/resource_launchdarkly_destination.go index 2a8561eb..2e3320b6 100644 --- a/launchdarkly/resource_launchdarkly_destination.go +++ b/launchdarkly/resource_launchdarkly_destination.go @@ -1,11 +1,12 @@ package launchdarkly import ( + "context" "fmt" "log" - "net/http" "strings" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ldapi "github.com/launchdarkly/api-client-go/v7" @@ -13,11 +14,11 @@ import ( func resourceDestination() *schema.Resource { return &schema.Resource{ - Create: resourceDestinationCreate, - Read: resourceDestinationRead, - Update: resourceDestinationUpdate, - Delete: resourceDestinationDelete, - Exists: resourceDestinationExists, + CreateContext: resourceDestinationCreate, + ReadContext: resourceDestinationRead, + UpdateContext: resourceDestinationUpdate, + DeleteContext: resourceDestinationDelete, + Exists: resourceDestinationExists, Importer: &schema.ResourceImporter{ State: resourceDestinationImport, @@ -25,11 +26,11 @@ func resourceDestination() *schema.Resource { Schema: map[string]*schema.Schema{ PROJECT_KEY: { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: "The LaunchDarkly project key", - ValidateFunc: validateKey(), + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The LaunchDarkly project key", + ValidateDiagFunc: validateKey(), }, ENV_KEY: { Type: schema.TypeString, @@ -44,11 +45,11 @@ func resourceDestination() *schema.Resource { }, // kind can only be one of five types (kinesis, google-pubsub, mparticle, azure-event-hubs, or segment) KIND: { - Type: schema.TypeString, - Required: true, - Description: "The data export destination type. Available choices are 'kinesis', 'google-pubsub', 'segment', 'azure-event-hubs', and 'mparticle'", - ValidateFunc: validation.StringInSlice([]string{"kinesis", "google-pubsub", "mparticle", "azure-event-hubs", "segment"}, false), - ForceNew: true, + Type: schema.TypeString, + Required: true, + Description: "The data export destination type. 
Available choices are 'kinesis', 'google-pubsub', 'segment', 'azure-event-hubs', and 'mparticle'", + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"kinesis", "google-pubsub", "mparticle", "azure-event-hubs", "segment"}, false)), + ForceNew: true, }, CONFIG: { Type: schema.TypeMap, @@ -66,7 +67,7 @@ func resourceDestination() *schema.Resource { } } -func resourceDestinationCreate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceDestinationCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) destinationProjKey := d.Get(PROJECT_KEY).(string) destinationEnvKey := d.Get(ENV_KEY).(string) @@ -76,7 +77,7 @@ func resourceDestinationCreate(d *schema.ResourceData, metaRaw interface{}) erro destinationConfig, err := destinationConfigFromResourceData(d) if err != nil { - return err + return diag.FromErr(err) } destinationBody := ldapi.DestinationPost{ @@ -86,42 +87,42 @@ func resourceDestinationCreate(d *schema.ResourceData, metaRaw interface{}) erro On: &destinationOn, } - destinationRaw, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.DataExportDestinationsApi.PostDestination(client.ctx, destinationProjKey, destinationEnvKey).DestinationPost(destinationBody).Execute() - }) - destination := destinationRaw.(ldapi.Destination) + destination, _, err := client.ld.DataExportDestinationsApi.PostDestination(client.ctx, destinationProjKey, destinationEnvKey).DestinationPost(destinationBody).Execute() if err != nil { d.SetId("") - return fmt.Errorf("failed to create destination with project key %q and env key %q: %s", destinationProjKey, destinationEnvKey, handleLdapiErr(err)) + return diag.Errorf("failed to create destination with project key %q and env key %q: %s", destinationProjKey, destinationEnvKey, handleLdapiErr(err)) } // destination defined in api-client-go/model_destination.go d.SetId(strings.Join([]string{destinationProjKey, destinationEnvKey, *destination.Id}, "/")) - return resourceDestinationRead(d, metaRaw) + return resourceDestinationRead(ctx, d, metaRaw) } -func resourceDestinationRead(d *schema.ResourceData, metaRaw interface{}) error { +func resourceDestinationRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics client := metaRaw.(*Client) _, _, destinationID, err := destinationImportIDtoKeys(d.Id()) if err != nil { - return err + return diag.FromErr(err) } destinationProjKey := d.Get(PROJECT_KEY).(string) destinationEnvKey := d.Get(ENV_KEY).(string) - destinationRaw, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.DataExportDestinationsApi.GetDestination(client.ctx, destinationProjKey, destinationEnvKey, destinationID).Execute() - }) - destination := destinationRaw.(ldapi.Destination) + destination, res, err := client.ld.DataExportDestinationsApi.GetDestination(client.ctx, destinationProjKey, destinationEnvKey, destinationID).Execute() + if isStatusNotFound(res) { log.Printf("[WARN] failed to find destination with id: %q in project %q, environment: %q, removing from state", destinationID, destinationProjKey, destinationEnvKey) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] failed to find destination with id: %q in project %q, environment: %q, removing from state", destinationID, destinationProjKey, destinationEnvKey), + }) d.SetId("") - return nil + return diags } if err != 
nil { - return fmt.Errorf("failed to get destination with id %q: %s", destinationID, handleLdapiErr(err)) + return diag.Errorf("failed to get destination with id %q: %s", destinationID, handleLdapiErr(err)) } cfg := destinationConfigToResourceData(*destination.Kind, destination.Config) @@ -133,14 +134,14 @@ func resourceDestinationRead(d *schema.ResourceData, metaRaw interface{}) error _ = d.Set(ON, destination.On) d.SetId(strings.Join([]string{destinationProjKey, destinationEnvKey, *destination.Id}, "/")) - return nil + return diags } -func resourceDestinationUpdate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceDestinationUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) _, _, destinationID, err := destinationImportIDtoKeys(d.Id()) if err != nil { - return err + return diag.FromErr(err) } destinationProjKey := d.Get(PROJECT_KEY).(string) destinationEnvKey := d.Get(ENV_KEY).(string) @@ -148,7 +149,7 @@ func resourceDestinationUpdate(d *schema.ResourceData, metaRaw interface{}) erro destinationKind := d.Get(KIND).(string) destinationConfig, err := destinationConfigFromResourceData(d) if err != nil { - return err + return diag.FromErr(err) } destinationOn := d.Get(ON).(bool) @@ -159,37 +160,31 @@ func resourceDestinationUpdate(d *schema.ResourceData, metaRaw interface{}) erro patchReplace("/config", &destinationConfig), } - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict((func() (interface{}, *http.Response, error) { - return client.ld.DataExportDestinationsApi.PatchDestination(client.ctx, destinationProjKey, destinationEnvKey, destinationID).PatchOperation(patch).Execute() - })) - }) + _, _, err = client.ld.DataExportDestinationsApi.PatchDestination(client.ctx, destinationProjKey, destinationEnvKey, destinationID).PatchOperation(patch).Execute() if err != nil { - return fmt.Errorf("failed to update destination with id %q: %s", destinationID, handleLdapiErr(err)) + return diag.Errorf("failed to update destination with id %q: %s", destinationID, handleLdapiErr(err)) } - return resourceDestinationRead(d, metaRaw) + return resourceDestinationRead(ctx, d, metaRaw) } -func resourceDestinationDelete(d *schema.ResourceData, metaRaw interface{}) error { +func resourceDestinationDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) _, _, destinationID, err := destinationImportIDtoKeys(d.Id()) if err != nil { - return err + return diag.FromErr(err) } destinationProjKey := d.Get(PROJECT_KEY).(string) destinationEnvKey := d.Get(ENV_KEY).(string) - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - res, err := client.ld.DataExportDestinationsApi.DeleteDestination(client.ctx, destinationProjKey, destinationEnvKey, destinationID).Execute() - return nil, res, err - }) - + _, err = client.ld.DataExportDestinationsApi.DeleteDestination(client.ctx, destinationProjKey, destinationEnvKey, destinationID).Execute() if err != nil { - return fmt.Errorf("failed to delete destination with id %q: %s", destinationID, handleLdapiErr(err)) + return diag.Errorf("failed to delete destination with id %q: %s", destinationID, handleLdapiErr(err)) } - return nil + return diags } func resourceDestinationExists(d *schema.ResourceData, metaRaw interface{}) (bool, error) { @@ -201,9 +196,7 @@ func resourceDestinationExists(d *schema.ResourceData, metaRaw interface{}) (boo 
destinationProjKey := d.Get(PROJECT_KEY).(string) destinationEnvKey := d.Get(ENV_KEY).(string) - _, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.DataExportDestinationsApi.GetDestination(client.ctx, destinationProjKey, destinationEnvKey, destinationID).Execute() - }) + _, res, err := client.ld.DataExportDestinationsApi.GetDestination(client.ctx, destinationProjKey, destinationEnvKey, destinationID).Execute() if isStatusNotFound(res) { return false, nil } diff --git a/launchdarkly/resource_launchdarkly_destination_test.go b/launchdarkly/resource_launchdarkly_destination_test.go index 0a9a45c3..5014f519 100644 --- a/launchdarkly/resource_launchdarkly_destination_test.go +++ b/launchdarkly/resource_launchdarkly_destination_test.go @@ -181,9 +181,9 @@ func TestAccDestination_CreateKinesis(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "kinesis-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "kinesis"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "kinesis-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "kinesis"), resource.TestCheckResourceAttr(resourceName, "config.region", "us-east-1"), resource.TestCheckResourceAttr(resourceName, "tags.#", "1"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), @@ -212,9 +212,9 @@ func TestAccDestination_CreateMparticle(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "mparticle-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "mparticle"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "mparticle-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "mparticle"), resource.TestCheckResourceAttr(resourceName, "config.api_key", "apiKeyfromMParticle"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), ), @@ -237,10 +237,10 @@ func TestAccDestination_CreatePubsub(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "pubsub-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "google-pubsub"), - resource.TestCheckResourceAttr(resourceName, "on", "false"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "pubsub-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "google-pubsub"), + resource.TestCheckResourceAttr(resourceName, ON, "false"), resource.TestCheckResourceAttr(resourceName, "config.project", "test-project"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), ), @@ -263,9 +263,9 @@ func TestAccDestination_CreateSegment(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - 
resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "segment-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "segment"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "segment-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "segment"), resource.TestCheckResourceAttr(resourceName, "config.write_key", "super-secret-write-key"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), ), @@ -288,9 +288,9 @@ func TestAccDestination_CreateAzureEventHubs(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "azure-event-hubs-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "azure-event-hubs"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "azure-event-hubs-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "azure-event-hubs"), resource.TestCheckResourceAttr(resourceName, "config.namespace", "namespace"), resource.TestCheckResourceAttr(resourceName, "config.name", "name"), resource.TestCheckResourceAttr(resourceName, "config.policy_name", "policy-name"), @@ -316,9 +316,9 @@ func TestAccDestination_UpdateKinesis(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "kinesis-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "kinesis"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "kinesis-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "kinesis"), resource.TestCheckResourceAttr(resourceName, "config.role_arn", "arn:aws:iam::123456789012:role/marketingadmin"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), ), @@ -327,9 +327,9 @@ func TestAccDestination_UpdateKinesis(t *testing.T) { Config: withRandomProject(projectKey, testAccDestinationUpdateKinesis), Check: resource.ComposeTestCheckFunc( testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "updated-kinesis-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "kinesis"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "updated-kinesis-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "kinesis"), resource.TestCheckResourceAttr(resourceName, "config.role_arn", "arn:aws:iam::123456789012:role/marketingadmin"), resource.TestCheckResourceAttr(resourceName, "tags.1", "updated"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), @@ -353,9 +353,9 @@ func TestAccDestination_UpdatePubsub(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "pubsub-dest"), - 
resource.TestCheckResourceAttr(resourceName, "kind", "google-pubsub"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "pubsub-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "google-pubsub"), resource.TestCheckResourceAttr(resourceName, "config.project", "test-project"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), ), @@ -364,9 +364,9 @@ func TestAccDestination_UpdatePubsub(t *testing.T) { Config: withRandomProject(projectKey, testAccDestinationUpdatePubsub), Check: resource.ComposeTestCheckFunc( testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "updated-pubsub-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "google-pubsub"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "updated-pubsub-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "google-pubsub"), resource.TestCheckResourceAttr(resourceName, "config.project", "renamed-project"), resource.TestCheckResourceAttr(resourceName, "tags.1", "updated"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), @@ -390,9 +390,9 @@ func TestAccDestination_UpdateMparticle(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "mparticle-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "mparticle"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "mparticle-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "mparticle"), resource.TestCheckResourceAttr(resourceName, "config.secret", "mParticleSecret"), resource.TestCheckResourceAttr(resourceName, "config.environment", "production"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), @@ -402,9 +402,9 @@ func TestAccDestination_UpdateMparticle(t *testing.T) { Config: withRandomProject(projectKey, testAccDestinationUpdateMparticle), Check: resource.ComposeTestCheckFunc( testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "updated-mparticle-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "mparticle"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "updated-mparticle-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "mparticle"), resource.TestCheckResourceAttr(resourceName, "config.secret", "updatedSecret"), resource.TestCheckResourceAttr(resourceName, "config.environment", "production"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), @@ -429,10 +429,10 @@ func TestAccDestination_UpdateSegment(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "segment-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "segment"), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + 
resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "segment-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "segment"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "config.write_key", "super-secret-write-key"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), ), @@ -442,10 +442,10 @@ func TestAccDestination_UpdateSegment(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "segment-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "segment"), - resource.TestCheckResourceAttr(resourceName, "on", "false"), // should default to false when removed + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "segment-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "segment"), + resource.TestCheckResourceAttr(resourceName, ON, "false"), // should default to false when removed resource.TestCheckResourceAttr(resourceName, "config.write_key", "updated-write-key"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), ), @@ -468,14 +468,14 @@ func TestAccDestination_UpdateAzureEventHubs(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "azure-event-hubs-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "azure-event-hubs"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "azure-event-hubs-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "azure-event-hubs"), resource.TestCheckResourceAttr(resourceName, "config.namespace", "namespace"), - resource.TestCheckResourceAttr(resourceName, "config.name", "name"), + resource.TestCheckResourceAttr(resourceName, "config.name", NAME), resource.TestCheckResourceAttr(resourceName, "config.policy_name", "policy-name"), resource.TestCheckResourceAttr(resourceName, "config.policy_key", "super-secret-policy-key"), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), ), }, @@ -484,14 +484,14 @@ func TestAccDestination_UpdateAzureEventHubs(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckDestinationExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "updated-azure-event-hubs-dest"), - resource.TestCheckResourceAttr(resourceName, "kind", "azure-event-hubs"), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "updated-azure-event-hubs-dest"), + resource.TestCheckResourceAttr(resourceName, KIND, "azure-event-hubs"), resource.TestCheckResourceAttr(resourceName, "config.namespace", "namespace"), resource.TestCheckResourceAttr(resourceName, "config.name", "updated-name"), resource.TestCheckResourceAttr(resourceName, 
"config.policy_name", "updated-policy-name"), resource.TestCheckResourceAttr(resourceName, "config.policy_key", "updated-policy-key"), - resource.TestCheckResourceAttr(resourceName, "on", "false"), + resource.TestCheckResourceAttr(resourceName, ON, "false"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), ), }, diff --git a/launchdarkly/resource_launchdarkly_environment.go b/launchdarkly/resource_launchdarkly_environment.go index 31665b33..c4e507a1 100644 --- a/launchdarkly/resource_launchdarkly_environment.go +++ b/launchdarkly/resource_launchdarkly_environment.go @@ -1,10 +1,11 @@ package launchdarkly import ( + "context" "fmt" - "net/http" "strings" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ldapi "github.com/launchdarkly/api-client-go/v7" ) @@ -12,19 +13,19 @@ import ( func resourceEnvironment() *schema.Resource { envSchema := environmentSchema(false) envSchema[PROJECT_KEY] = &schema.Schema{ - Type: schema.TypeString, - Required: true, - Description: "The LaunchDarkly project key", - ForceNew: true, - ValidateFunc: validateKey(), + Type: schema.TypeString, + Required: true, + Description: "The LaunchDarkly project key", + ForceNew: true, + ValidateDiagFunc: validateKey(), } return &schema.Resource{ - Create: resourceEnvironmentCreate, - Read: resourceEnvironmentRead, - Update: resourceEnvironmentUpdate, - Delete: resourceEnvironmentDelete, - Exists: resourceEnvironmentExists, + CreateContext: resourceEnvironmentCreate, + ReadContext: resourceEnvironmentRead, + UpdateContext: resourceEnvironmentUpdate, + DeleteContext: resourceEnvironmentDelete, + Exists: resourceEnvironmentExists, Importer: &schema.ResourceImporter{ State: resourceEnvironmentImport, @@ -33,7 +34,7 @@ func resourceEnvironment() *schema.Resource { } } -func resourceEnvironmentCreate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceEnvironmentCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) projectKey := d.Get(PROJECT_KEY).(string) key := d.Get(KEY).(string) @@ -58,36 +59,36 @@ func resourceEnvironmentCreate(d *schema.ResourceData, metaRaw interface{}) erro ConfirmChanges: &confirmChanges, } - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.EnvironmentsApi.PostEnvironment(client.ctx, projectKey).EnvironmentPost(envPost).Execute() - }) + _, _, err := client.ld.EnvironmentsApi.PostEnvironment(client.ctx, projectKey).EnvironmentPost(envPost).Execute() if err != nil { - return fmt.Errorf("failed to create environment: [%+v] for project key: %s: %s", envPost, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to create environment: [%+v] for project key: %s: %s", envPost, projectKey, handleLdapiErr(err)) } approvalSettings := d.Get(APPROVAL_SETTINGS) if len(approvalSettings.([]interface{})) > 0 { - err = resourceEnvironmentUpdate(d, metaRaw) - if err != nil { + updateDiags := resourceEnvironmentUpdate(ctx, d, metaRaw) + if updateDiags.HasError() { // if there was a problem in the update state, we need to clean up completely by deleting the env _, deleteErr := client.ld.EnvironmentsApi.DeleteEnvironment(client.ctx, projectKey, key).Execute() + // TODO: Figure out if we can get the err out of updateDiag (not looking likely) to use in hanldeLdapiErr if deleteErr != nil { - return fmt.Errorf("failed to clean up environment %q from project %q: %s", key, projectKey, 
handleLdapiErr(err)) + return updateDiags + // return diag.Errorf("failed to clean up environment %q from project %q: %s", key, projectKey, handleLdapiErr(errs)) } - return fmt.Errorf("failed to update environment with name %q key %q for projectKey %q: %s", + return diag.Errorf("failed to update environment with name %q key %q for projectKey %q: %s", name, key, projectKey, handleLdapiErr(err)) } } d.SetId(projectKey + "/" + key) - return resourceEnvironmentRead(d, metaRaw) + return resourceEnvironmentRead(ctx, d, metaRaw) } -func resourceEnvironmentRead(d *schema.ResourceData, metaRaw interface{}) error { - return environmentRead(d, metaRaw, false) +func resourceEnvironmentRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + return environmentRead(ctx, d, metaRaw, false) } -func resourceEnvironmentUpdate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceEnvironmentUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) //required fields @@ -113,36 +114,30 @@ func resourceEnvironmentUpdate(d *schema.ResourceData, metaRaw interface{}) erro oldApprovalSettings, newApprovalSettings := d.GetChange(APPROVAL_SETTINGS) approvalPatch, err := approvalPatchFromSettings(oldApprovalSettings, newApprovalSettings) if err != nil { - return err + return diag.FromErr(err) } patch = append(patch, approvalPatch...) - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.EnvironmentsApi.PatchEnvironment(client.ctx, projectKey, key).PatchOperation(patch).Execute() - }) - }) + _, _, err = client.ld.EnvironmentsApi.PatchEnvironment(client.ctx, projectKey, key).PatchOperation(patch).Execute() if err != nil { - return fmt.Errorf("failed to update environment with key %q for project: %q: %s", key, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to update environment with key %q for project: %q: %s", key, projectKey, handleLdapiErr(err)) } - return resourceEnvironmentRead(d, metaRaw) + return resourceEnvironmentRead(ctx, d, metaRaw) } -func resourceEnvironmentDelete(d *schema.ResourceData, metaRaw interface{}) error { +func resourceEnvironmentDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) projectKey := d.Get(PROJECT_KEY).(string) key := d.Get(KEY).(string) - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - res, err := client.ld.EnvironmentsApi.DeleteEnvironment(client.ctx, projectKey, key).Execute() - return nil, res, err - }) - + _, err := client.ld.EnvironmentsApi.DeleteEnvironment(client.ctx, projectKey, key).Execute() if err != nil { - return fmt.Errorf("failed to delete project with key %q for project %q: %s", key, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to delete project with key %q for project %q: %s", key, projectKey, handleLdapiErr(err)) } - return nil + return diags } func resourceEnvironmentExists(d *schema.ResourceData, metaRaw interface{}) (bool, error) { @@ -150,9 +145,7 @@ func resourceEnvironmentExists(d *schema.ResourceData, metaRaw interface{}) (boo } func environmentExists(projectKey string, key string, meta *Client) (bool, error) { - _, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return meta.ld.EnvironmentsApi.GetEnvironment(meta.ctx, projectKey, key).Execute() - }) + _, 
res, err := meta.ld.EnvironmentsApi.GetEnvironment(meta.ctx, projectKey, key).Execute() if isStatusNotFound(res) { return false, nil } diff --git a/launchdarkly/resource_launchdarkly_environment_test.go b/launchdarkly/resource_launchdarkly_environment_test.go index e269f4f6..c2eace76 100644 --- a/launchdarkly/resource_launchdarkly_environment_test.go +++ b/launchdarkly/resource_launchdarkly_environment_test.go @@ -115,16 +115,16 @@ func TestAccEnvironment_Create(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Staging1"), - resource.TestCheckResourceAttr(resourceName, "key", "staging1"), - resource.TestCheckResourceAttr(resourceName, "color", "ff00ff"), - resource.TestCheckResourceAttr(resourceName, "secure_mode", "true"), - resource.TestCheckResourceAttr(resourceName, "default_track_events", "true"), - resource.TestCheckResourceAttr(resourceName, "default_ttl", "50"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "Staging1"), + resource.TestCheckResourceAttr(resourceName, KEY, "staging1"), + resource.TestCheckResourceAttr(resourceName, COLOR, "ff00ff"), + resource.TestCheckResourceAttr(resourceName, SECURE_MODE, "true"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TRACK_EVENTS, "true"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TTL, "50"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), - resource.TestCheckResourceAttr(resourceName, "require_comments", "true"), - resource.TestCheckResourceAttr(resourceName, "confirm_changes", "true"), + resource.TestCheckResourceAttr(resourceName, REQUIRE_COMMENTS, "true"), + resource.TestCheckResourceAttr(resourceName, CONFIRM_CHANGES, "true"), resource.TestCheckResourceAttr(resourceName, "tags.1", "terraform"), resource.TestCheckResourceAttr(resourceName, "tags.0", "tagged"), ), @@ -152,13 +152,13 @@ func TestAccEnvironment_Update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Staging1"), - resource.TestCheckResourceAttr(resourceName, "key", "staging1"), - resource.TestCheckResourceAttr(resourceName, "color", "ff00ff"), - resource.TestCheckResourceAttr(resourceName, "secure_mode", "true"), - resource.TestCheckResourceAttr(resourceName, "default_track_events", "true"), - resource.TestCheckResourceAttr(resourceName, "default_ttl", "50"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "Staging1"), + resource.TestCheckResourceAttr(resourceName, KEY, "staging1"), + resource.TestCheckResourceAttr(resourceName, COLOR, "ff00ff"), + resource.TestCheckResourceAttr(resourceName, SECURE_MODE, "true"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TRACK_EVENTS, "true"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TTL, "50"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), ), }, { @@ -166,15 +166,15 @@ func TestAccEnvironment_Update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, 
"name", "The real staging1"), - resource.TestCheckResourceAttr(resourceName, "key", "staging1"), - resource.TestCheckResourceAttr(resourceName, "color", "000000"), - resource.TestCheckResourceAttr(resourceName, "secure_mode", "false"), - resource.TestCheckResourceAttr(resourceName, "default_track_events", "false"), - resource.TestCheckResourceAttr(resourceName, "default_ttl", "3"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "require_comments", "false"), - resource.TestCheckResourceAttr(resourceName, "confirm_changes", "false"), + resource.TestCheckResourceAttr(resourceName, NAME, "The real staging1"), + resource.TestCheckResourceAttr(resourceName, KEY, "staging1"), + resource.TestCheckResourceAttr(resourceName, COLOR, "000000"), + resource.TestCheckResourceAttr(resourceName, SECURE_MODE, "false"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TRACK_EVENTS, "false"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TTL, "3"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, REQUIRE_COMMENTS, "false"), + resource.TestCheckResourceAttr(resourceName, CONFIRM_CHANGES, "false"), ), }, }, @@ -195,13 +195,13 @@ func TestAccEnvironment_RemoveAttributes(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Staging1"), - resource.TestCheckResourceAttr(resourceName, "key", "staging1"), - resource.TestCheckResourceAttr(resourceName, "color", "ff00ff"), - resource.TestCheckResourceAttr(resourceName, "secure_mode", "true"), - resource.TestCheckResourceAttr(resourceName, "default_track_events", "true"), - resource.TestCheckResourceAttr(resourceName, "default_ttl", "50"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "Staging1"), + resource.TestCheckResourceAttr(resourceName, KEY, "staging1"), + resource.TestCheckResourceAttr(resourceName, COLOR, "ff00ff"), + resource.TestCheckResourceAttr(resourceName, SECURE_MODE, "true"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TRACK_EVENTS, "true"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TTL, "50"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), ), }, { @@ -209,15 +209,15 @@ func TestAccEnvironment_RemoveAttributes(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "The real staging1"), - resource.TestCheckResourceAttr(resourceName, "key", "staging1"), - resource.TestCheckResourceAttr(resourceName, "color", "000000"), - resource.TestCheckResourceAttr(resourceName, "secure_mode", "false"), - resource.TestCheckResourceAttr(resourceName, "default_track_events", "false"), - resource.TestCheckResourceAttr(resourceName, "default_ttl", "0"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "require_comments", "false"), - resource.TestCheckResourceAttr(resourceName, "confirm_changes", "false"), + resource.TestCheckResourceAttr(resourceName, NAME, "The real staging1"), + resource.TestCheckResourceAttr(resourceName, KEY, "staging1"), + resource.TestCheckResourceAttr(resourceName, COLOR, "000000"), + 
resource.TestCheckResourceAttr(resourceName, SECURE_MODE, "false"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TRACK_EVENTS, "false"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TTL, "0"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, REQUIRE_COMMENTS, "false"), + resource.TestCheckResourceAttr(resourceName, CONFIRM_CHANGES, "false"), ), }, }, @@ -242,15 +242,15 @@ func TestAccEnvironment_Invalid(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "The real staging1"), - resource.TestCheckResourceAttr(resourceName, "key", "staging1"), - resource.TestCheckResourceAttr(resourceName, "color", "000000"), - resource.TestCheckResourceAttr(resourceName, "secure_mode", "false"), - resource.TestCheckResourceAttr(resourceName, "default_track_events", "false"), - resource.TestCheckResourceAttr(resourceName, "default_ttl", "3"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "require_comments", "false"), - resource.TestCheckResourceAttr(resourceName, "confirm_changes", "false"), + resource.TestCheckResourceAttr(resourceName, NAME, "The real staging1"), + resource.TestCheckResourceAttr(resourceName, KEY, "staging1"), + resource.TestCheckResourceAttr(resourceName, COLOR, "000000"), + resource.TestCheckResourceAttr(resourceName, SECURE_MODE, "false"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TRACK_EVENTS, "false"), + resource.TestCheckResourceAttr(resourceName, DEFAULT_TTL, "3"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, REQUIRE_COMMENTS, "false"), + resource.TestCheckResourceAttr(resourceName, CONFIRM_CHANGES, "false"), ), }, }, @@ -271,10 +271,10 @@ func TestAccEnvironmentWithApprovals(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Approvals Test"), - resource.TestCheckResourceAttr(resourceName, "key", "approvals-test"), - resource.TestCheckResourceAttr(resourceName, "color", "ababab"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "Approvals Test"), + resource.TestCheckResourceAttr(resourceName, KEY, "approvals-test"), + resource.TestCheckResourceAttr(resourceName, COLOR, "ababab"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), resource.TestCheckResourceAttr(resourceName, "approval_settings.0.can_review_own_request", "false"), resource.TestCheckResourceAttr(resourceName, "approval_settings.0.can_apply_declined_changes", "true"), // should default to true resource.TestCheckResourceAttr(resourceName, "approval_settings.0.min_num_approvals", "2"), @@ -291,10 +291,10 @@ func TestAccEnvironmentWithApprovals(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Approvals Test 2.0"), - resource.TestCheckResourceAttr(resourceName, "key", "approvals-test"), - resource.TestCheckResourceAttr(resourceName, "color", "bababa"), - resource.TestCheckResourceAttr(resourceName, 
"project_key", projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "Approvals Test 2.0"), + resource.TestCheckResourceAttr(resourceName, KEY, "approvals-test"), + resource.TestCheckResourceAttr(resourceName, COLOR, "bababa"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), resource.TestCheckResourceAttr(resourceName, "approval_settings.0.required", "true"), resource.TestCheckResourceAttr(resourceName, "approval_settings.0.can_review_own_request", "true"), resource.TestCheckResourceAttr(resourceName, "approval_settings.0.can_apply_declined_changes", "false"), @@ -312,11 +312,11 @@ func TestAccEnvironmentWithApprovals(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Approvals Test 2.1"), - resource.TestCheckResourceAttr(resourceName, "key", "approvals-test"), - resource.TestCheckResourceAttr(resourceName, "color", "bababa"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckNoResourceAttr(resourceName, "approval_settings"), + resource.TestCheckResourceAttr(resourceName, NAME, "Approvals Test 2.1"), + resource.TestCheckResourceAttr(resourceName, KEY, "approvals-test"), + resource.TestCheckResourceAttr(resourceName, COLOR, "bababa"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckNoResourceAttr(resourceName, APPROVAL_SETTINGS), ), }, }, diff --git a/launchdarkly/resource_launchdarkly_feature_flag.go b/launchdarkly/resource_launchdarkly_feature_flag.go index be715bac..6767112c 100644 --- a/launchdarkly/resource_launchdarkly_feature_flag.go +++ b/launchdarkly/resource_launchdarkly_feature_flag.go @@ -3,8 +3,8 @@ package launchdarkly import ( "context" "fmt" - "net/http" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ldapi "github.com/launchdarkly/api-client-go/v7" ) @@ -38,11 +38,17 @@ func customizeFlagDiff(ctx context.Context, diff *schema.ResourceDiff, v interfa // AND the customer removes the INCLUDE_IN_SNIPPET key from the config without replacing with defaultCSA // The read would assume no changes are needed, HOWEVER we need to jump back to project level set defaults // Hence the setting below - diff.SetNew(INCLUDE_IN_SNIPPET, includeInSnippetByDefault) - diff.SetNew(CLIENT_SIDE_AVAILABILITY, []map[string]interface{}{{ + err := diff.SetNew(INCLUDE_IN_SNIPPET, includeInSnippetByDefault) + if err != nil { + return err + } + err = diff.SetNew(CLIENT_SIDE_AVAILABILITY, []map[string]interface{}{{ USING_ENVIRONMENT_ID: defaultCSA.UsingEnvironmentId, USING_MOBILE_KEY: defaultCSA.UsingMobileKey, }}) + if err != nil { + return err + } } } @@ -60,11 +66,11 @@ func resourceFeatureFlag() *schema.Resource { } schemaMap[VARIATION_TYPE] = variationTypeSchema() return &schema.Resource{ - Create: resourceFeatureFlagCreate, - Read: resourceFeatureFlagRead, - Update: resourceFeatureFlagUpdate, - Delete: resourceFeatureFlagDelete, - Exists: resourceFeatureFlagExists, + CreateContext: resourceFeatureFlagCreate, + ReadContext: resourceFeatureFlagRead, + UpdateContext: resourceFeatureFlagUpdate, + DeleteContext: resourceFeatureFlagDelete, + Exists: resourceFeatureFlagExists, Importer: &schema.ResourceImporter{ State: resourceFeatureFlagImport, @@ -74,15 +80,15 @@ func resourceFeatureFlag() *schema.Resource { } } -func 
resourceFeatureFlagCreate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceFeatureFlagCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) projectKey := d.Get(PROJECT_KEY).(string) if exists, err := projectExists(projectKey, client); !exists { if err != nil { - return err + return diag.FromErr(err) } - return fmt.Errorf("cannot find project with key %q", projectKey) + return diag.Errorf("cannot find project with key %q", projectKey) } key := d.Get(KEY).(string) @@ -92,6 +98,7 @@ func resourceFeatureFlagCreate(d *schema.ResourceData, metaRaw interface{}) erro includeInSnippet := d.Get(INCLUDE_IN_SNIPPET).(bool) // GetOkExists is 'deprecated', but needed as optional booleans set to false return a 'false' ok value from GetOk // Also not really deprecated as they are keeping it around pending a replacement https://github.com/hashicorp/terraform-plugin-sdk/pull/350#issuecomment-597888969 + //nolint:staticcheck // SA1019 _, includeInSnippetOk := d.GetOkExists(INCLUDE_IN_SNIPPET) _, clientSideAvailabilityOk := d.GetOk(CLIENT_SIDE_AVAILABILITY) clientSideAvailability := &ldapi.ClientSideAvailabilityPost{ @@ -102,12 +109,12 @@ func resourceFeatureFlagCreate(d *schema.ResourceData, metaRaw interface{}) erro variations, err := variationsFromResourceData(d) if err != nil { - return fmt.Errorf("invalid variations: %v", err) + return diag.Errorf("invalid variations: %v", err) } defaults, err := defaultVariationsFromResourceData(d) if err != nil { - return fmt.Errorf("invalid default variations: %v", err) + return diag.Errorf("invalid default variations: %v", err) } flag := ldapi.FeatureFlagBody{ @@ -133,43 +140,42 @@ func resourceFeatureFlagCreate(d *schema.ResourceData, metaRaw interface{}) erro // IncludeInSnippetdefault is the same as defaultCSA.UsingEnvironmentId, so we can _ it defaultCSA, _, err := getProjectDefaultCSAandIncludeInSnippet(client, projectKey) if err != nil { - return fmt.Errorf("failed to get project level client side availability defaults. %v", err) + return diag.Errorf("failed to get project level client side availability defaults. 
%v", err) } flag.ClientSideAvailability = &ldapi.ClientSideAvailabilityPost{ UsingEnvironmentId: *defaultCSA.UsingEnvironmentId, UsingMobileKey: *defaultCSA.UsingMobileKey, } } - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.FeatureFlagsApi.PostFeatureFlag(client.ctx, projectKey).FeatureFlagBody(flag).Execute() - }) - + _, _, err = client.ld.FeatureFlagsApi.PostFeatureFlag(client.ctx, projectKey).FeatureFlagBody(flag).Execute() if err != nil { - return fmt.Errorf("failed to create flag %q in project %q: %s", key, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to create flag %q in project %q: %s", key, projectKey, handleLdapiErr(err)) } // ld's api does not allow some fields to be passed in during flag creation so we do an update: // https://apidocs.launchdarkly.com/docs/create-feature-flag - err = resourceFeatureFlagUpdate(d, metaRaw) - if err != nil { + updateDiags := resourceFeatureFlagUpdate(ctx, d, metaRaw) + if updateDiags.HasError() { // if there was a problem in the update state, we need to clean up completely by deleting the flag _, deleteErr := client.ld.FeatureFlagsApi.DeleteFeatureFlag(client.ctx, projectKey, key).Execute() if deleteErr != nil { - return fmt.Errorf("failed to delete flag %q from project %q: %s", key, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to delete flag %q from project %q: %s", key, projectKey, handleLdapiErr(deleteErr)) } - return fmt.Errorf("failed to update flag with name %q key %q for projectKey %q: %s", - flagName, key, projectKey, handleLdapiErr(err)) + // TODO: Figure out if we can get the err out of updateDiag (not looking likely) to use in hanldeLdapiErr + return updateDiags + // return diag.Errorf("failed to update flag with name %q key %q for projectKey %q: %s", + // flagName, key, projectKey, handleLdapiErr(errs)) } d.SetId(projectKey + "/" + key) - return resourceFeatureFlagRead(d, metaRaw) + return resourceFeatureFlagRead(ctx, d, metaRaw) } -func resourceFeatureFlagRead(d *schema.ResourceData, metaRaw interface{}) error { - return featureFlagRead(d, metaRaw, false) +func resourceFeatureFlagRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + return featureFlagRead(ctx, d, metaRaw, false) } -func resourceFeatureFlagUpdate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceFeatureFlagUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) key := d.Get(KEY).(string) projectKey := d.Get(PROJECT_KEY).(string) @@ -182,6 +188,7 @@ func resourceFeatureFlagUpdate(d *schema.ResourceData, metaRaw interface{}) erro clientSideHasChange := d.HasChange(CLIENT_SIDE_AVAILABILITY) // GetOkExists is 'deprecated', but needed as optional booleans set to false return a 'false' ok value from GetOk // Also not really deprecated as they are keeping it around pending a replacement https://github.com/hashicorp/terraform-plugin-sdk/pull/350#issuecomment-597888969 + //nolint:staticcheck // SA1019 _, includeInSnippetOk := d.GetOkExists(INCLUDE_IN_SNIPPET) _, clientSideAvailabilityOk := d.GetOk(CLIENT_SIDE_AVAILABILITY) temporary := d.Get(TEMPORARY).(bool) @@ -217,7 +224,7 @@ func resourceFeatureFlagUpdate(d *schema.ResourceData, metaRaw interface{}) erro // IncludeInSnippetdefault is the same as defaultCSA.UsingEnvironmentId, so we can _ it defaultCSA, _, err := getProjectDefaultCSAandIncludeInSnippet(client, projectKey) if err != nil { - return fmt.Errorf("failed to 
get project level client side availability defaults. %v", err) + return diag.Errorf("failed to get project level client side availability defaults. %v", err) } patch.Patch = append(patch.Patch, patchReplace("/clientSideAvailability", &ldapi.ClientSideAvailabilityPost{ UsingEnvironmentId: *defaultCSA.UsingEnvironmentId, @@ -227,14 +234,14 @@ func resourceFeatureFlagUpdate(d *schema.ResourceData, metaRaw interface{}) erro variationPatches, err := variationPatchesFromResourceData(d) if err != nil { - return fmt.Errorf("failed to build variation patches. %v", err) + return diag.Errorf("failed to build variation patches. %v", err) } patch.Patch = append(patch.Patch, variationPatches...) // Only update the defaults if they are specified in the schema defaults, err := defaultVariationsFromResourceData(d) if err != nil { - return fmt.Errorf("invalid default variations: %v", err) + return diag.Errorf("invalid default variations: %v", err) } if defaults != nil { patch.Patch = append(patch.Patch, patchReplace("/defaults", defaults)) @@ -246,33 +253,27 @@ func resourceFeatureFlagUpdate(d *schema.ResourceData, metaRaw interface{}) erro patch.Patch = append(patch.Patch, patchReplace("/maintainerId", maintainerID.(string))) } - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.FeatureFlagsApi.PatchFeatureFlag(client.ctx, projectKey, key).PatchWithComment(*&patch).Execute() - }) - }) - + _, _, err = client.ld.FeatureFlagsApi.PatchFeatureFlag(client.ctx, projectKey, key).PatchWithComment(patch).Execute() if err != nil { - return fmt.Errorf("failed to update flag %q in project %q: %s", key, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to update flag %q in project %q: %s", key, projectKey, handleLdapiErr(err)) } - return resourceFeatureFlagRead(d, metaRaw) + return resourceFeatureFlagRead(ctx, d, metaRaw) } -func resourceFeatureFlagDelete(d *schema.ResourceData, metaRaw interface{}) error { +func resourceFeatureFlagDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) projectKey := d.Get(PROJECT_KEY).(string) key := d.Get(KEY).(string) - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - res, err := client.ld.FeatureFlagsApi.DeleteFeatureFlag(client.ctx, projectKey, key).Execute() - return nil, res, err - }) + _, err := client.ld.FeatureFlagsApi.DeleteFeatureFlag(client.ctx, projectKey, key).Execute() if err != nil { - return fmt.Errorf("failed to delete flag %q from project %q: %s", key, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to delete flag %q from project %q: %s", key, projectKey, handleLdapiErr(err)) } - return nil + return diags } func resourceFeatureFlagExists(d *schema.ResourceData, metaRaw interface{}) (bool, error) { diff --git a/launchdarkly/resource_launchdarkly_feature_flag_environment.go b/launchdarkly/resource_launchdarkly_feature_flag_environment.go index c5a34fcf..83db34dc 100644 --- a/launchdarkly/resource_launchdarkly_feature_flag_environment.go +++ b/launchdarkly/resource_launchdarkly_feature_flag_environment.go @@ -1,21 +1,22 @@ package launchdarkly import ( + "context" "fmt" "log" - "net/http" "strings" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ldapi "github.com/launchdarkly/api-client-go/v7" ) func 
resourceFeatureFlagEnvironment() *schema.Resource { return &schema.Resource{ - Create: resourceFeatureFlagEnvironmentCreate, - Read: resourceFeatureFlagEnvironmentRead, - Update: resourceFeatureFlagEnvironmentUpdate, - Delete: resourceFeatureFlagEnvironmentDelete, + CreateContext: resourceFeatureFlagEnvironmentCreate, + ReadContext: resourceFeatureFlagEnvironmentRead, + UpdateContext: resourceFeatureFlagEnvironmentUpdate, + DeleteContext: resourceFeatureFlagEnvironmentDelete, Importer: &schema.ResourceImporter{ State: resourceFeatureFlagEnvironmentImport, @@ -30,7 +31,7 @@ func validateFlagID(val interface{}, key string) (warns []string, errs []error) return warns, append(errs, fmt.Errorf("%q must be in the format 'project_key/flag_key'. Got: %s", key, v)) } for _, part := range strings.SplitN(v, "/", 2) { - w, e := validateKey()(part, key) + w, e := validateKeyNoDiag()(part, key) if len(e) > 0 { return w, e } @@ -38,28 +39,28 @@ func validateFlagID(val interface{}, key string) (warns []string, errs []error) return warns, errs } -func resourceFeatureFlagEnvironmentCreate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceFeatureFlagEnvironmentCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) flagId := d.Get(FLAG_ID).(string) projectKey, flagKey, err := flagIdToKeys(flagId) if err != nil { - return err + return diag.FromErr(err) } envKey := d.Get(ENV_KEY).(string) if exists, err := projectExists(projectKey, client); !exists { if err != nil { - return err + return diag.FromErr(err) } - return fmt.Errorf("cannot find project with key %q", projectKey) + return diag.Errorf("cannot find project with key %q", projectKey) } if exists, err := environmentExists(projectKey, envKey, client); !exists { if err != nil { - return err + return diag.FromErr(err) } - return fmt.Errorf("failed to find environment with key %q", envKey) + return diag.Errorf("failed to find environment with key %q", envKey) } patches := make([]ldapi.PatchOperation, 0) @@ -80,7 +81,7 @@ func resourceFeatureFlagEnvironmentCreate(d *schema.ResourceData, metaRaw interf if ok { rules, err := rulesFromResourceData(d) if err != nil { - return err + return diag.FromErr(err) } patches = append(patches, patchReplace(patchFlagEnvPath(d, "rules"), rules)) } @@ -100,7 +101,7 @@ func resourceFeatureFlagEnvironmentCreate(d *schema.ResourceData, metaRaw interf // fallthrough is required fall, err := fallthroughFromResourceData(d) if err != nil { - return err + return diag.FromErr(err) } patches = append(patches, patchReplace(patchFlagEnvPath(d, "fallthrough"), fall)) @@ -112,51 +113,47 @@ func resourceFeatureFlagEnvironmentCreate(d *schema.ResourceData, metaRaw interf } log.Printf("[DEBUG] %+v\n", patch) - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.FeatureFlagsApi.PatchFeatureFlag(client.ctx, projectKey, flagKey).PatchWithComment(patch).Execute() - }) - }) + _, _, err = client.ld.FeatureFlagsApi.PatchFeatureFlag(client.ctx, projectKey, flagKey).PatchWithComment(patch).Execute() if err != nil { - return fmt.Errorf("failed to update flag %q in project %q: %s", flagKey, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to update flag %q in project %q: %s", flagKey, projectKey, handleLdapiErr(err)) } } d.SetId(projectKey + "/" + envKey + "/" + flagKey) - return resourceFeatureFlagEnvironmentRead(d, metaRaw) + return 
resourceFeatureFlagEnvironmentRead(ctx, d, metaRaw) } -func resourceFeatureFlagEnvironmentRead(d *schema.ResourceData, metaRaw interface{}) error { - return featureFlagEnvironmentRead(d, metaRaw, false) +func resourceFeatureFlagEnvironmentRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + return featureFlagEnvironmentRead(ctx, d, metaRaw, false) } -func resourceFeatureFlagEnvironmentUpdate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceFeatureFlagEnvironmentUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) flagId := d.Get(FLAG_ID).(string) projectKey, flagKey, err := flagIdToKeys(flagId) if err != nil { - return err + return diag.FromErr(err) } envKey := d.Get(ENV_KEY).(string) if exists, err := projectExists(projectKey, client); !exists { if err != nil { - return err + return diag.FromErr(err) } - return fmt.Errorf("cannot find project with key %q", projectKey) + return diag.Errorf("cannot find project with key %q", projectKey) } if exists, err := environmentExists(projectKey, envKey, client); !exists { if err != nil { - return err + return diag.FromErr(err) } - return fmt.Errorf("failed to find environment with key %q", envKey) + return diag.Errorf("failed to find environment with key %q", envKey) } on := d.Get(ON) rules, err := rulesFromResourceData(d) if err != nil { - return err + return diag.FromErr(err) } trackEvents := d.Get(TRACK_EVENTS).(bool) prerequisites := prerequisitesFromResourceData(d, PREREQUISITES) @@ -164,7 +161,7 @@ func resourceFeatureFlagEnvironmentUpdate(d *schema.ResourceData, metaRaw interf fall, err := fallthroughFromResourceData(d) if err != nil { - return err + return diag.FromErr(err) } offVariation := d.Get(OFF_VARIATION) @@ -182,43 +179,41 @@ func resourceFeatureFlagEnvironmentUpdate(d *schema.ResourceData, metaRaw interf }} log.Printf("[DEBUG] %+v\n", patch) - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.FeatureFlagsApi.PatchFeatureFlag(client.ctx, projectKey, flagKey).PatchWithComment(patch).Execute() - }) - }) + _, _, err = client.ld.FeatureFlagsApi.PatchFeatureFlag(client.ctx, projectKey, flagKey).PatchWithComment(patch).Execute() if err != nil { - return fmt.Errorf("failed to update flag %q in project %q, environment %q: %s", flagKey, projectKey, envKey, handleLdapiErr(err)) + return diag.Errorf("failed to update flag %q in project %q, environment %q: %s", flagKey, projectKey, envKey, handleLdapiErr(err)) } - return resourceFeatureFlagEnvironmentRead(d, metaRaw) + return resourceFeatureFlagEnvironmentRead(ctx, d, metaRaw) } -func resourceFeatureFlagEnvironmentDelete(d *schema.ResourceData, metaRaw interface{}) error { +func resourceFeatureFlagEnvironmentDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) flagId := d.Get(FLAG_ID).(string) projectKey, flagKey, err := flagIdToKeys(flagId) if err != nil { - return err + return diag.FromErr(err) } envKey := d.Get(ENV_KEY).(string) if exists, err := projectExists(projectKey, client); !exists { if err != nil { - return err + return diag.FromErr(err) } - return fmt.Errorf("cannot find project with key %q", projectKey) + return diag.Errorf("cannot find project with key %q", projectKey) } if exists, err := environmentExists(projectKey, envKey, client); !exists { if err 
!= nil { - return err + return diag.FromErr(err) } - return fmt.Errorf("failed to find environment with key %q", envKey) + return diag.Errorf("failed to find environment with key %q", envKey) } flag, _, err := client.ld.FeatureFlagsApi.GetFeatureFlag(client.ctx, projectKey, flagKey).Execute() if err != nil { - return fmt.Errorf("failed to update flag %q in project %q, environment %q: %s", flagKey, projectKey, envKey, handleLdapiErr(err)) + return diag.Errorf("failed to update flag %q in project %q, environment %q: %s", flagKey, projectKey, envKey, handleLdapiErr(err)) } // Set off variation to match default with how a rule is created @@ -238,16 +233,12 @@ func resourceFeatureFlagEnvironmentDelete(d *schema.ResourceData, metaRaw interf }} log.Printf("[DEBUG] %+v\n", patch) - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.FeatureFlagsApi.PatchFeatureFlag(client.ctx, projectKey, flagKey).PatchWithComment(patch).Execute() - }) - }) + _, _, err = client.ld.FeatureFlagsApi.PatchFeatureFlag(client.ctx, projectKey, flagKey).PatchWithComment(patch).Execute() if err != nil { - return fmt.Errorf("failed to update flag %q in project %q, environment %q: %s", flagKey, projectKey, envKey, handleLdapiErr(err)) + return diag.Errorf("failed to update flag %q in project %q, environment %q: %s", flagKey, projectKey, envKey, handleLdapiErr(err)) } - return nil + return diags } func resourceFeatureFlagEnvironmentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { diff --git a/launchdarkly/resource_launchdarkly_feature_flag_environment_test.go b/launchdarkly/resource_launchdarkly_feature_flag_environment_test.go index cdeaf12f..20af0633 100644 --- a/launchdarkly/resource_launchdarkly_feature_flag_environment_test.go +++ b/launchdarkly/resource_launchdarkly_feature_flag_environment_test.go @@ -366,10 +366,10 @@ func TestAccFeatureFlagEnvironment_Basic(t *testing.T) { Config: withRandomProject(projectKey, testAccFeatureFlagEnvironmentBasic), Check: resource.ComposeTestCheckFunc( testAccCheckFeatureFlagEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "on", "false"), + resource.TestCheckResourceAttr(resourceName, ON, "false"), resource.TestCheckResourceAttr(resourceName, "fallthrough.#", "1"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.variation", "1"), - resource.TestCheckResourceAttr(resourceName, "off_variation", "2"), + resource.TestCheckResourceAttr(resourceName, OFF_VARIATION, "2"), resource.TestCheckResourceAttr(resourceName, "targets.#", "1"), resource.TestCheckResourceAttr(resourceName, "targets.0.values.0", "user1"), resource.TestCheckResourceAttr(resourceName, "targets.0.variation", "0"), @@ -397,15 +397,15 @@ func TestAccFeatureFlagEnvironment_Empty(t *testing.T) { Config: withRandomProject(projectKey, testAccFeatureFlagEnvironmentEmpty), Check: resource.ComposeTestCheckFunc( testAccCheckFeatureFlagEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "on", "false"), - resource.TestCheckResourceAttr(resourceName, "off_variation", "2"), + resource.TestCheckResourceAttr(resourceName, ON, "false"), + resource.TestCheckResourceAttr(resourceName, OFF_VARIATION, "2"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.variation", "0"), - resource.TestCheckResourceAttr(resourceName, "track_events", "false"), - resource.TestCheckNoResourceAttr(resourceName, "rules"), + 
resource.TestCheckResourceAttr(resourceName, TRACK_EVENTS, "false"), + resource.TestCheckNoResourceAttr(resourceName, RULES), resource.TestCheckNoResourceAttr(resourceName, "rules.#"), - resource.TestCheckNoResourceAttr(resourceName, "prerequisites"), + resource.TestCheckNoResourceAttr(resourceName, PREREQUISITES), resource.TestCheckNoResourceAttr(resourceName, "prerequisites.#"), - resource.TestCheckNoResourceAttr(resourceName, "targets"), + resource.TestCheckNoResourceAttr(resourceName, TARGETS), resource.TestCheckNoResourceAttr(resourceName, "targets.#"), ), }, @@ -431,7 +431,7 @@ func TestAccFeatureFlagEnvironment_Update(t *testing.T) { Config: withRandomProject(projectKey, testAccFeatureFlagEnvironmentBasic), Check: resource.ComposeTestCheckFunc( testAccCheckFeatureFlagEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "on", "false"), + resource.TestCheckResourceAttr(resourceName, ON, "false"), resource.TestCheckResourceAttr(resourceName, "fallthrough.#", "1"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.variation", "1"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.rollout.#", "0"), @@ -439,15 +439,15 @@ func TestAccFeatureFlagEnvironment_Update(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "targets.0.values.0", "user1"), resource.TestCheckResourceAttr(resourceName, "targets.0.variation", "0"), resource.TestCheckResourceAttr(resourceName, "rules.#", "0"), - resource.TestCheckResourceAttr(resourceName, "off_variation", "2"), + resource.TestCheckResourceAttr(resourceName, OFF_VARIATION, "2"), ), }, { Config: withRandomProject(projectKey, testAccFeatureFlagEnvironmentUpdate), Check: resource.ComposeTestCheckFunc( testAccCheckFeatureFlagEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "on", "true"), - resource.TestCheckResourceAttr(resourceName, "track_events", "true"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), + resource.TestCheckResourceAttr(resourceName, TRACK_EVENTS, "true"), resource.TestCheckResourceAttr(resourceName, "fallthrough.#", "1"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.variation", "0"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.rollout_weights.#", "3"), @@ -481,7 +481,7 @@ func TestAccFeatureFlagEnvironment_Update(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "rules.1.clauses.0.values.#", "1"), resource.TestCheckResourceAttr(resourceName, "rules.1.clauses.0.values.0", "h"), resource.TestCheckResourceAttr(resourceName, "rules.1.clauses.0.negate", "false"), - resource.TestCheckResourceAttr(resourceName, "off_variation", "1"), + resource.TestCheckResourceAttr(resourceName, OFF_VARIATION, "1"), ), }, // After changes have been made to the resource, removing optional values should revert to their default / null values. 
@@ -489,10 +489,10 @@ func TestAccFeatureFlagEnvironment_Update(t *testing.T) { Config: withRandomProject(projectKey, testAccFeatureFlagEnvironmentEmpty), Check: resource.ComposeTestCheckFunc( testAccCheckFeatureFlagEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "on", "false"), - resource.TestCheckResourceAttr(resourceName, "track_events", "false"), + resource.TestCheckResourceAttr(resourceName, ON, "false"), + resource.TestCheckResourceAttr(resourceName, TRACK_EVENTS, "false"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.variation", "0"), - resource.TestCheckResourceAttr(resourceName, "off_variation", "2"), + resource.TestCheckResourceAttr(resourceName, OFF_VARIATION, "2"), resource.TestCheckNoResourceAttr(resourceName, "targets.#"), resource.TestCheckNoResourceAttr(resourceName, "rules.#"), ), @@ -519,10 +519,10 @@ func TestAccFeatureFlagEnvironment_JSON_variations(t *testing.T) { Config: withRandomProject(projectKey, testAccFeatureFlagEnvironmentJSONVariations), Check: resource.ComposeTestCheckFunc( testAccCheckFeatureFlagEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "on", "false"), + resource.TestCheckResourceAttr(resourceName, ON, "false"), resource.TestCheckResourceAttr(resourceName, "fallthrough.#", "1"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.variation", "1"), - resource.TestCheckResourceAttr(resourceName, "off_variation", "0"), + resource.TestCheckResourceAttr(resourceName, OFF_VARIATION, "0"), ), }, { @@ -548,14 +548,14 @@ func TestAccFeatureFlagEnvironment_BoolClauseValue(t *testing.T) { Config: withRandomProject(projectKey, testAccFeatureFlagEnvironmentBoolClauseValue), Check: resource.ComposeTestCheckFunc( testAccCheckFeatureFlagEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "rules.#", "1"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.#", "1"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.value_type", "boolean"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.values.#", "1"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.values.0", "true"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.variation", "0"), - resource.TestCheckResourceAttr(resourceName, "off_variation", "1"), + resource.TestCheckResourceAttr(resourceName, OFF_VARIATION, "1"), ), }, { @@ -580,7 +580,7 @@ func TestAccFeatureFlagEnvironment_NumberClauseValue(t *testing.T) { Config: withRandomProject(projectKey, testAccFeatureFlagEnvironmentNumberClauseValue), Check: resource.ComposeTestCheckFunc( testAccCheckFeatureFlagEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "rules.#", "1"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.#", "1"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.value_type", "number"), @@ -588,7 +588,7 @@ func TestAccFeatureFlagEnvironment_NumberClauseValue(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.values.0", "42"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.values.1", "84"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.variation", "0"), - resource.TestCheckResourceAttr(resourceName, "off_variation", 
"1"), + resource.TestCheckResourceAttr(resourceName, OFF_VARIATION, "1"), ), }, { @@ -634,21 +634,21 @@ func TestAccFeatureFlagEnvironment_Prereq(t *testing.T) { Config: withRandomProject(projectKey, testAccFeatureFlagEnvironmentPrereq), Check: resource.ComposeTestCheckFunc( testAccCheckFeatureFlagEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "prerequisites.#", "1"), resource.TestCheckResourceAttr(resourceName, "prerequisites.0.flag_key", "bool-flag"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.variation", "1"), - resource.TestCheckResourceAttr(resourceName, "off_variation", "0"), + resource.TestCheckResourceAttr(resourceName, OFF_VARIATION, "0"), ), }, { Config: withRandomProject(projectKey, testAccFeatureFlagEnvironmentRemovePrereq), Check: resource.ComposeTestCheckFunc( testAccCheckFeatureFlagEnvironmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "on", "false"), + resource.TestCheckResourceAttr(resourceName, ON, "false"), resource.TestCheckNoResourceAttr(resourceName, "prerequisites.#"), resource.TestCheckResourceAttr(resourceName, "fallthrough.0.variation", "1"), - resource.TestCheckResourceAttr(resourceName, "off_variation", "0"), + resource.TestCheckResourceAttr(resourceName, OFF_VARIATION, "0"), ), }, }, diff --git a/launchdarkly/resource_launchdarkly_feature_flag_test.go b/launchdarkly/resource_launchdarkly_feature_flag_test.go index 92ba1c86..d0c124f0 100644 --- a/launchdarkly/resource_launchdarkly_feature_flag_test.go +++ b/launchdarkly/resource_launchdarkly_feature_flag_test.go @@ -307,19 +307,6 @@ resource "launchdarkly_feature_flag" "defaults" { } } ` - testAccFeatureFlagDefaultsMissingOffInvalid = ` -resource "launchdarkly_feature_flag" "defaults" { - project_key = launchdarkly_project.test.key - key = "defaults-flag" - name = "Feature flag with defaults" - variation_type = "boolean" - defaults { - on_variation = 2 - off_variation = 3 - } -} -` - testAccFeatureFlagDefaultsMultivariate = ` resource "launchdarkly_feature_flag" "defaults-multivariate" { project_key = launchdarkly_project.test.key @@ -513,7 +500,7 @@ func TestAccFeatureFlag_Basic(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "variations.#", "2"), resource.TestCheckResourceAttr(resourceName, "variations.0.value", "true"), resource.TestCheckResourceAttr(resourceName, "variations.1.value", "false"), - resource.TestCheckNoResourceAttr(resourceName, "maintainer_id"), + resource.TestCheckNoResourceAttr(resourceName, MAINTAINER_ID), ), }, { @@ -540,9 +527,9 @@ func TestAccFeatureFlag_Update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Basic feature flag"), - resource.TestCheckResourceAttr(resourceName, "key", "basic-flag"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "Basic feature flag"), + resource.TestCheckResourceAttr(resourceName, KEY, "basic-flag"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), ), }, { @@ -550,15 +537,15 @@ func TestAccFeatureFlag_Update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckFeatureFlagExists(resourceName), - 
resource.TestCheckResourceAttr(resourceName, "name", "Less basic feature flag"), - resource.TestCheckResourceAttr(resourceName, "key", "basic-flag"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "description", "this is a boolean flag by default becausethe variations field is omitted"), + resource.TestCheckResourceAttr(resourceName, NAME, "Less basic feature flag"), + resource.TestCheckResourceAttr(resourceName, KEY, "basic-flag"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "this is a boolean flag by default becausethe variations field is omitted"), resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), resource.TestCheckResourceAttr(resourceName, "tags.1", "update"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "true"), - resource.TestCheckResourceAttr(resourceName, "temporary", "true"), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "true"), + resource.TestCheckResourceAttr(resourceName, TEMPORARY, "true"), resource.TestCheckResourceAttr(resourceName, "defaults.0.on_variation", "0"), resource.TestCheckResourceAttr(resourceName, "defaults.0.off_variation", "1"), ), @@ -670,10 +657,10 @@ func TestAccFeatureFlag_WithMaintainer(t *testing.T) { testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckMemberExists("launchdarkly_team_member.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Maintained feature flag"), - resource.TestCheckResourceAttr(resourceName, "key", "maintained-flag"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttrPair(resourceName, "maintainer_id", "launchdarkly_team_member.test", "id"), + resource.TestCheckResourceAttr(resourceName, NAME, "Maintained feature flag"), + resource.TestCheckResourceAttr(resourceName, KEY, "maintained-flag"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttrPair(resourceName, MAINTAINER_ID, "launchdarkly_team_member.test", "id"), ), }, { @@ -681,11 +668,11 @@ func TestAccFeatureFlag_WithMaintainer(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Maintained feature flag"), - resource.TestCheckResourceAttr(resourceName, "key", "maintained-flag"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "Maintained feature flag"), + resource.TestCheckResourceAttr(resourceName, KEY, "maintained-flag"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), // when removed it should reset back to the most recently-set maintainer - resource.TestCheckResourceAttrPair(resourceName, "maintainer_id", "launchdarkly_team_member.test", "id"), + resource.TestCheckResourceAttrPair(resourceName, MAINTAINER_ID, "launchdarkly_team_member.test", "id"), ), }, { @@ -693,12 +680,12 @@ func TestAccFeatureFlag_WithMaintainer(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Maintained feature flag"), - 
resource.TestCheckResourceAttr(resourceName, "key", "maintained-flag"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "Maintained feature flag"), + resource.TestCheckResourceAttr(resourceName, KEY, "maintained-flag"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), // it will still be set to the most recently set one even if that member has been deleted // the UI will not show a maintainer because it will not be able to find the record post-member delete - resource.TestCheckResourceAttrSet(resourceName, "maintainer_id"), + resource.TestCheckResourceAttrSet(resourceName, MAINTAINER_ID), ), }, }, @@ -727,10 +714,10 @@ func TestAccFeatureFlag_InvalidMaintainer(t *testing.T) { testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckMemberExists("launchdarkly_team_member.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Maintained feature flag"), - resource.TestCheckResourceAttr(resourceName, "key", "maintained-flag"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttrPair(resourceName, "maintainer_id", "launchdarkly_team_member.test", "id"), + resource.TestCheckResourceAttr(resourceName, NAME, "Maintained feature flag"), + resource.TestCheckResourceAttr(resourceName, KEY, "maintained-flag"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttrPair(resourceName, MAINTAINER_ID, "launchdarkly_team_member.test", "id"), ), }, { @@ -738,12 +725,12 @@ func TestAccFeatureFlag_InvalidMaintainer(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Maintained feature flag"), - resource.TestCheckResourceAttr(resourceName, "key", "maintained-flag"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "Maintained feature flag"), + resource.TestCheckResourceAttr(resourceName, KEY, "maintained-flag"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), // this is the best we can do. 
it should default back to the most recently-set maintainer but // we have no easy way of a - resource.TestCheckResourceAttrSet(resourceName, "maintainer_id"), + resource.TestCheckResourceAttrSet(resourceName, MAINTAINER_ID), ), }, }, @@ -764,10 +751,10 @@ func TestAccFeatureFlag_CreateMultivariate(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "multivariate flag 1 name"), - resource.TestCheckResourceAttr(resourceName, "key", "multivariate-flag-1"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "description", "this is a multivariate flag because we explicitly define the variations"), + resource.TestCheckResourceAttr(resourceName, NAME, "multivariate flag 1 name"), + resource.TestCheckResourceAttr(resourceName, KEY, "multivariate-flag-1"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "this is a multivariate flag because we explicitly define the variations"), resource.TestCheckResourceAttr(resourceName, "variations.#", "3"), resource.TestCheckResourceAttr(resourceName, "variations.0.description", "a description"), resource.TestCheckResourceAttr(resourceName, "variations.0.name", "variation1"), @@ -810,11 +797,11 @@ func TestAccFeatureFlag_CreateMultivariate2(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "multivariate flag 2 name"), - resource.TestCheckResourceAttr(resourceName, "key", "multivariate-flag-2"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "description", "this is a multivariate flag to test big number values"), - resource.TestCheckResourceAttr(resourceName, "variation_type", "number"), + resource.TestCheckResourceAttr(resourceName, NAME, "multivariate flag 2 name"), + resource.TestCheckResourceAttr(resourceName, KEY, "multivariate-flag-2"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "this is a multivariate flag to test big number values"), + resource.TestCheckResourceAttr(resourceName, VARIATION_TYPE, "number"), resource.TestCheckResourceAttr(resourceName, "variations.#", "3"), resource.TestCheckResourceAttr(resourceName, "variations.0.description", "a description"), resource.TestCheckResourceAttr(resourceName, "variations.0.name", "variation1"), @@ -869,10 +856,10 @@ func TestAccFeatureFlag_UpdateMultivariate(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "multivariate flag 1 name"), - resource.TestCheckResourceAttr(resourceName, "key", "multivariate-flag-1"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "description", "this is a multivariate flag because we explicitly define the variations"), + resource.TestCheckResourceAttr(resourceName, NAME, "multivariate flag 1 name"), + resource.TestCheckResourceAttr(resourceName, KEY, "multivariate-flag-1"), + resource.TestCheckResourceAttr(resourceName, 
PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "this is a multivariate flag because we explicitly define the variations"), resource.TestCheckResourceAttr(resourceName, "variations.#", "4"), resource.TestCheckResourceAttr(resourceName, "variations.0.description", "a description"), resource.TestCheckResourceAttr(resourceName, "variations.0.name", "variation1"), @@ -1052,7 +1039,7 @@ func TestAccFeatureFlag_ClientSideAvailabilityUpdate(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "variations.#", "2"), resource.TestCheckResourceAttr(resourceName, "variations.0.value", "true"), resource.TestCheckResourceAttr(resourceName, "variations.1.value", "false"), - resource.TestCheckNoResourceAttr(resourceName, "maintainer_id"), + resource.TestCheckNoResourceAttr(resourceName, MAINTAINER_ID), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_environment_id", "true"), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_mobile_key", "true"), ), @@ -1069,7 +1056,7 @@ func TestAccFeatureFlag_ClientSideAvailabilityUpdate(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "variations.#", "2"), resource.TestCheckResourceAttr(resourceName, "variations.0.value", "true"), resource.TestCheckResourceAttr(resourceName, "variations.1.value", "false"), - resource.TestCheckNoResourceAttr(resourceName, "maintainer_id"), + resource.TestCheckNoResourceAttr(resourceName, MAINTAINER_ID), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_environment_id", "false"), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_mobile_key", "false"), ), @@ -1099,8 +1086,8 @@ func TestAccFeatureFlag_IncludeInSnippetToClientSide(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "variations.#", "2"), resource.TestCheckResourceAttr(resourceName, "variations.0.value", "true"), resource.TestCheckResourceAttr(resourceName, "variations.1.value", "false"), - resource.TestCheckNoResourceAttr(resourceName, "maintainer_id"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "true"), + resource.TestCheckNoResourceAttr(resourceName, MAINTAINER_ID), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "true"), ), }, { @@ -1115,10 +1102,10 @@ func TestAccFeatureFlag_IncludeInSnippetToClientSide(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "variations.#", "2"), resource.TestCheckResourceAttr(resourceName, "variations.0.value", "true"), resource.TestCheckResourceAttr(resourceName, "variations.1.value", "false"), - resource.TestCheckNoResourceAttr(resourceName, "maintainer_id"), + resource.TestCheckNoResourceAttr(resourceName, MAINTAINER_ID), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_environment_id", "true"), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_mobile_key", "true"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "true"), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "true"), ), }, { @@ -1133,10 +1120,10 @@ func TestAccFeatureFlag_IncludeInSnippetToClientSide(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "variations.#", "2"), resource.TestCheckResourceAttr(resourceName, "variations.0.value", "true"), resource.TestCheckResourceAttr(resourceName, "variations.1.value", "false"), - resource.TestCheckNoResourceAttr(resourceName, "maintainer_id"), + resource.TestCheckNoResourceAttr(resourceName, 
MAINTAINER_ID), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_environment_id", "false"), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_mobile_key", "false"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "false"), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "false"), ), }, }, @@ -1164,10 +1151,10 @@ func TestAccFeatureFlag_ClientSideToIncludeInSnippet(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "variations.#", "2"), resource.TestCheckResourceAttr(resourceName, "variations.0.value", "true"), resource.TestCheckResourceAttr(resourceName, "variations.1.value", "false"), - resource.TestCheckNoResourceAttr(resourceName, "maintainer_id"), + resource.TestCheckNoResourceAttr(resourceName, MAINTAINER_ID), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_environment_id", "true"), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_mobile_key", "true"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "true"), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "true"), ), }, { @@ -1182,10 +1169,10 @@ func TestAccFeatureFlag_ClientSideToIncludeInSnippet(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "variations.#", "2"), resource.TestCheckResourceAttr(resourceName, "variations.0.value", "true"), resource.TestCheckResourceAttr(resourceName, "variations.1.value", "false"), - resource.TestCheckNoResourceAttr(resourceName, "maintainer_id"), + resource.TestCheckNoResourceAttr(resourceName, MAINTAINER_ID), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_environment_id", "false"), resource.TestCheckResourceAttr(resourceName, "client_side_availability.0.using_mobile_key", "false"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "false"), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "false"), ), }, }, @@ -1207,10 +1194,10 @@ func TestAccFeatureFlag_IncludeInSnippetRevertToDefault(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Basic feature flag"), - resource.TestCheckResourceAttr(resourceName, "key", "basic-flag-sdk-settings"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "true"), + resource.TestCheckResourceAttr(resourceName, NAME, "Basic feature flag"), + resource.TestCheckResourceAttr(resourceName, KEY, "basic-flag-sdk-settings"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "true"), ), }, // Replace default value with specific value @@ -1219,10 +1206,10 @@ func TestAccFeatureFlag_IncludeInSnippetRevertToDefault(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Basic feature flag"), - resource.TestCheckResourceAttr(resourceName, "key", "basic-flag-sdk-settings"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "false"), + resource.TestCheckResourceAttr(resourceName, NAME, "Basic feature flag"), + 
resource.TestCheckResourceAttr(resourceName, KEY, "basic-flag-sdk-settings"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "false"), ), }, // Clear specific value, check for default @@ -1231,10 +1218,10 @@ func TestAccFeatureFlag_IncludeInSnippetRevertToDefault(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckFeatureFlagExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Basic feature flag"), - resource.TestCheckResourceAttr(resourceName, "key", "basic-flag-sdk-settings"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "true"), + resource.TestCheckResourceAttr(resourceName, NAME, "Basic feature flag"), + resource.TestCheckResourceAttr(resourceName, KEY, "basic-flag-sdk-settings"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "true"), ), }, }, diff --git a/launchdarkly/resource_launchdarkly_flag_trigger.go b/launchdarkly/resource_launchdarkly_flag_trigger.go new file mode 100644 index 00000000..7ba39e71 --- /dev/null +++ b/launchdarkly/resource_launchdarkly_flag_trigger.go @@ -0,0 +1,160 @@ +package launchdarkly + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + ldapi "github.com/launchdarkly/api-client-go/v7" +) + +func resourceFlagTrigger() *schema.Resource { + return &schema.Resource{ + CreateContext: resourceFlagTriggerCreate, + ReadContext: resourceFlagTriggerRead, + UpdateContext: resourceFlagTriggerUpdate, + DeleteContext: resourceFlagTriggerDelete, + Exists: resourceFlagTriggerExists, + + Importer: &schema.ResourceImporter{ + StateContext: resourceFlagTriggerImport, + }, + Schema: baseFlagTriggerSchema(false), + } +} + +func resourceFlagTriggerCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + client := metaRaw.(*Client) + projectKey := d.Get(PROJECT_KEY).(string) + envKey := d.Get(ENV_KEY).(string) + flagKey := d.Get(FLAG_KEY).(string) + integrationKey := d.Get(INTEGRATION_KEY).(string) + instructions := instructionsFromResourceData(d, "POST") + + enabled := d.Get(ENABLED).(bool) + + triggerBody := ldapi.NewTriggerPost(integrationKey) + triggerBody.Instructions = &instructions + + preUpdateTrigger, _, err := client.ld.FlagTriggersApi.CreateTriggerWorkflow(client.ctx, projectKey, envKey, flagKey).TriggerPost(*triggerBody).Execute() + if err != nil { + return diag.Errorf("failed to create %s trigger for proj/env/flag %s/%s/%s: %s", integrationKey, projectKey, envKey, flagKey, err.Error()) + } + _ = d.Set(TRIGGER_URL, preUpdateTrigger.TriggerURL) + + // if enabled is false upon creation, we need to do a patch since the create endpoint + // does not accept multiple instructions + var postUpdateTrigger ldapi.TriggerWorkflowRep + if !enabled { + instructions = []map[string]interface{}{{ + KIND: "disableTrigger", + }} + input := ldapi.FlagTriggerInput{ + Instructions: &instructions, + } + + postUpdateTrigger, _, err = client.ld.FlagTriggersApi.PatchTriggerWorkflow(client.ctx, projectKey, envKey, flagKey, *preUpdateTrigger.Id).FlagTriggerInput(input).Execute() + if err != nil { + return diag.Errorf("failed to update %s trigger for 
proj/env/flag %s/%s/%s: %s", integrationKey, projectKey, envKey, flagKey, err.Error()) + } + } + + d.SetId(*postUpdateTrigger.Id) + return resourceFlagTriggerRead(ctx, d, metaRaw) +} + +func resourceFlagTriggerRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + return flagTriggerRead(ctx, d, metaRaw, false) +} + +func resourceFlagTriggerUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + client := metaRaw.(*Client) + projectKey := d.Get(PROJECT_KEY).(string) + envKey := d.Get(ENV_KEY).(string) + flagKey := d.Get(FLAG_KEY).(string) + integrationKey := d.Get(INTEGRATION_KEY).(string) + instructions := instructionsFromResourceData(d, "PATCH") + + triggerId := d.Id() + + oldEnabled, newEnabled := d.GetChange(ENABLED) + if oldEnabled.(bool) != newEnabled.(bool) { + if newEnabled.(bool) { + instructions = append(instructions, map[string]interface{}{ + KIND: "enableTrigger", + }) + } else { + instructions = append(instructions, map[string]interface{}{ + KIND: "disableTrigger", + }) + } + } + input := ldapi.FlagTriggerInput{ + Instructions: &instructions, + } + + _, _, err := client.ld.FlagTriggersApi.PatchTriggerWorkflow(client.ctx, projectKey, envKey, flagKey, triggerId).FlagTriggerInput(input).Execute() + if err != nil { + return diag.Errorf("failed to update %s trigger for proj/env/flag %s/%s/%s", integrationKey, projectKey, envKey, flagKey) + } + return resourceFlagTriggerRead(ctx, d, metaRaw) +} + +func resourceFlagTriggerDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + client := metaRaw.(*Client) + integrationKey := d.Get(INTEGRATION_KEY).(string) + projectKey := d.Get(PROJECT_KEY).(string) + envKey := d.Get(ENV_KEY).(string) + flagKey := d.Get(FLAG_KEY).(string) + + triggerId := d.Id() + + _, err := client.ld.FlagTriggersApi.DeleteTriggerWorkflow(client.ctx, projectKey, envKey, flagKey, triggerId).Execute() + if err != nil { + return diag.Errorf("failed to delete %s trigger with ID %s for proj/env/flag %s/%s/%s", integrationKey, triggerId, projectKey, envKey, flagKey) + } + return diag.Diagnostics{} +} + +func resourceFlagTriggerExists(d *schema.ResourceData, metaRaw interface{}) (bool, error) { + client := metaRaw.(*Client) + integrationKey := d.Get(INTEGRATION_KEY).(string) + projectKey := d.Get(PROJECT_KEY).(string) + envKey := d.Get(ENV_KEY).(string) + flagKey := d.Get(FLAG_KEY).(string) + + triggerId := d.Id() + + _, res, err := client.ld.FlagTriggersApi.GetTriggerWorkflowById(client.ctx, projectKey, flagKey, envKey, triggerId).Execute() + if isStatusNotFound(res) { + return false, nil + } + if err != nil { + return false, fmt.Errorf("failed to check if %s trigger with ID %s exists in proj/env/flag %s/%s/%s: %s", integrationKey, triggerId, projectKey, envKey, flagKey, handleLdapiErr(err)) + } + return true, nil +} + +func resourceFlagTriggerImport(ctx context.Context, d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + projectKey, envKey, flagKey, triggerId, err := triggerImportIdToKeys(d.Id()) + if err != nil { + return nil, err + } + d.SetId(triggerId) + + _ = d.Set(PROJECT_KEY, projectKey) + _ = d.Set(ENV_KEY, envKey) + _ = d.Set(FLAG_KEY, flagKey) + return []*schema.ResourceData{d}, nil +} + +func triggerImportIdToKeys(id string) (projectKey string, envKey string, flagKey string, triggerId string, err error) { + if strings.Count(id, "/") != 3 { + return "", "", "", "", fmt.Errorf("found unexpected trigger id format: %q expected 
format: 'project_key/env_key/flag_key/trigger_id'", triggerId) + } + parts := strings.SplitN(id, "/", 4) + projectKey, envKey, flagKey, triggerId = parts[0], parts[1], parts[2], parts[3] + return projectKey, envKey, flagKey, triggerId, nil +} diff --git a/launchdarkly/resource_launchdarkly_flag_trigger_test.go b/launchdarkly/resource_launchdarkly_flag_trigger_test.go new file mode 100644 index 00000000..ec61389a --- /dev/null +++ b/launchdarkly/resource_launchdarkly_flag_trigger_test.go @@ -0,0 +1,151 @@ +package launchdarkly + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +const ( + testAccFlagTriggerCreate = ` +resource "launchdarkly_flag_trigger" "basic" { + project_key = launchdarkly_project.test.key + env_key = "test" + flag_key = launchdarkly_feature_flag.trigger_flag.key + integration_key = "generic-trigger" + instructions { + kind = "turnFlagOn" + } + enabled = false +} +` + testAccFlagTriggerUpdate = ` +resource "launchdarkly_flag_trigger" "basic" { + project_key = launchdarkly_project.test.key + env_key = "test" + flag_key = launchdarkly_feature_flag.trigger_flag.key + integration_key = "generic-trigger" + instructions { + kind = "turnFlagOff" + } + enabled = true +} +` + + testAccFlagTriggerUpdate2 = ` +resource "launchdarkly_flag_trigger" "basic" { + project_key = launchdarkly_project.test.key + env_key = "test" + flag_key = launchdarkly_feature_flag.trigger_flag.key + integration_key = "generic-trigger" + instructions { + kind = "turnFlagOff" + } + enabled = false +} +` +) + +func withRandomFlag(randomFlag, resource string) string { + return fmt.Sprintf(` + resource "launchdarkly_feature_flag" "trigger_flag" { + project_key = launchdarkly_project.test.key + key = "%s" + name = "Basic feature flag" + variation_type = "boolean" + } + + %s`, randomFlag, resource) +} + +func TestAccFlagTrigger_CreateUpdate(t *testing.T) { + projectKey := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + flagKey := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + resourceName := "launchdarkly_flag_trigger.basic" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: withRandomProject(projectKey, withRandomFlag(flagKey, testAccFlagTriggerCreate)), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists("launchdarkly_project.test"), + testAccCheckFlagExists(projectKey, "launchdarkly_feature_flag.trigger_flag"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, FLAG_KEY, flagKey), + resource.TestCheckResourceAttr(resourceName, INTEGRATION_KEY, "generic-trigger"), + resource.TestCheckResourceAttr(resourceName, "instructions.0.kind", "turnFlagOn"), + resource.TestCheckResourceAttr(resourceName, ENABLED, "false"), + resource.TestCheckResourceAttrSet(resourceName, TRIGGER_URL), + resource.TestCheckResourceAttrSet(resourceName, MAINTAINER_ID), + ), + }, + { + Config: withRandomProject(projectKey, withRandomFlag(flagKey, testAccFlagTriggerUpdate)), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists("launchdarkly_project.test"), + testAccCheckFlagExists(projectKey, "launchdarkly_feature_flag.trigger_flag"), + 
resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, FLAG_KEY, flagKey), + resource.TestCheckResourceAttr(resourceName, INTEGRATION_KEY, "generic-trigger"), + resource.TestCheckResourceAttr(resourceName, "instructions.0.kind", "turnFlagOff"), + resource.TestCheckResourceAttr(resourceName, ENABLED, "true"), + resource.TestCheckResourceAttrSet(resourceName, TRIGGER_URL), + resource.TestCheckResourceAttrSet(resourceName, MAINTAINER_ID), + ), + }, + { + Config: withRandomProject(projectKey, withRandomFlag(flagKey, testAccFlagTriggerUpdate2)), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists("launchdarkly_project.test"), + testAccCheckFlagExists(projectKey, "launchdarkly_feature_flag.trigger_flag"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, FLAG_KEY, flagKey), + resource.TestCheckResourceAttr(resourceName, INTEGRATION_KEY, "generic-trigger"), + resource.TestCheckResourceAttr(resourceName, "instructions.0.kind", "turnFlagOff"), + resource.TestCheckResourceAttr(resourceName, ENABLED, "false"), + resource.TestCheckResourceAttrSet(resourceName, TRIGGER_URL), + resource.TestCheckResourceAttrSet(resourceName, MAINTAINER_ID), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateIdPrefix: fmt.Sprintf("%s/test/%s/", projectKey, flagKey), + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{TRIGGER_URL}, + }, + }, + }) +} + +func testAccCheckFlagExists(projectKey, resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("not found: %s", resourceName) + } + if rs.Primary.ID == "" { + return fmt.Errorf("flag ID is not set") + } + projectKey, flagKey, err := flagIdToKeys(rs.Primary.ID) + if err != nil { + return fmt.Errorf("flag ID is not set correctly") + } + + client := testAccProvider.Meta().(*Client) + _, _, err = client.ld.FeatureFlagsApi.GetFeatureFlag(client.ctx, projectKey, flagKey).Execute() + if err != nil { + return fmt.Errorf("received an error getting flag. %s", err) + } + return nil + } +} diff --git a/launchdarkly/resource_launchdarkly_metric.go b/launchdarkly/resource_launchdarkly_metric.go new file mode 100644 index 00000000..61acc108 --- /dev/null +++ b/launchdarkly/resource_launchdarkly_metric.go @@ -0,0 +1,303 @@ +package launchdarkly + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + ldapi "github.com/launchdarkly/api-client-go/v7" +) + +// Our required fields for metrics depend on the value of the 'kind' enum. 
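+// For example, the switch in customizeMetricDiff below requires click metrics to set 'selector'
+// and a 'urls' block, pageview metrics to set a 'urls' block, custom metrics to set 'event_key',
+// and numeric custom metrics to additionally set 'unit' and 'success_criteria'; fields belonging
+// to a different kind are rejected outright.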
+// As of now, TF does not support validating multiple attributes at once, so our only options are +// Validating at runtime in Create/Update (and only alerting at apply stage) +// Using CustomizeDiff below (and alerting at plan stage) +// https://github.com/hashicorp/terraform-plugin-sdk/issues/233 +func customizeMetricDiff(ctx context.Context, diff *schema.ResourceDiff, v interface{}) error { + config := diff.GetRawConfig() + + // Kind enum is validated using validateFunc + kindInConfig := diff.Get(KIND).(string) + selectorInConfig := config.GetAttr(SELECTOR) + urlsInConfig := config.GetAttr(URLS) + successCriteriaInConfig := config.GetAttr(SUCCESS_CRITERIA) + unitInConfig := config.GetAttr(UNIT) + eventKeyInConfig := config.GetAttr(EVENT_KEY) + + // Different validation logic depending on which kind of metric we are creating + switch kindInConfig { + case "click": + if selectorInConfig.IsNull() { + return fmt.Errorf("click metrics require 'selector' to be set") + } + // If we have no keys in the URLS block in the config (length is 0) we know the customer hasn't set any URL values + urlsSlice := urlsInConfig.AsValueSlice() + if len(urlsSlice) == 0 { + return fmt.Errorf("click metrics require an 'urls' block to be set") + } + // Determine if the URL blocks have the correct subfields for their kind set + earlyExit := urlsInConfig.ForEachElement(checkUrlConfigValues) + if earlyExit { + return fmt.Errorf("'urls' block is misconfigured, please check documentation for required fields") + } + // Disallow keys specific to other 'kind' values - these updates are ignored by the backend and lead to misleading plans being generated + if !successCriteriaInConfig.IsNull() { + return fmt.Errorf("click metrics do not accept 'success_criteria'") + } + if !unitInConfig.IsNull() { + return fmt.Errorf("click metrics do not accept 'unit'") + } + if !eventKeyInConfig.IsNull() { + return fmt.Errorf("click metrics do not accept 'event_key'") + } + case "custom": + isNumericInConfig := config.GetAttr(IS_NUMERIC) + // numeric custom metrics have extra required fields + if isNumericInConfig.True() { + // enum validation is done in validateFunction against attribute + if successCriteriaInConfig.IsNull() { + return fmt.Errorf("numeric custom metrics require 'success_criteria' to be set") + } + if unitInConfig.IsNull() { + return fmt.Errorf("numeric custom metrics require 'unit' to be set") + } + } + if eventKeyInConfig.IsNull() { + return fmt.Errorf("custom meterics require 'event_key' to be set") + } + // Disallow keys specific to other 'kind' values - these updates are ignored by the backend and lead to misleading plans being generated + urlsSlice := urlsInConfig.AsValueSlice() + if len(urlsSlice) != 0 { + return fmt.Errorf("custom metrics do not accept a 'urls' block") + } + if !selectorInConfig.IsNull() { + return fmt.Errorf("custom metrics do not accept 'selector'") + } + case "pageview": + // If we have no keys in the URLS block in the config (length is 0) we know the customer hasn't set any URL values + urlsSlice := urlsInConfig.AsValueSlice() + if len(urlsSlice) == 0 { + return fmt.Errorf("pageview metrics require an 'urls' block to be set") + } + // Determine if the URL blocks have the correct subfields for their kind set + earlyExit := urlsInConfig.ForEachElement(checkUrlConfigValues) + if earlyExit { + return fmt.Errorf("'urls' block is misconfigured, please check documentation for required fields") + } + + // Disallow keys specific to other 'kind' values - these updates are ignored by the backend 
and lead to misleading plans being generated + if !successCriteriaInConfig.IsNull() { + return fmt.Errorf("pageview metrics do not accept 'success_criteria'") + } + if !unitInConfig.IsNull() { + return fmt.Errorf("pageview metrics do not accept 'unit'") + } + if !eventKeyInConfig.IsNull() { + return fmt.Errorf("pageview metrics do not accept 'event_key'") + } + + if !selectorInConfig.IsNull() { + return fmt.Errorf("pageview metrics do not accept 'selector'") + } + } + + return nil +} + +func resourceMetric() *schema.Resource { + return &schema.Resource{ + CreateContext: resourceMetricCreate, + ReadContext: resourceMetricRead, + UpdateContext: resourceMetricUpdate, + DeleteContext: resourceMetricDelete, + Schema: baseMetricSchema(false), + CustomizeDiff: customizeMetricDiff, + Importer: &schema.ResourceImporter{ + State: resourceMetricImport, + }, + } +} + +func resourceMetricCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + client := metaRaw.(*Client) + + // Warning or errors can be collected in a slice type + var diags diag.Diagnostics + + projectKey := d.Get(PROJECT_KEY).(string) + + if exists, err := projectExists(projectKey, client); !exists { + if err != nil { + return diag.FromErr(err) + } + return diag.Errorf("cannot find project with key %q", projectKey) + } + + key := d.Get(KEY).(string) + name := d.Get(NAME).(string) + kind := d.Get(KIND).(string) + description := d.Get(DESCRIPTION).(string) + tags := stringsFromResourceData(d, TAGS) + isActive := d.Get(IS_ACTIVE).(bool) + isNumeric := d.Get(IS_NUMERIC).(bool) + urls := metricUrlsFromResourceData(d) + // Required depending on type + unit := d.Get(UNIT).(string) + selector := d.Get(SELECTOR).(string) + eventKey := d.Get(EVENT_KEY).(string) + + metric := ldapi.MetricPost{ + Name: &name, + Key: key, + Description: &description, + Tags: &tags, + Kind: kind, + IsActive: &isActive, + IsNumeric: &isNumeric, + Selector: &selector, + Urls: &urls, + Unit: &unit, + EventKey: &eventKey, + } + // Only add successCriteria if it has a value - empty string causes API errors + _, ok := d.GetOk(SUCCESS_CRITERIA) + if ok { + successCriteria := d.Get(SUCCESS_CRITERIA).(string) + metric.SuccessCriteria = &successCriteria + } + + _, _, err := client.ld.MetricsApi.PostMetric(client.ctx, projectKey).MetricPost(metric).Execute() + + if err != nil { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("Error creating metric resource: %q", key), + Detail: fmt.Sprintf("Details: \n %q", handleLdapiErr(err)), + }) + return diags + } + + // Docs imply we can set another maintainer if wanted, this can't be done during create + // So it has to be done in a subsequent update call + maintainerId, maintainerIdOk := d.GetOk(MAINTAINER_ID) + if maintainerIdOk { + _ = d.Set(MAINTAINER_ID, maintainerId) + diags = resourceMetricUpdate(ctx, d, metaRaw) + if diags.HasError() { + // if there was a problem in the update state, we need to clean up completely by deleting the flag + _, deleteErr := client.ld.MetricsApi.DeleteMetric(client.ctx, projectKey, key).Execute() + if deleteErr != nil { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("Error creating metric resource: %q", key), + Detail: fmt.Sprintf("failed to clean up metric %q from project %q: %s", key, projectKey, handleLdapiErr(err)), + }) + return diags + } + return diags + } + } + + d.SetId(projectKey + "/" + key) + + return resourceMetricRead(ctx, d, metaRaw) +} + +func resourceMetricRead(ctx 
context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + // Warning or errors can be collected in a slice type + return metricRead(ctx, d, metaRaw, false) +} + +func resourceMetricUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + client := metaRaw.(*Client) + + // Warning or errors can be collected in a slice type + var diags diag.Diagnostics + + projectKey := d.Get(PROJECT_KEY).(string) + key := d.Get(KEY).(string) + name := d.Get(NAME).(string) + kind := d.Get(KIND).(string) + description := d.Get(DESCRIPTION).(string) + tags := stringsFromResourceData(d, TAGS) + isActive := d.Get(IS_ACTIVE).(bool) + isNumeric := d.Get(IS_NUMERIC).(bool) + urls := metricUrlsFromResourceData(d) + // Required depending on type + unit := d.Get(UNIT).(string) + selector := d.Get(SELECTOR).(string) + eventKey := d.Get(EVENT_KEY).(string) + + patch := []ldapi.PatchOperation{ + patchReplace("/name", name), + patchReplace("/description", description), + patchReplace("/tags", tags), + patchReplace("/kind", kind), + patchReplace("/isActive", isActive), + patchReplace("/isNumeric", isNumeric), + patchReplace("/urls", urls), + patchReplace("/unit", unit), + patchReplace("/selector", selector), + patchReplace("/eventKey", eventKey), + } + + // Only update successCriteria if it is specified in the schema (enum values) + successCriteria, ok := d.GetOk(SUCCESS_CRITERIA) + if ok { + patch = append(patch, patchReplace("/successCriteria", successCriteria.(string))) + } + + // Only update the maintainer ID if is specified in the schema + maintainerID, ok := d.GetOk(MAINTAINER_ID) + if ok { + patch = append(patch, patchReplace("/maintainerId", maintainerID.(string))) + } + + _, _, err := client.ld.MetricsApi.PatchMetric(client.ctx, projectKey, key).PatchOperation(patch).Execute() + + if err != nil { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("Error updating metric resource %q from project %q", key, projectKey), + Detail: fmt.Sprintf("Details: \n %q", handleLdapiErr(err)), + }) + return diags + } + return resourceMetricRead(ctx, d, metaRaw) +} + +func resourceMetricDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + client := metaRaw.(*Client) + // Warning or errors can be collected in a slice type + var diags diag.Diagnostics + + projectKey := d.Get(PROJECT_KEY).(string) + key := d.Get(KEY).(string) + + _, err := client.ld.MetricsApi.DeleteMetric(client.ctx, projectKey, key).Execute() + if err != nil { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("Error deleting metric resource %q from project %q", key, projectKey), + Detail: fmt.Sprintf("Details: \n %q", handleLdapiErr(err)), + }) + return diags + } + + return resourceMetricRead(ctx, d, metaRaw) +} + +func resourceMetricImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + id := d.Id() + + projectKey, metricKey, err := metricIdToKeys(id) + if err != nil { + return nil, err + } + _ = d.Set(PROJECT_KEY, projectKey) + _ = d.Set(KEY, metricKey) + + return []*schema.ResourceData{d}, nil +} diff --git a/launchdarkly/resource_launchdarkly_metric_test.go b/launchdarkly/resource_launchdarkly_metric_test.go new file mode 100644 index 00000000..750b9d8d --- /dev/null +++ b/launchdarkly/resource_launchdarkly_metric_test.go @@ -0,0 +1,157 @@ +package launchdarkly + +import ( + "fmt" + "testing" + + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +const ( + testAccMetricBasic = ` +resource "launchdarkly_metric" "basic" { + project_key = launchdarkly_project.test.key + key = "basic-metric" + name = "Basic Metric" + description = "Basic metric description." + kind = "pageview" + tags = [ + "test" + ] + urls { + kind = "substring" + substring = "foo" + } + urls { + kind = "regex" + pattern = "foo" + } +} +` + testAccMetricUpdate = ` +resource "launchdarkly_metric" "basic" { + project_key = launchdarkly_project.test.key + key = "basic-metric" + name = "Basic updated Metric" + description = "Basic updated metric description." + kind = "pageview" + tags = [ + "test" + ] + urls { + kind = "substring" + substring = "bar" + } + urls { + kind = "regex" + pattern = "bar" + } +} +` +) + +func TestAccMetric_Basic(t *testing.T) { + projectKey := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + resourceName := "launchdarkly_metric.basic" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: withRandomProject(projectKey, testAccMetricBasic), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists("launchdarkly_project.test"), + testAccCheckMetricExists(resourceName), + resource.TestCheckResourceAttr(resourceName, NAME, "Basic Metric"), + resource.TestCheckResourceAttr(resourceName, KEY, "basic-metric"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, KIND, "pageview"), + resource.TestCheckResourceAttr(resourceName, "urls.0.kind", "substring"), + resource.TestCheckResourceAttr(resourceName, "urls.0.substring", "foo"), + resource.TestCheckResourceAttr(resourceName, "urls.1.kind", "regex"), + resource.TestCheckResourceAttr(resourceName, "urls.1.pattern", "foo"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccMetric_Update(t *testing.T) { + projectKey := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum) + resourceName := "launchdarkly_metric.basic" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: withRandomProject(projectKey, testAccMetricBasic), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists("launchdarkly_project.test"), + testAccCheckMetricExists(resourceName), + resource.TestCheckResourceAttr(resourceName, NAME, "Basic Metric"), + resource.TestCheckResourceAttr(resourceName, KEY, "basic-metric"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, KIND, "pageview"), + resource.TestCheckResourceAttr(resourceName, "urls.0.kind", "substring"), + resource.TestCheckResourceAttr(resourceName, "urls.0.substring", "foo"), + resource.TestCheckResourceAttr(resourceName, "urls.1.kind", "regex"), + resource.TestCheckResourceAttr(resourceName, "urls.1.pattern", "foo"), + ), + }, + { + Config: withRandomProject(projectKey, testAccMetricUpdate), + Check: resource.ComposeTestCheckFunc( + testAccCheckProjectExists("launchdarkly_project.test"), + testAccCheckMetricExists(resourceName), + resource.TestCheckResourceAttr(resourceName, NAME, "Basic updated 
Metric"), + resource.TestCheckResourceAttr(resourceName, KEY, "basic-metric"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, KIND, "pageview"), + resource.TestCheckResourceAttr(resourceName, "urls.0.kind", "substring"), + resource.TestCheckResourceAttr(resourceName, "urls.0.substring", "bar"), + resource.TestCheckResourceAttr(resourceName, "urls.1.kind", "regex"), + resource.TestCheckResourceAttr(resourceName, "urls.1.pattern", "bar"), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckMetricExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("not found: %s", resourceName) + } + metricKey, ok := rs.Primary.Attributes[KEY] + if !ok { + return fmt.Errorf("metric key not found: %s", resourceName) + } + projKey, ok := rs.Primary.Attributes[PROJECT_KEY] + if !ok { + return fmt.Errorf("project key not found: %s", resourceName) + } + client := testAccProvider.Meta().(*Client) + _, _, err := client.ld.MetricsApi.GetMetric(client.ctx, projKey, metricKey).Execute() + if err != nil { + return fmt.Errorf("received an error getting metric. %s", err) + } + return nil + } +} diff --git a/launchdarkly/resource_launchdarkly_project.go b/launchdarkly/resource_launchdarkly_project.go index 20deb29a..8a479cbb 100644 --- a/launchdarkly/resource_launchdarkly_project.go +++ b/launchdarkly/resource_launchdarkly_project.go @@ -4,8 +4,8 @@ import ( "context" "fmt" "log" - "net/http" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ldapi "github.com/launchdarkly/api-client-go/v7" ) @@ -32,11 +32,17 @@ func customizeProjectDiff(ctx context.Context, diff *schema.ResourceDiff, v inte // AND the customer removes the INCLUDE_IN_SNIPPET key from the config without replacing with defaultCSA // The read would assume no changes are needed, HOWEVER we need to jump back to LD set defaults // Hence the setting below - diff.SetNew(INCLUDE_IN_SNIPPET, false) - diff.SetNew(CLIENT_SIDE_AVAILABILITY, []map[string]interface{}{{ + err := diff.SetNew(INCLUDE_IN_SNIPPET, false) + if err != nil { + return err + } + err = diff.SetNew(DEFAULT_CLIENT_SIDE_AVAILABILITY, []map[string]interface{}{{ USING_ENVIRONMENT_ID: false, USING_MOBILE_KEY: true, }}) + if err != nil { + return err + } } @@ -46,11 +52,11 @@ func customizeProjectDiff(ctx context.Context, diff *schema.ResourceDiff, v inte } func resourceProject() *schema.Resource { return &schema.Resource{ - Create: resourceProjectCreate, - Read: resourceProjectRead, - Update: resourceProjectUpdate, - Delete: resourceProjectDelete, - Exists: resourceProjectExists, + CreateContext: resourceProjectCreate, + ReadContext: resourceProjectRead, + UpdateContext: resourceProjectUpdate, + DeleteContext: resourceProjectDelete, + Exists: resourceProjectExists, CustomizeDiff: customizeProjectDiff, @@ -64,7 +70,7 @@ func resourceProject() *schema.Resource { Required: true, Description: "The project's unique key", ForceNew: true, - ValidateFunc: validateKey(), + ValidateFunc: validateKeyAndLength(1, 20), }, NAME: { Type: schema.TypeString, @@ -116,7 +122,8 @@ func resourceProject() *schema.Resource { } } -func resourceProjectCreate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceProjectCreate(ctx context.Context, d 
*schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics client := metaRaw.(*Client) projectKey := d.Get(KEY).(string) name := d.Get(NAME).(string) @@ -132,26 +139,28 @@ func resourceProjectCreate(d *schema.ResourceData, metaRaw interface{}) error { projectBody.Environments = &envs } - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.ProjectsApi.PostProject(client.ctx).ProjectPost(projectBody).Execute() - }) + _, _, err := client.ld.ProjectsApi.PostProject(client.ctx).ProjectPost(projectBody).Execute() if err != nil { - return fmt.Errorf("failed to create project with name %s and projectKey %s: %v", name, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to create project with name %s and projectKey %s: %v", name, projectKey, handleLdapiErr(err)) } // ld's api does not allow tags to be passed in during project creation so we do an update - err = resourceProjectUpdate(d, metaRaw) - if err != nil { - return fmt.Errorf("failed to update project with name %s and projectKey %s: %v", name, projectKey, err) + updateDiags := resourceProjectUpdate(ctx, d, metaRaw) + if updateDiags.HasError() { + updateDiags = append(updateDiags, diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("failed to update project with name %s and projectKey %s: %v", name, projectKey, err), + }) + return updateDiags } - return nil + return diags } -func resourceProjectRead(d *schema.ResourceData, metaRaw interface{}) error { - return projectRead(d, metaRaw, false) +func resourceProjectRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + return projectRead(ctx, d, metaRaw, false) } -func resourceProjectUpdate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceProjectUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) projectKey := d.Get(KEY).(string) projName := d.Get(NAME) @@ -162,6 +171,7 @@ func resourceProjectUpdate(d *schema.ResourceData, metaRaw interface{}) error { clientSideHasChange := d.HasChange(DEFAULT_CLIENT_SIDE_AVAILABILITY) // GetOkExists is 'deprecated', but needed as optional booleans set to false return a 'false' ok value from GetOk // Also not really deprecated as they are keeping it around pending a replacement https://github.com/hashicorp/terraform-plugin-sdk/pull/350#issuecomment-597888969 + //nolint:staticcheck // SA1019 _, includeInSnippetOk := d.GetOkExists(INCLUDE_IN_SNIPPET) _, clientSideAvailabilityOk := d.GetOk(DEFAULT_CLIENT_SIDE_AVAILABILITY) defaultClientSideAvailability := &ldapi.ClientSideAvailabilityPost{ @@ -190,24 +200,17 @@ func resourceProjectUpdate(d *schema.ResourceData, metaRaw interface{}) error { })) } - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.ProjectsApi.PatchProject(client.ctx, projectKey).PatchOperation(patch).Execute() - }) - }) + _, _, err := client.ld.ProjectsApi.PatchProject(client.ctx, projectKey).PatchOperation(patch).Execute() if err != nil { - return fmt.Errorf("failed to update project with key %q: %s", projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to update project with key %q: %s", projectKey, handleLdapiErr(err)) } // Update environments if necessary oldSchemaEnvList, newSchemaEnvList := d.GetChange(ENVIRONMENTS) // Get the project so we can see if we need to create any environments or just update 
existing environments - rawProject, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.ProjectsApi.GetProject(client.ctx, projectKey).Execute() - }) + project, _, err := client.ld.ProjectsApi.GetProject(client.ctx, projectKey).Execute() if err != nil { - return fmt.Errorf("failed to load project %q before updating environments: %s", projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to load project %q before updating environments: %s", projectKey, handleLdapiErr(err)) } - project := rawProject.(ldapi.Project) environmentConfigs := newSchemaEnvList.([]interface{}) oldEnvironmentConfigs := oldSchemaEnvList.([]interface{}) @@ -228,11 +231,9 @@ func resourceProjectUpdate(d *schema.ResourceData, metaRaw interface{}) error { exists := environmentExistsInProject(project, envKey) if !exists { envPost := environmentPostFromResourceData(env) - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.EnvironmentsApi.PostEnvironment(client.ctx, projectKey).EnvironmentPost(envPost).Execute() - }) + _, _, err := client.ld.EnvironmentsApi.PostEnvironment(client.ctx, projectKey).EnvironmentPost(envPost).Execute() if err != nil { - return fmt.Errorf("failed to create environment %q in project %q: %s", envKey, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to create environment %q in project %q: %s", envKey, projectKey, handleLdapiErr(err)) } } @@ -243,15 +244,11 @@ func resourceProjectUpdate(d *schema.ResourceData, metaRaw interface{}) error { // by default patching an env that was not recently tracked in the state will import it into the tf state patch, err := getEnvironmentUpdatePatches(oldEnvConfig, envConfig) if err != nil { - return err + return diag.FromErr(err) } - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.EnvironmentsApi.PatchEnvironment(client.ctx, projectKey, envKey).PatchOperation(patch).Execute() - }) - }) + _, _, err = client.ld.EnvironmentsApi.PatchEnvironment(client.ctx, projectKey, envKey).PatchOperation(patch).Execute() if err != nil { - return fmt.Errorf("failed to update environment with key %q for project: %q: %+v", envKey, projectKey, err) + return diag.Errorf("failed to update environment with key %q for project: %q: %+v", envKey, projectKey, err) } } // we also want to delete environments that were previously tracked in state and have been removed from the config @@ -261,33 +258,28 @@ func resourceProjectUpdate(d *schema.ResourceData, metaRaw interface{}) error { envConfig := env.(map[string]interface{}) envKey := envConfig[KEY].(string) if _, persists := envConfigsForCompare[envKey]; !persists { - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - res, err := client.ld.EnvironmentsApi.DeleteEnvironment(client.ctx, projectKey, envKey).Execute() - return nil, res, err - }) + _, err = client.ld.EnvironmentsApi.DeleteEnvironment(client.ctx, projectKey, envKey).Execute() if err != nil { - return fmt.Errorf("failed to delete environment %q in project %q: %s", envKey, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to delete environment %q in project %q: %s", envKey, projectKey, handleLdapiErr(err)) } } } - return resourceProjectRead(d, metaRaw) + return resourceProjectRead(ctx, d, metaRaw) } -func resourceProjectDelete(d *schema.ResourceData, metaRaw interface{}) error { +func resourceProjectDelete(ctx context.Context, d 
*schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) projectKey := d.Get(KEY).(string) - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - res, err := client.ld.ProjectsApi.DeleteProject(client.ctx, projectKey).Execute() - return nil, res, err - }) - + _, err := client.ld.ProjectsApi.DeleteProject(client.ctx, projectKey).Execute() if err != nil { - return fmt.Errorf("failed to delete project with key %q: %s", projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to delete project with key %q: %s", projectKey, handleLdapiErr(err)) } - return nil + return diags } func resourceProjectExists(d *schema.ResourceData, metaRaw interface{}) (bool, error) { @@ -295,9 +287,7 @@ func resourceProjectExists(d *schema.ResourceData, metaRaw interface{}) (bool, e } func projectExists(projectKey string, meta *Client) (bool, error) { - _, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return meta.ld.ProjectsApi.GetProject(meta.ctx, projectKey).Execute() - }) + _, res, err := meta.ld.ProjectsApi.GetProject(meta.ctx, projectKey).Execute() if isStatusNotFound(res) { log.Println("got 404 when getting project. returning false.") return false, nil diff --git a/launchdarkly/resource_launchdarkly_project_test.go b/launchdarkly/resource_launchdarkly_project_test.go index 24d3c44d..581297b4 100644 --- a/launchdarkly/resource_launchdarkly_project_test.go +++ b/launchdarkly/resource_launchdarkly_project_test.go @@ -166,8 +166,8 @@ func TestAccProject_Create(t *testing.T) { Config: fmt.Sprintf(testAccProjectCreate, projectKey), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", projectKey), - resource.TestCheckResourceAttr(resourceName, "name", "test project"), + resource.TestCheckResourceAttr(resourceName, KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "test project"), resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), resource.TestCheckResourceAttr(resourceName, "tags.1", "test"), @@ -195,10 +195,10 @@ func TestAccProject_Update(t *testing.T) { Config: fmt.Sprintf(testAccProjectCreate, projectKey), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", projectKey), - resource.TestCheckResourceAttr(resourceName, "name", "test project"), + resource.TestCheckResourceAttr(resourceName, KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "test project"), resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "false"), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "false"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), resource.TestCheckResourceAttr(resourceName, "tags.1", "test"), resource.TestCheckResourceAttr(resourceName, "environments.#", "1"), @@ -211,9 +211,9 @@ func TestAccProject_Update(t *testing.T) { Config: fmt.Sprintf(testAccProjectUpdate, projectKey), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", projectKey), - resource.TestCheckResourceAttr(resourceName, "name", "awesome test project"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "true"), + 
resource.TestCheckResourceAttr(resourceName, KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "awesome test project"), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "true"), resource.TestCheckResourceAttr(resourceName, "tags.#", "1"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), resource.TestCheckResourceAttr(resourceName, "environments.#", "1"), @@ -226,11 +226,11 @@ func TestAccProject_Update(t *testing.T) { Config: fmt.Sprintf(testAccProjectUpdateRemoveOptional, projectKey), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", projectKey), - resource.TestCheckResourceAttr(resourceName, "name", "awesome test project"), + resource.TestCheckResourceAttr(resourceName, KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "awesome test project"), resource.TestCheckNoResourceAttr(resourceName, "tags"), resource.TestCheckNoResourceAttr(resourceName, "tags.#"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "false"), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "false"), ), }, { @@ -255,9 +255,9 @@ func TestAccProject_CSA_Update_And_Revert(t *testing.T) { Config: fmt.Sprintf(testAccProjectCreate, projectKey), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", projectKey), - resource.TestCheckResourceAttr(resourceName, "name", "test project"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "false"), + resource.TestCheckResourceAttr(resourceName, KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "test project"), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "false"), resource.TestCheckResourceAttr(resourceName, "default_client_side_availability.0.using_environment_id", "false"), resource.TestCheckResourceAttr(resourceName, "default_client_side_availability.0.using_mobile_key", "true"), ), @@ -266,9 +266,9 @@ func TestAccProject_CSA_Update_And_Revert(t *testing.T) { Config: fmt.Sprintf(testAccProjectClientSideAvailabilityTrue, projectKey), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", projectKey), - resource.TestCheckResourceAttr(resourceName, "name", "test project"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "true"), + resource.TestCheckResourceAttr(resourceName, KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "test project"), + resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "true"), resource.TestCheckResourceAttr(resourceName, "default_client_side_availability.0.using_environment_id", "true"), resource.TestCheckResourceAttr(resourceName, "default_client_side_availability.0.using_mobile_key", "true"), ), @@ -277,9 +277,9 @@ func TestAccProject_CSA_Update_And_Revert(t *testing.T) { Config: fmt.Sprintf(testAccProjectUpdateRemoveOptional, projectKey), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", projectKey), - resource.TestCheckResourceAttr(resourceName, "name", "awesome test project"), - resource.TestCheckResourceAttr(resourceName, "include_in_snippet", "false"), + resource.TestCheckResourceAttr(resourceName, KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "awesome test project"), + 
resource.TestCheckResourceAttr(resourceName, INCLUDE_IN_SNIPPET, "false"), resource.TestCheckResourceAttr(resourceName, "default_client_side_availability.0.using_environment_id", "false"), resource.TestCheckResourceAttr(resourceName, "default_client_side_availability.0.using_mobile_key", "true"), ), @@ -306,8 +306,8 @@ func TestAccProject_WithEnvironments(t *testing.T) { Config: fmt.Sprintf(testAccProjectWithEnvironment, projectKey), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", projectKey), - resource.TestCheckResourceAttr(resourceName, "name", "test project"), + resource.TestCheckResourceAttr(resourceName, KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "test project"), resource.TestCheckResourceAttr(resourceName, "environments.#", "1"), resource.TestCheckResourceAttr(resourceName, "environments.0.name", "test environment"), resource.TestCheckResourceAttr(resourceName, "environments.0.tags.#", "2"), @@ -330,8 +330,8 @@ func TestAccProject_WithEnvironments(t *testing.T) { Config: fmt.Sprintf(testAccProjectWithEnvironmentUpdate, projectKey), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", projectKey), - resource.TestCheckResourceAttr(resourceName, "name", "test project"), + resource.TestCheckResourceAttr(resourceName, KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "test project"), resource.TestCheckResourceAttr(resourceName, "environments.#", "2"), // Check environment 0 was updated @@ -368,8 +368,8 @@ func TestAccProject_WithEnvironments(t *testing.T) { Config: fmt.Sprintf(testAccProjectWithEnvironmentUpdateApprovalSettings, projectKey), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", projectKey), - resource.TestCheckResourceAttr(resourceName, "name", "test project"), + resource.TestCheckResourceAttr(resourceName, KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "test project"), resource.TestCheckResourceAttr(resourceName, "environments.#", "2"), // Check approval_settings have updated as expected @@ -397,8 +397,8 @@ func TestAccProject_WithEnvironments(t *testing.T) { Config: fmt.Sprintf(testAccProjectWithEnvironmentUpdateRemove, projectKey), Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", projectKey), - resource.TestCheckResourceAttr(resourceName, "name", "test project"), + resource.TestCheckResourceAttr(resourceName, KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, NAME, "test project"), resource.TestCheckResourceAttr(resourceName, "environments.#", "1"), // Check that optional attributes defaulted back to false diff --git a/launchdarkly/resource_launchdarkly_relay_proxy_configuration.go b/launchdarkly/resource_launchdarkly_relay_proxy_configuration.go new file mode 100644 index 00000000..8c79813c --- /dev/null +++ b/launchdarkly/resource_launchdarkly_relay_proxy_configuration.go @@ -0,0 +1,154 @@ +package launchdarkly + +import ( + "context" + "log" + + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + ldapi "github.com/launchdarkly/api-client-go/v7" +) + +func resourceRelayProxyConfig() *schema.Resource { + return &schema.Resource{ + CreateContext: relayProxyConfigCreate, + 
ReadContext: resourceRelayProxyConfigRead, + UpdateContext: relayProxyConfigUpdate, + DeleteContext: relayProxyConfigDelete, + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + + Schema: map[string]*schema.Schema{ + NAME: { + Type: schema.TypeString, + Required: true, + Description: "A human-friendly name for the Relay Proxy configuration", + }, + POLICY: policyStatementsSchema(policyStatementSchemaOptions{required: true}), + FULL_KEY: { + Type: schema.TypeString, + Sensitive: true, + Computed: true, + Description: "The unique key assigned to the Relay Proxy configuration during creation.", + }, + DISPLAY_KEY: { + Type: schema.TypeString, + Computed: true, + Description: "The last four characters of the full_key.", + }, + }, + } +} + +func relayProxyConfigCreate(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { + client := m.(*Client) + + name := d.Get(NAME).(string) + policy, err := policyStatementsFromResourceData(d.Get(POLICY).([]interface{})) + if err != nil { + return diag.FromErr(err) + } + post := ldapi.RelayAutoConfigPost{ + Name: name, + Policy: statementPostsToStatementReps(policy), + } + + proxyConfig, _, err := client.ld.RelayProxyConfigurationsApi.PostRelayAutoConfig(client.ctx).RelayAutoConfigPost(post).Execute() + if err != nil { + return diag.Errorf("failed to create Relay Proxy configuration with name %q: %s", name, handleLdapiErr(err)) + } + + d.SetId(proxyConfig.Id) + + // We only have the valid FULL_KEY immediately after creation. + err = d.Set(FULL_KEY, proxyConfig.FullKey) + if err != nil { + return diag.FromErr(err) + } + + return resourceRelayProxyConfigRead(ctx, d, m) +} + +func resourceRelayProxyConfigRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { + return relayProxyConfigRead(ctx, d, m, false) +} + +func relayProxyConfigRead(ctx context.Context, d *schema.ResourceData, m interface{}, isDataSource bool) diag.Diagnostics { + var diags diag.Diagnostics + client := m.(*Client) + + id := d.Id() + proxyConfig, res, err := client.ld.RelayProxyConfigurationsApi.GetRelayProxyConfig(client.ctx, id).Execute() + if isStatusNotFound(res) { + if isDataSource { + return diag.Errorf("Relay Proxy configuration with id %q not found.", id) + } + log.Printf("[DEBUG] Relay Proxy configuration with id %q not found on LaunchDarkly. 
Removing from state", id) + d.SetId("") + return diags + } + if err != nil { + return diag.Errorf("failed to get Relay Proxy configuration with id %q", id) + } + d.SetId(proxyConfig.Id) + + err = d.Set(NAME, proxyConfig.Name) + if err != nil { + return diag.FromErr(err) + } + + err = d.Set(POLICY, policyStatementsToResourceData(proxyConfig.Policy)) + if err != nil { + return diag.FromErr(err) + } + + err = d.Set(DISPLAY_KEY, proxyConfig.DisplayKey) + if err != nil { + return diag.FromErr(err) + } + + return diags +} + +func relayProxyConfigUpdate(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := m.(*Client) + + id := d.Id() + name := d.Get(NAME).(string) + policy, err := policyStatementsFromResourceData(d.Get(POLICY).([]interface{})) + if err != nil { + return diag.FromErr(err) + } + + patch := []ldapi.PatchOperation{ + patchReplace("/name", &name), + patchReplace("/policy", &policy), + } + + patchWithComment := ldapi.PatchWithComment{ + Patch: patch, + Comment: ldapi.PtrString("Terraform"), + } + + _, _, err = client.ld.RelayProxyConfigurationsApi.PatchRelayAutoConfig(client.ctx, id).PatchWithComment(patchWithComment).Execute() + if err != nil { + return diag.Errorf("failed to update relay proxy configuration with id: %q: %s", id, handleLdapiErr(err)) + } + + return diags +} + +func relayProxyConfigDelete(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := m.(*Client) + + id := d.Id() + _, err := client.ld.RelayProxyConfigurationsApi.DeleteRelayAutoConfig(client.ctx, id).Execute() + if err != nil { + return diag.Errorf("failed to delete relay proxy configuration with id: %q: %s", id, handleLdapiErr(err)) + } + + return diags +} diff --git a/launchdarkly/resource_launchdarkly_relay_proxy_configuration_test.go b/launchdarkly/resource_launchdarkly_relay_proxy_configuration_test.go new file mode 100644 index 00000000..1bf52b97 --- /dev/null +++ b/launchdarkly/resource_launchdarkly_relay_proxy_configuration_test.go @@ -0,0 +1,140 @@ +package launchdarkly + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +const ( + testAccRelayProxyConfigCreate = ` +resource "launchdarkly_relay_proxy_configuration" "test" { + name = "example-config" + policy { + actions = ["*"] + effect = "allow" + resources = ["proj/*:env/*"] + } +} +` + + testAccRelayProxyConfigUpdate = ` +resource "launchdarkly_relay_proxy_configuration" "test" { + name = "updated-config" + policy { + not_actions = ["*"] + effect = "deny" + not_resources = ["proj/*:env/test"] + } +} +` +) + +func getRelayProxyConfigImportStep(resourceName string) resource.TestStep { + return resource.TestStep{ + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + // Because the FULL_KEY is only revealed when the config is created we will never be able to import it + ImportStateVerifyIgnore: []string{FULL_KEY}, + } +} + +func TestAccRelayProxyConfig_Create(t *testing.T) { + resourceName := "launchdarkly_relay_proxy_configuration.test" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccRelayProxyConfigCreate, + Check: resource.ComposeTestCheckFunc( + testAccCheckRelayProxyConfigExists(resourceName), + resource.TestCheckResourceAttr(resourceName, 
"name", "example-config"), + resource.TestCheckResourceAttrSet(resourceName, "full_key"), + resource.TestCheckResourceAttrSet(resourceName, "display_key"), + resource.TestCheckResourceAttr(resourceName, "policy.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.actions.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.actions.0", "*"), + resource.TestCheckResourceAttr(resourceName, "policy.0.effect", "allow"), + resource.TestCheckResourceAttr(resourceName, "policy.0.resources.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.resources.0", "proj/*:env/*"), + ), + }, + getRelayProxyConfigImportStep(resourceName), + }, + }, + ) +} + +func TestAccRelayProxyConfig_Update(t *testing.T) { + resourceName := "launchdarkly_relay_proxy_configuration.test" + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { + testAccPreCheck(t) + }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccRelayProxyConfigCreate, + Check: resource.ComposeTestCheckFunc( + testAccCheckRelayProxyConfigExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "name", "example-config"), + resource.TestCheckResourceAttrSet(resourceName, "full_key"), + resource.TestCheckResourceAttrSet(resourceName, "display_key"), + resource.TestCheckResourceAttr(resourceName, "policy.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.actions.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.actions.0", "*"), + resource.TestCheckResourceAttr(resourceName, "policy.0.effect", "allow"), + resource.TestCheckResourceAttr(resourceName, "policy.0.resources.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.resources.0", "proj/*:env/*"), + ), + }, + getRelayProxyConfigImportStep(resourceName), + { + Config: testAccRelayProxyConfigUpdate, + Check: resource.ComposeTestCheckFunc( + testAccCheckRelayProxyConfigExists(resourceName), + resource.TestCheckResourceAttr(resourceName, "name", "updated-config"), + resource.TestCheckResourceAttrSet(resourceName, "full_key"), + resource.TestCheckResourceAttrSet(resourceName, "display_key"), + resource.TestCheckResourceAttr(resourceName, "policy.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.actions.#", "0"), + resource.TestCheckResourceAttr(resourceName, "policy.0.not_actions.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.not_actions.0", "*"), + resource.TestCheckResourceAttr(resourceName, "policy.0.effect", "deny"), + resource.TestCheckResourceAttr(resourceName, "policy.0.not_resources.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.not_resources.#", "1"), + resource.TestCheckResourceAttr(resourceName, "policy.0.not_resources.0", "proj/*:env/test"), + ), + }, + getRelayProxyConfigImportStep(resourceName), + }, + }, + ) +} + +func testAccCheckRelayProxyConfigExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("not found: %s", resourceName) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("webhook ID is not set") + } + + client := testAccProvider.Meta().(*Client) + _, _, err := client.ld.RelayProxyConfigurationsApi.GetRelayProxyConfig(client.ctx, rs.Primary.ID).Execute() + if err != nil { + return fmt.Errorf("received an error getting relay proxy config: %w", err) + } + + return nil + } +} diff --git a/launchdarkly/resource_launchdarkly_segment.go 
b/launchdarkly/resource_launchdarkly_segment.go index d8a3bd2d..98c376a9 100644 --- a/launchdarkly/resource_launchdarkly_segment.go +++ b/launchdarkly/resource_launchdarkly_segment.go @@ -1,10 +1,11 @@ package launchdarkly import ( + "context" "fmt" - "net/http" "strings" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ldapi "github.com/launchdarkly/api-client-go/v7" ) @@ -12,25 +13,25 @@ import ( func resourceSegment() *schema.Resource { schemaMap := baseSegmentSchema() schemaMap[PROJECT_KEY] = &schema.Schema{ - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validateKey(), - Description: "The segment's project key.", + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateDiagFunc: validateKey(), + Description: "The segment's project key.", } schemaMap[ENV_KEY] = &schema.Schema{ - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validateKey(), - Description: "The segment's environment key.", + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateDiagFunc: validateKey(), + Description: "The segment's environment key.", } schemaMap[KEY] = &schema.Schema{ - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validateKey(), - Description: "The unique key that references the segment.", + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateDiagFunc: validateKey(), + Description: "The unique key that references the segment.", } schemaMap[NAME] = &schema.Schema{ Type: schema.TypeString, @@ -38,11 +39,11 @@ func resourceSegment() *schema.Resource { Description: "The human-friendly name for the segment.", } return &schema.Resource{ - Create: resourceSegmentCreate, - Read: resourceSegmentRead, - Update: resourceSegmentUpdate, - Delete: resourceSegmentDelete, - Exists: resourceSegmentExists, + CreateContext: resourceSegmentCreate, + ReadContext: resourceSegmentRead, + UpdateContext: resourceSegmentUpdate, + DeleteContext: resourceSegmentDelete, + Exists: resourceSegmentExists, Importer: &schema.ResourceImporter{ State: resourceSegmentImport, @@ -52,7 +53,7 @@ func resourceSegment() *schema.Resource { } } -func resourceSegmentCreate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceSegmentCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) projectKey := d.Get(PROJECT_KEY).(string) envKey := d.Get(ENV_KEY).(string) @@ -69,31 +70,30 @@ func resourceSegmentCreate(d *schema.ResourceData, metaRaw interface{}) error { Tags: &tags, } - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.SegmentsApi.PostSegment(client.ctx, projectKey, envKey).SegmentBody(segment).Execute() - }) - + _, _, err := client.ld.SegmentsApi.PostSegment(client.ctx, projectKey, envKey).SegmentBody(segment).Execute() if err != nil { - return fmt.Errorf("failed to create segment %q in project %q: %s", key, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to create segment %q in project %q: %s", key, projectKey, handleLdapiErr(err)) } // ld's api does not allow some fields to be passed in during segment creation so we do an update: // https://apidocs.launchdarkly.com/reference#create-segment - err = resourceSegmentUpdate(d, metaRaw) - if err != nil { - return fmt.Errorf("failed to update segment with name %q key %q for projectKey %q: %s", - segmentName, key, projectKey, 
handleLdapiErr(err)) + updateDiags := resourceSegmentUpdate(ctx, d, metaRaw) + if updateDiags.HasError() { + // TODO: Figure out if we can get the err out of updateDiags (not looking likely) to use in handleLdapiErr + return updateDiags + // return diag.Errorf("failed to update segment with name %q key %q for projectKey %q: %s", + // segmentName, key, projectKey, handleLdapiErr(errs)) } d.SetId(projectKey + "/" + envKey + "/" + key) - return resourceSegmentRead(d, metaRaw) + return resourceSegmentRead(ctx, d, metaRaw) } -func resourceSegmentRead(d *schema.ResourceData, metaRaw interface{}) error { - return segmentRead(d, metaRaw, false) +func resourceSegmentRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + return segmentRead(ctx, d, metaRaw, false) } -func resourceSegmentUpdate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceSegmentUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) key := d.Get(KEY).(string) projectKey := d.Get(PROJECT_KEY).(string) @@ -105,7 +105,7 @@ func resourceSegmentUpdate(d *schema.ResourceData, metaRaw interface{}) error { excluded := d.Get(EXCLUDED).([]interface{}) rules, err := segmentRulesFromResourceData(d, RULES) if err != nil { - return err + return diag.FromErr(err) } comment := "Terraform" patch := ldapi.PatchWithComment{ @@ -120,34 +120,28 @@ func resourceSegmentUpdate(d *schema.ResourceData, metaRaw interface{}) error { patchReplace("/rules", rules), }} - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.SegmentsApi.PatchSegment(client.ctx, projectKey, envKey, key).PatchWithComment(patch).Execute() - }) - }) + _, _, err = client.ld.SegmentsApi.PatchSegment(client.ctx, projectKey, envKey, key).PatchWithComment(patch).Execute() if err != nil { - return fmt.Errorf("failed to update segment %q in project %q: %s", key, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to update segment %q in project %q: %s", key, projectKey, handleLdapiErr(err)) } - return resourceSegmentRead(d, metaRaw) + return resourceSegmentRead(ctx, d, metaRaw) } -func resourceSegmentDelete(d *schema.ResourceData, metaRaw interface{}) error { +func resourceSegmentDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) projectKey := d.Get(PROJECT_KEY).(string) envKey := d.Get(ENV_KEY).(string) key := d.Get(KEY).(string) - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - res, err := client.ld.SegmentsApi.DeleteSegment(client.ctx, projectKey, envKey, key).Execute() - return nil, res, err - }) - + _, err := client.ld.SegmentsApi.DeleteSegment(client.ctx, projectKey, envKey, key).Execute() if err != nil { - return fmt.Errorf("failed to delete segment %q from project %q: %s", key, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to delete segment %q from project %q: %s", key, projectKey, handleLdapiErr(err)) } - return nil + return diags } func resourceSegmentExists(d *schema.ResourceData, metaRaw interface{}) (bool, error) { @@ -156,9 +150,7 @@ func resourceSegmentExists(d *schema.ResourceData, metaRaw interface{}) (bool, e envKey := d.Get(ENV_KEY).(string) key := d.Get(KEY).(string) - _, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return 
client.ld.SegmentsApi.GetSegment(client.ctx, projectKey, envKey, key).Execute() - }) + _, res, err := client.ld.SegmentsApi.GetSegment(client.ctx, projectKey, envKey, key).Execute() if isStatusNotFound(res) { return false, nil } diff --git a/launchdarkly/resource_launchdarkly_segment_test.go b/launchdarkly/resource_launchdarkly_segment_test.go index 4db7d684..ac52de8a 100644 --- a/launchdarkly/resource_launchdarkly_segment_test.go +++ b/launchdarkly/resource_launchdarkly_segment_test.go @@ -97,11 +97,11 @@ func TestAccSegment_Create(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckSegmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", "segmentKey1"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "segment name"), - resource.TestCheckResourceAttr(resourceName, "description", "segment description"), + resource.TestCheckResourceAttr(resourceName, KEY, "segmentKey1"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "segment name"), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "segment description"), resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), resource.TestCheckResourceAttr(resourceName, "tags.0", "segmentTag1"), resource.TestCheckResourceAttr(resourceName, "tags.1", "segmentTag2"), @@ -111,7 +111,7 @@ func TestAccSegment_Create(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "excluded.#", "2"), resource.TestCheckResourceAttr(resourceName, "excluded.0", "user3"), resource.TestCheckResourceAttr(resourceName, "excluded.1", "user4"), - resource.TestCheckResourceAttrSet(resourceName, "creation_date"), + resource.TestCheckResourceAttrSet(resourceName, CREATION_DATE), ), }, { @@ -137,11 +137,11 @@ func TestAccSegment_Update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckSegmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", "segmentKey1"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "segment name"), - resource.TestCheckResourceAttr(resourceName, "description", "segment description"), + resource.TestCheckResourceAttr(resourceName, KEY, "segmentKey1"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "segment name"), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "segment description"), resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), resource.TestCheckResourceAttr(resourceName, "tags.0", "segmentTag1"), resource.TestCheckResourceAttr(resourceName, "tags.1", "segmentTag2"), @@ -158,11 +158,11 @@ func TestAccSegment_Update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckSegmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", "segmentKey1"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - 
resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "segment name"), - resource.TestCheckResourceAttr(resourceName, "description", "segment description"), + resource.TestCheckResourceAttr(resourceName, KEY, "segmentKey1"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "segment name"), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "segment description"), resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), resource.TestCheckResourceAttr(resourceName, "tags.0", ".segmentTag2"), resource.TestCheckResourceAttr(resourceName, "tags.1", "segmentTag1"), @@ -194,11 +194,11 @@ func TestAccSegment_Update(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckSegmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", "segmentKey1"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "segment name"), - resource.TestCheckResourceAttr(resourceName, "description", "segment description"), + resource.TestCheckResourceAttr(resourceName, KEY, "segmentKey1"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "segment name"), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "segment description"), resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), resource.TestCheckResourceAttr(resourceName, "tags.0", "segmentTag1"), resource.TestCheckResourceAttr(resourceName, "tags.1", "segmentTag2"), @@ -208,7 +208,7 @@ func TestAccSegment_Update(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "excluded.#", "2"), resource.TestCheckResourceAttr(resourceName, "excluded.0", "user3"), resource.TestCheckResourceAttr(resourceName, "excluded.1", "user4"), - resource.TestCheckNoResourceAttr(resourceName, "rules"), + resource.TestCheckNoResourceAttr(resourceName, RULES), ), }, { @@ -234,11 +234,11 @@ func TestAccSegment_WithRules(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckProjectExists("launchdarkly_project.test"), testAccCheckSegmentExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "key", "segmentKey1"), - resource.TestCheckResourceAttr(resourceName, "project_key", projectKey), - resource.TestCheckResourceAttr(resourceName, "env_key", "test"), - resource.TestCheckResourceAttr(resourceName, "name", "segment name"), - resource.TestCheckResourceAttr(resourceName, "description", "segment description"), + resource.TestCheckResourceAttr(resourceName, KEY, "segmentKey1"), + resource.TestCheckResourceAttr(resourceName, PROJECT_KEY, projectKey), + resource.TestCheckResourceAttr(resourceName, ENV_KEY, "test"), + resource.TestCheckResourceAttr(resourceName, NAME, "segment name"), + resource.TestCheckResourceAttr(resourceName, DESCRIPTION, "segment description"), resource.TestCheckResourceAttr(resourceName, "rules.#", "2"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.#", "1"), resource.TestCheckResourceAttr(resourceName, "rules.0.clauses.0.attribute", "test_att"), diff --git a/launchdarkly/resource_launchdarkly_team_member.go 
b/launchdarkly/resource_launchdarkly_team_member.go index 9fd93b15..810b9a96 100644 --- a/launchdarkly/resource_launchdarkly_team_member.go +++ b/launchdarkly/resource_launchdarkly_team_member.go @@ -1,10 +1,11 @@ package launchdarkly import ( + "context" "fmt" "log" - "net/http" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" @@ -13,11 +14,11 @@ import ( func resourceTeamMember() *schema.Resource { return &schema.Resource{ - Create: resourceTeamMemberCreate, - Read: resourceTeamMemberRead, - Update: resourceTeamMemberUpdate, - Delete: resourceTeamMemberDelete, - Exists: resourceTeamMemberExists, + CreateContext: resourceTeamMemberCreate, + ReadContext: resourceTeamMemberRead, + UpdateContext: resourceTeamMemberUpdate, + DeleteContext: resourceTeamMemberDelete, + Exists: resourceTeamMemberExists, Importer: &schema.ResourceImporter{ StateContext: schema.ImportStatePassthroughContext, @@ -41,12 +42,12 @@ func resourceTeamMember() *schema.Resource { Description: "The team member's last name", }, ROLE: { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: "The team member's role. This must be reader, writer, admin, or owner. Team members must have either a role or custom role", - ValidateFunc: validation.StringInSlice([]string{"reader", "writer", "admin"}, false), - AtLeastOneOf: []string{ROLE, CUSTOM_ROLES}, + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: "The team member's role. This must be reader, writer, admin, or owner. Team members must have either a role or custom role", + ValidateDiagFunc: validation.ToDiagFunc(validation.StringInSlice([]string{"reader", "writer", "admin"}, false)), + AtLeastOneOf: []string{ROLE, CUSTOM_ROLES}, }, CUSTOM_ROLES: { Type: schema.TypeSet, @@ -60,7 +61,7 @@ func resourceTeamMember() *schema.Resource { } } -func resourceTeamMemberCreate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceTeamMemberCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) memberEmail := d.Get(EMAIL).(string) firstName := d.Get(FIRST_NAME).(string) @@ -81,33 +82,33 @@ func resourceTeamMemberCreate(d *schema.ResourceData, metaRaw interface{}) error CustomRoles: &customRoles, } - membersRaw, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.AccountMembersApi.PostMembers(client.ctx).NewMemberForm([]ldapi.NewMemberForm{membersBody}).Execute() - }) - members := membersRaw.(ldapi.Members) + members, _, err := client.ld.AccountMembersApi.PostMembers(client.ctx).NewMemberForm([]ldapi.NewMemberForm{membersBody}).Execute() if err != nil { - return fmt.Errorf("failed to create team member with email: %s: %v", memberEmail, handleLdapiErr(err)) + return diag.Errorf("failed to create team member with email: %s: %v", memberEmail, handleLdapiErr(err)) } d.SetId(members.Items[0].Id) - return resourceTeamMemberRead(d, metaRaw) + return resourceTeamMemberRead(ctx, d, metaRaw) } -func resourceTeamMemberRead(d *schema.ResourceData, metaRaw interface{}) error { +func resourceTeamMemberRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) memberID := d.Id() - memberRaw, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return 
client.ld.AccountMembersApi.GetMember(client.ctx, memberID).Execute() - }) - member := memberRaw.(ldapi.Member) + member, res, err := client.ld.AccountMembersApi.GetMember(client.ctx, memberID).Execute() if isStatusNotFound(res) { log.Printf("[WARN] failed to find member with id %q, removing from state", memberID) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] failed to find member with id %q, removing from state", memberID), + }) d.SetId("") - return nil + return diags } if err != nil { - return fmt.Errorf("failed to get member with id %q: %v", memberID, err) + return diag.Errorf("failed to get member with id %q: %v", memberID, err) } d.SetId(member.Id) @@ -118,16 +119,16 @@ func resourceTeamMemberRead(d *schema.ResourceData, metaRaw interface{}) error { customRoleKeys, err := customRoleIDsToKeys(client, member.CustomRoles) if err != nil { - return err + return diag.FromErr(err) } err = d.Set(CUSTOM_ROLES, customRoleKeys) if err != nil { - return fmt.Errorf("failed to set custom roles on team member with id %q: %v", member.Id, err) + return diag.Errorf("failed to set custom roles on team member with id %q: %v", member.Id, err) } - return nil + return diags } -func resourceTeamMemberUpdate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceTeamMemberUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) memberID := d.Id() memberRole := d.Get(ROLE).(string) @@ -139,7 +140,7 @@ func resourceTeamMemberUpdate(d *schema.ResourceData, metaRaw interface{}) error } customRoleIds, err := customRoleKeysToIDs(client, customRoleKeys) if err != nil { - return err + return diag.FromErr(err) } patch := []ldapi.PatchOperation{ @@ -148,30 +149,25 @@ func resourceTeamMemberUpdate(d *schema.ResourceData, metaRaw interface{}) error patchReplace("/customRoles", &customRoleIds), } - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.AccountMembersApi.PatchMember(client.ctx, memberID).PatchOperation(patch).Execute() - }) - }) + _, _, err = client.ld.AccountMembersApi.PatchMember(client.ctx, memberID).PatchOperation(patch).Execute() if err != nil { - return fmt.Errorf("failed to update team member with id %q: %s", memberID, handleLdapiErr(err)) + return diag.Errorf("failed to update team member with id %q: %s", memberID, handleLdapiErr(err)) } - return resourceTeamMemberRead(d, metaRaw) + return resourceTeamMemberRead(ctx, d, metaRaw) } -func resourceTeamMemberDelete(d *schema.ResourceData, metaRaw interface{}) error { +func resourceTeamMemberDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - res, err := client.ld.AccountMembersApi.DeleteMember(client.ctx, d.Id()).Execute() - return nil, res, err - }) + _, err := client.ld.AccountMembersApi.DeleteMember(client.ctx, d.Id()).Execute() if err != nil { - return fmt.Errorf("failed to delete team member with id %q: %s", d.Id(), handleLdapiErr(err)) + return diag.Errorf("failed to delete team member with id %q: %s", d.Id(), handleLdapiErr(err)) } - return nil + return diags } func resourceTeamMemberExists(d *schema.ResourceData, metaRaw interface{}) (bool, error) { diff --git a/launchdarkly/resource_launchdarkly_team_member_test.go 
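// Editor's note: illustrative sketch only, not part of this change set. resourceTeamMemberRead
// above logs, appends a Warning diagnostic, and clears the resource ID when the member no longer
// exists upstream; that drift-handling pattern recurs across the resources in this diff and could
// be factored into a hypothetical helper like this one. Assumed imports: fmt,
// github.com/hashicorp/terraform-plugin-sdk/v2/diag and .../v2/helper/schema.
func warnAndRemoveFromState(d *schema.ResourceData, kind, id string) diag.Diagnostics {
	// Clearing the ID tells Terraform the resource is gone, so the next plan recreates it.
	d.SetId("")
	return diag.Diagnostics{{
		Severity: diag.Warning,
		Summary:  fmt.Sprintf("failed to find %s with id %q, removing from state", kind, id),
	}}
}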
b/launchdarkly/resource_launchdarkly_team_member_test.go index 8bfec216..7dab9a29 100644 --- a/launchdarkly/resource_launchdarkly_team_member_test.go +++ b/launchdarkly/resource_launchdarkly_team_member_test.go @@ -93,10 +93,10 @@ func TestAccTeamMember_CreateGeneric(t *testing.T) { Config: fmt.Sprintf(testAccTeamMemberCreate, randomName), Check: resource.ComposeTestCheckFunc( testAccCheckMemberExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "email", fmt.Sprintf("%s@example.com", randomName)), - resource.TestCheckResourceAttr(resourceName, "first_name", "first"), - resource.TestCheckResourceAttr(resourceName, "last_name", "last"), - resource.TestCheckResourceAttr(resourceName, "role", "admin"), + resource.TestCheckResourceAttr(resourceName, EMAIL, fmt.Sprintf("%s@example.com", randomName)), + resource.TestCheckResourceAttr(resourceName, FIRST_NAME, "first"), + resource.TestCheckResourceAttr(resourceName, LAST_NAME, "last"), + resource.TestCheckResourceAttr(resourceName, ROLE, "admin"), resource.TestCheckResourceAttr(resourceName, "custom_roles.#", "0"), ), }, @@ -122,10 +122,10 @@ func TestAccTeamMember_UpdateGeneric(t *testing.T) { Config: fmt.Sprintf(testAccTeamMemberCreate, randomName), Check: resource.ComposeTestCheckFunc( testAccCheckMemberExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "email", fmt.Sprintf("%s@example.com", randomName)), - resource.TestCheckResourceAttr(resourceName, "first_name", "first"), - resource.TestCheckResourceAttr(resourceName, "last_name", "last"), - resource.TestCheckResourceAttr(resourceName, "role", "admin"), + resource.TestCheckResourceAttr(resourceName, EMAIL, fmt.Sprintf("%s@example.com", randomName)), + resource.TestCheckResourceAttr(resourceName, FIRST_NAME, "first"), + resource.TestCheckResourceAttr(resourceName, LAST_NAME, "last"), + resource.TestCheckResourceAttr(resourceName, ROLE, "admin"), resource.TestCheckResourceAttr(resourceName, "custom_roles.#", "0"), ), }, @@ -133,10 +133,10 @@ func TestAccTeamMember_UpdateGeneric(t *testing.T) { Config: fmt.Sprintf(testAccTeamMemberUpdate, randomName), Check: resource.ComposeTestCheckFunc( testAccCheckMemberExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "email", fmt.Sprintf("%s@example.com", randomName)), - resource.TestCheckResourceAttr(resourceName, "first_name", "first"), - resource.TestCheckResourceAttr(resourceName, "last_name", "last"), - resource.TestCheckResourceAttr(resourceName, "role", "writer"), + resource.TestCheckResourceAttr(resourceName, EMAIL, fmt.Sprintf("%s@example.com", randomName)), + resource.TestCheckResourceAttr(resourceName, FIRST_NAME, "first"), + resource.TestCheckResourceAttr(resourceName, LAST_NAME, "last"), + resource.TestCheckResourceAttr(resourceName, ROLE, "writer"), resource.TestCheckResourceAttr(resourceName, "custom_roles.#", "0"), ), }, @@ -160,9 +160,9 @@ func TestAccTeamMember_CreateWithCustomRole(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckCustomRoleExists(roleResourceName), testAccCheckMemberExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "email", fmt.Sprintf("%s@example.com", randomName)), - resource.TestCheckResourceAttr(resourceName, "first_name", "first"), - resource.TestCheckResourceAttr(resourceName, "last_name", "last"), + resource.TestCheckResourceAttr(resourceName, EMAIL, fmt.Sprintf("%s@example.com", randomName)), + resource.TestCheckResourceAttr(resourceName, FIRST_NAME, "first"), + resource.TestCheckResourceAttr(resourceName, LAST_NAME, "last"), 
resource.TestCheckResourceAttr(resourceName, "custom_roles.#", "1"), resource.TestCheckResourceAttr(resourceName, "custom_roles.0", roleKey), ), @@ -194,9 +194,9 @@ func TestAccTeamMember_UpdateWithCustomRole(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckCustomRoleExists(roleResourceName1), testAccCheckMemberExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "email", fmt.Sprintf("%s@example.com", randomName)), - resource.TestCheckResourceAttr(resourceName, "first_name", "first"), - resource.TestCheckResourceAttr(resourceName, "last_name", "last"), + resource.TestCheckResourceAttr(resourceName, EMAIL, fmt.Sprintf("%s@example.com", randomName)), + resource.TestCheckResourceAttr(resourceName, FIRST_NAME, "first"), + resource.TestCheckResourceAttr(resourceName, LAST_NAME, "last"), resource.TestCheckResourceAttr(resourceName, "custom_roles.#", "1"), resource.TestCheckResourceAttr(resourceName, "custom_roles.0", roleKey1), ), @@ -211,9 +211,9 @@ func TestAccTeamMember_UpdateWithCustomRole(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckCustomRoleExists(roleResourceName2), testAccCheckMemberExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "email", fmt.Sprintf("%s@example.com", randomName)), - resource.TestCheckResourceAttr(resourceName, "first_name", "first"), - resource.TestCheckResourceAttr(resourceName, "last_name", "last"), + resource.TestCheckResourceAttr(resourceName, EMAIL, fmt.Sprintf("%s@example.com", randomName)), + resource.TestCheckResourceAttr(resourceName, FIRST_NAME, "first"), + resource.TestCheckResourceAttr(resourceName, LAST_NAME, "last"), resource.TestCheckResourceAttr(resourceName, "custom_roles.#", "1"), resource.TestCheckResourceAttr(resourceName, "custom_roles.0", roleKey2), ), diff --git a/launchdarkly/resource_launchdarkly_webhook.go b/launchdarkly/resource_launchdarkly_webhook.go index 06067fd3..cd943c8c 100644 --- a/launchdarkly/resource_launchdarkly_webhook.go +++ b/launchdarkly/resource_launchdarkly_webhook.go @@ -1,9 +1,10 @@ package launchdarkly import ( + "context" "fmt" - "net/http" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ldapi "github.com/launchdarkly/api-client-go/v7" ) @@ -22,11 +23,11 @@ func resourceWebhook() *schema.Resource { Default: false, } return &schema.Resource{ - Create: resourceWebhookCreate, - Read: resourceWebhookRead, - Update: resourceWebhookUpdate, - Delete: resourceWebhookDelete, - Exists: resourceWebhookExists, + CreateContext: resourceWebhookCreate, + ReadContext: resourceWebhookRead, + UpdateContext: resourceWebhookUpdate, + DeleteContext: resourceWebhookDelete, + Exists: resourceWebhookExists, Importer: &schema.ResourceImporter{ StateContext: schema.ImportStatePassthroughContext, @@ -36,7 +37,7 @@ func resourceWebhook() *schema.Resource { } } -func resourceWebhookCreate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceWebhookCreate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) webhookURL := d.Get(URL).(string) webhookSecret := d.Get(SECRET).(string) @@ -53,7 +54,7 @@ func resourceWebhookCreate(d *schema.ResourceData, metaRaw interface{}) error { if rawStatements, ok := d.GetOk(STATEMENTS); ok { statements, err := policyStatementsFromResourceData(rawStatements.([]interface{})) if err != nil { - return err + return diag.FromErr(err) } webhookBody.Statements = &statements } @@ -65,30 
+66,31 @@ func resourceWebhookCreate(d *schema.ResourceData, metaRaw interface{}) error { webhookBody.Sign = true } - webhookRaw, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.WebhooksApi.PostWebhook(client.ctx).WebhookPost(webhookBody).Execute() - }) - webhook := webhookRaw.(ldapi.Webhook) + webhook, _, err := client.ld.WebhooksApi.PostWebhook(client.ctx).WebhookPost(webhookBody).Execute() if err != nil { - return fmt.Errorf("failed to create webhook with name %q: %s", webhookName, handleLdapiErr(err)) + return diag.Errorf("failed to create webhook with name %q: %s", webhookName, handleLdapiErr(err)) } d.SetId(webhook.Id) // ld's api does not allow tags to be passed in during webhook creation so we do an update - err = resourceWebhookUpdate(d, metaRaw) - if err != nil { - return fmt.Errorf("error updating after webhook creation. Webhook name: %q", webhookName) + updateDiags := resourceWebhookUpdate(ctx, d, metaRaw) + if updateDiags.HasError() { + updateDiags = append(updateDiags, diag.Diagnostic{ + Severity: diag.Error, + Summary: fmt.Sprintf("error updating after webhook creation. Webhook name: %q", webhookName), + }) + return updateDiags } - return resourceWebhookRead(d, metaRaw) + return resourceWebhookRead(ctx, d, metaRaw) } -func resourceWebhookRead(d *schema.ResourceData, metaRaw interface{}) error { - return webhookRead(d, metaRaw, false) +func resourceWebhookRead(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + return webhookRead(ctx, d, metaRaw, false) } -func resourceWebhookUpdate(d *schema.ResourceData, metaRaw interface{}) error { +func resourceWebhookUpdate(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { client := metaRaw.(*Client) webhookID := d.Id() webhookURL := d.Get(URL).(string) @@ -107,7 +109,7 @@ func resourceWebhookUpdate(d *schema.ResourceData, metaRaw interface{}) error { statements, err := policyStatementsFromResourceData(d.Get(STATEMENTS).([]interface{})) if err != nil { - return err + return diag.FromErr(err) } if d.HasChange(STATEMENTS) { @@ -118,32 +120,26 @@ func resourceWebhookUpdate(d *schema.ResourceData, metaRaw interface{}) error { } } - _, _, err = handleRateLimit(func() (interface{}, *http.Response, error) { - return handleNoConflict(func() (interface{}, *http.Response, error) { - return client.ld.WebhooksApi.PatchWebhook(client.ctx, webhookID).PatchOperation(patch).Execute() - }) - }) + _, _, err = client.ld.WebhooksApi.PatchWebhook(client.ctx, webhookID).PatchOperation(patch).Execute() if err != nil { - return fmt.Errorf("failed to update webhook with id %q: %s", webhookID, handleLdapiErr(err)) + return diag.Errorf("failed to update webhook with id %q: %s", webhookID, handleLdapiErr(err)) } - return resourceWebhookRead(d, metaRaw) + return resourceWebhookRead(ctx, d, metaRaw) } -func resourceWebhookDelete(d *schema.ResourceData, metaRaw interface{}) error { +func resourceWebhookDelete(ctx context.Context, d *schema.ResourceData, metaRaw interface{}) diag.Diagnostics { + var diags diag.Diagnostics + client := metaRaw.(*Client) webhookID := d.Id() - _, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - res, err := client.ld.WebhooksApi.DeleteWebhook(client.ctx, webhookID).Execute() - return nil, res, err - }) - + _, err := client.ld.WebhooksApi.DeleteWebhook(client.ctx, webhookID).Execute() if err != nil { - return fmt.Errorf("failed to delete webhook with id %q: %s", webhookID, handleLdapiErr(err)) + return 
diag.Errorf("failed to delete webhook with id %q: %s", webhookID, handleLdapiErr(err)) } - return nil + return diags } func resourceWebhookExists(d *schema.ResourceData, metaRaw interface{}) (bool, error) { @@ -151,9 +147,7 @@ func resourceWebhookExists(d *schema.ResourceData, metaRaw interface{}) (bool, e } func webhookExists(webhookID string, meta *Client) (bool, error) { - _, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return meta.ld.WebhooksApi.GetWebhook(meta.ctx, webhookID).Execute() - }) + _, res, err := meta.ld.WebhooksApi.GetWebhook(meta.ctx, webhookID).Execute() if isStatusNotFound(res) { return false, nil } diff --git a/launchdarkly/resource_launchdarkly_webhook_test.go b/launchdarkly/resource_launchdarkly_webhook_test.go index b88783a3..87d602d9 100644 --- a/launchdarkly/resource_launchdarkly_webhook_test.go +++ b/launchdarkly/resource_launchdarkly_webhook_test.go @@ -117,9 +117,9 @@ func TestAccWebhook_Create(t *testing.T) { Config: testAccWebhookCreate, Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "example-webhook"), - resource.TestCheckResourceAttr(resourceName, "url", "http://webhooks.com"), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, NAME, "example-webhook"), + resource.TestCheckResourceAttr(resourceName, URL, "http://webhooks.com"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "tags.#", "1"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), resource.TestCheckResourceAttr(resourceName, "statements.#", "0"), @@ -146,9 +146,9 @@ func TestAccWebhook_CreateWithEnabled(t *testing.T) { Config: testAccWebhookCreateWithEnabled, Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "example-webhook"), - resource.TestCheckResourceAttr(resourceName, "url", "http://webhooks.com"), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, NAME, "example-webhook"), + resource.TestCheckResourceAttr(resourceName, URL, "http://webhooks.com"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "tags.#", "1"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), resource.TestCheckResourceAttr(resourceName, "statements.#", "0"), @@ -174,9 +174,9 @@ func TestAccWebhook_Update(t *testing.T) { Config: testAccWebhookCreateWithEnabled, Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "example-webhook"), - resource.TestCheckResourceAttr(resourceName, "url", "http://webhooks.com"), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, NAME, "example-webhook"), + resource.TestCheckResourceAttr(resourceName, URL, "http://webhooks.com"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "tags.#", "1"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), resource.TestCheckResourceAttr(resourceName, "statements.#", "0"), @@ -186,9 +186,9 @@ func TestAccWebhook_Update(t *testing.T) { Config: testAccWebhookCreate, Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(resourceName), - resource.TestCheckResourceAttr(resourceName, 
"name", "example-webhook"), - resource.TestCheckResourceAttr(resourceName, "url", "http://webhooks.com"), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, NAME, "example-webhook"), + resource.TestCheckResourceAttr(resourceName, URL, "http://webhooks.com"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "tags.#", "1"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), resource.TestCheckResourceAttr(resourceName, "statements.#", "0"), @@ -198,9 +198,9 @@ func TestAccWebhook_Update(t *testing.T) { Config: testAccWebhookUpdate, Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Example Webhook"), - resource.TestCheckResourceAttr(resourceName, "url", "http://webhooks.com/updatedUrl"), - resource.TestCheckResourceAttr(resourceName, "on", "false"), + resource.TestCheckResourceAttr(resourceName, NAME, "Example Webhook"), + resource.TestCheckResourceAttr(resourceName, URL, "http://webhooks.com/updatedUrl"), + resource.TestCheckResourceAttr(resourceName, ON, "false"), resource.TestCheckResourceAttr(resourceName, "tags.#", "2"), resource.TestCheckResourceAttr(resourceName, "tags.0", "terraform"), resource.TestCheckResourceAttr(resourceName, "tags.1", "updated"), @@ -228,9 +228,9 @@ func TestAccWebhook_CreateWithStatements(t *testing.T) { Config: testAccWebhookWithStatements, Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Webhook with policy statements"), - resource.TestCheckResourceAttr(resourceName, "url", "http://webhooks.com"), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, NAME, "Webhook with policy statements"), + resource.TestCheckResourceAttr(resourceName, URL, "http://webhooks.com"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "statements.#", "1"), resource.TestCheckResourceAttr(resourceName, "statements.0.effect", "allow"), resource.TestCheckResourceAttr(resourceName, "statements.0.actions.#", "1"), @@ -260,9 +260,9 @@ func TestAccWebhook_CreateWithPolicyStatements(t *testing.T) { Config: testAccWebhookWithPolicyStatements, Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Webhook with policy statements"), - resource.TestCheckResourceAttr(resourceName, "url", "http://webhooks.com"), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, NAME, "Webhook with policy statements"), + resource.TestCheckResourceAttr(resourceName, URL, "http://webhooks.com"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "statements.#", "1"), resource.TestCheckResourceAttr(resourceName, "statements.0.effect", "allow"), resource.TestCheckResourceAttr(resourceName, "statements.0.actions.#", "1"), @@ -287,9 +287,9 @@ func TestAccWebhook_UpdateWithStatements(t *testing.T) { Config: testAccWebhookWithStatements, Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Webhook with policy statements"), - resource.TestCheckResourceAttr(resourceName, "url", "http://webhooks.com"), - resource.TestCheckResourceAttr(resourceName, 
"on", "true"), + resource.TestCheckResourceAttr(resourceName, NAME, "Webhook with policy statements"), + resource.TestCheckResourceAttr(resourceName, URL, "http://webhooks.com"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "statements.#", "1"), resource.TestCheckResourceAttr(resourceName, "statements.0.effect", "allow"), resource.TestCheckResourceAttr(resourceName, "statements.0.actions.#", "1"), @@ -302,9 +302,9 @@ func TestAccWebhook_UpdateWithStatements(t *testing.T) { Config: testAccWebhookWithPolicyUpdate, Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Webhook with policy statements"), - resource.TestCheckResourceAttr(resourceName, "url", "http://webhooks.com"), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, NAME, "Webhook with policy statements"), + resource.TestCheckResourceAttr(resourceName, URL, "http://webhooks.com"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "statements.#", "2"), resource.TestCheckResourceAttr(resourceName, "statements.0.effect", "allow"), resource.TestCheckResourceAttr(resourceName, "statements.0.actions.#", "1"), @@ -322,9 +322,9 @@ func TestAccWebhook_UpdateWithStatements(t *testing.T) { Config: testAccWebhookWithStatementsRemoved, Check: resource.ComposeTestCheckFunc( testAccCheckWebhookExists(resourceName), - resource.TestCheckResourceAttr(resourceName, "name", "Webhook without statements"), - resource.TestCheckResourceAttr(resourceName, "url", "http://webhooks.com"), - resource.TestCheckResourceAttr(resourceName, "on", "true"), + resource.TestCheckResourceAttr(resourceName, NAME, "Webhook without statements"), + resource.TestCheckResourceAttr(resourceName, URL, "http://webhooks.com"), + resource.TestCheckResourceAttr(resourceName, ON, "true"), resource.TestCheckResourceAttr(resourceName, "statements.#", "0"), ), }, diff --git a/launchdarkly/rollout_helper.go b/launchdarkly/rollout_helper.go index 69f577f3..9f3c00c9 100644 --- a/launchdarkly/rollout_helper.go +++ b/launchdarkly/rollout_helper.go @@ -14,7 +14,9 @@ func rolloutSchema() *schema.Schema { Type: schema.TypeList, Optional: true, Elem: &schema.Schema{ - Type: schema.TypeInt, + Type: schema.TypeInt, + // Can't use validation.ToDiagFunc converted validators on TypeList at the moment + // https://github.com/hashicorp/terraform-plugin-sdk/issues/734 ValidateFunc: validation.IntBetween(0, 100000), }, } diff --git a/launchdarkly/rule_helper.go b/launchdarkly/rule_helper.go index 702ad918..7245933e 100644 --- a/launchdarkly/rule_helper.go +++ b/launchdarkly/rule_helper.go @@ -18,11 +18,11 @@ func rulesSchema() *schema.Schema { Schema: map[string]*schema.Schema{ CLAUSES: clauseSchema(), VARIATION: { - Type: schema.TypeInt, - Elem: &schema.Schema{Type: schema.TypeInt}, - Optional: true, - Description: "The integer variation index to serve if the rule clauses evaluate to true. This argument is only valid if clauses are also specified", - ValidateFunc: validation.IntAtLeast(0), + Type: schema.TypeInt, + Elem: &schema.Schema{Type: schema.TypeInt}, + Optional: true, + Description: "The integer variation index to serve if the rule clauses evaluate to true. 
This argument is only valid if clauses are also specified", + ValidateDiagFunc: validation.ToDiagFunc(validation.IntAtLeast(0)), }, ROLLOUT_WEIGHTS: rolloutSchema(), BUCKET_BY: { diff --git a/launchdarkly/segment_rule_helper.go b/launchdarkly/segment_rule_helper.go index 55c2d04e..1e8a228d 100644 --- a/launchdarkly/segment_rule_helper.go +++ b/launchdarkly/segment_rule_helper.go @@ -14,11 +14,11 @@ func segmentRulesSchema() *schema.Schema { Schema: map[string]*schema.Schema{ CLAUSES: clauseSchema(), WEIGHT: { - Type: schema.TypeInt, - Elem: &schema.Schema{Type: schema.TypeInt}, - Optional: true, - ValidateFunc: validation.IntBetween(0, 100000), - Description: "The integer weight of the rule (between 0 and 100000).", + Type: schema.TypeInt, + Elem: &schema.Schema{Type: schema.TypeInt}, + Optional: true, + ValidateDiagFunc: validation.ToDiagFunc(validation.IntBetween(0, 100000)), + Description: "The integer weight of the rule (between 0 and 100000).", }, BUCKET_BY: { Type: schema.TypeString, diff --git a/launchdarkly/segments_helper.go b/launchdarkly/segments_helper.go index 289aa89c..34e70335 100644 --- a/launchdarkly/segments_helper.go +++ b/launchdarkly/segments_helper.go @@ -1,12 +1,12 @@ package launchdarkly import ( + "context" "fmt" "log" - "net/http" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - ldapi "github.com/launchdarkly/api-client-go/v7" ) func baseSegmentSchema() map[string]*schema.Schema { @@ -38,23 +38,25 @@ func baseSegmentSchema() map[string]*schema.Schema { } } -func segmentRead(d *schema.ResourceData, raw interface{}, isDataSource bool) error { +func segmentRead(ctx context.Context, d *schema.ResourceData, raw interface{}, isDataSource bool) diag.Diagnostics { + var diags diag.Diagnostics client := raw.(*Client) projectKey := d.Get(PROJECT_KEY).(string) envKey := d.Get(ENV_KEY).(string) segmentKey := d.Get(KEY).(string) - segmentRaw, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.SegmentsApi.GetSegment(client.ctx, projectKey, envKey, segmentKey).Execute() - }) - segment := segmentRaw.(ldapi.UserSegment) + segment, res, err := client.ld.SegmentsApi.GetSegment(client.ctx, projectKey, envKey, segmentKey).Execute() if isStatusNotFound(res) && !isDataSource { log.Printf("[WARN] failed to find segment %q in project %q, environment %q, removing from state", segmentKey, projectKey, envKey) + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: fmt.Sprintf("[WARN] failed to find segment %q in project %q, environment %q, removing from state", segmentKey, projectKey, envKey), + }) d.SetId("") - return nil + return diags } if err != nil { - return fmt.Errorf("failed to get segment %q of project %q: %s", segmentKey, projectKey, handleLdapiErr(err)) + return diag.Errorf("failed to get segment %q of project %q: %s", segmentKey, projectKey, handleLdapiErr(err)) } if isDataSource { @@ -66,26 +68,26 @@ func segmentRead(d *schema.ResourceData, raw interface{}, isDataSource bool) err err = d.Set(TAGS, segment.Tags) if err != nil { - return fmt.Errorf("failed to set tags on segment with key %q: %v", segmentKey, err) + return diag.Errorf("failed to set tags on segment with key %q: %v", segmentKey, err) } err = d.Set(INCLUDED, segment.Included) if err != nil { - return fmt.Errorf("failed to set included on segment with key %q: %v", segmentKey, err) + return diag.Errorf("failed to set included on segment with key %q: %v", segmentKey, 
err) } err = d.Set(EXCLUDED, segment.Excluded) if err != nil { - return fmt.Errorf("failed to set excluded on segment with key %q: %v", segmentKey, err) + return diag.Errorf("failed to set excluded on segment with key %q: %v", segmentKey, err) } rules, err := segmentRulesToResourceData(segment.Rules) if err != nil { - return fmt.Errorf("failed to read rules on segment with key %q: %v", segmentKey, err) + return diag.Errorf("failed to read rules on segment with key %q: %v", segmentKey, err) } err = d.Set(RULES, rules) if err != nil { - return fmt.Errorf("failed to set excluded on segment with key %q: %v", segmentKey, err) + return diag.Errorf("failed to set excluded on segment with key %q: %v", segmentKey, err) } - return nil + return diags } diff --git a/launchdarkly/tags_helper.go b/launchdarkly/tags_helper.go index 8c681df5..45621ff1 100644 --- a/launchdarkly/tags_helper.go +++ b/launchdarkly/tags_helper.go @@ -9,8 +9,10 @@ func tagsSchema() *schema.Schema { Type: schema.TypeSet, Set: schema.HashString, Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validateTags(), + Type: schema.TypeString, + // Can't use validation.ToDiagFunc converted validators on TypeSet at the moment + // https://github.com/hashicorp/terraform-plugin-sdk/issues/734 + ValidateFunc: validateTagsNoDiag(), }, Optional: true, Description: "Tags associated with your resource", diff --git a/launchdarkly/target_helper.go b/launchdarkly/target_helper.go index c973a6cc..c03503fb 100644 --- a/launchdarkly/target_helper.go +++ b/launchdarkly/target_helper.go @@ -21,10 +21,10 @@ func targetsSchema() *schema.Schema { Description: "List of user strings to target", }, VARIATION: { - Type: schema.TypeInt, - Required: true, - Description: "Index of the variation to serve if a user_target is matched", - ValidateFunc: validation.IntAtLeast(0), + Type: schema.TypeInt, + Required: true, + Description: "Index of the variation to serve if a user_target is matched", + ValidateDiagFunc: validation.ToDiagFunc(validation.IntAtLeast(0)), }, }, }, diff --git a/launchdarkly/team_member_helper.go b/launchdarkly/team_member_helper.go index 45485c2d..e7349083 100644 --- a/launchdarkly/team_member_helper.go +++ b/launchdarkly/team_member_helper.go @@ -2,19 +2,13 @@ package launchdarkly import ( "fmt" - "net/http" - - ldapi "github.com/launchdarkly/api-client-go/v7" ) // The LD api returns custom role IDs (not keys). 
Since we want to set custom_roles with keys, we need to look up their IDs func customRoleIDsToKeys(client *Client, ids []string) ([]string, error) { customRoleKeys := make([]string, 0, len(ids)) for _, customRoleID := range ids { - roleRaw, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.CustomRolesApi.GetCustomRole(client.ctx, customRoleID).Execute() - }) - role := roleRaw.(ldapi.CustomRole) + role, res, err := client.ld.CustomRolesApi.GetCustomRole(client.ctx, customRoleID).Execute() if isStatusNotFound(res) { return nil, fmt.Errorf("failed to find custom role key for ID %q", customRoleID) } @@ -30,10 +24,7 @@ func customRoleIDsToKeys(client *Client, ids []string) ([]string, error) { func customRoleKeysToIDs(client *Client, keys []string) ([]string, error) { customRoleIds := make([]string, 0, len(keys)) for _, key := range keys { - roleRaw, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.CustomRolesApi.GetCustomRole(client.ctx, key).Execute() - }) - role := roleRaw.(ldapi.CustomRole) + role, res, err := client.ld.CustomRolesApi.GetCustomRole(client.ctx, key).Execute() if isStatusNotFound(res) { return nil, fmt.Errorf("failed to find custom ID for key %q", key) } diff --git a/launchdarkly/test_utils.go b/launchdarkly/test_utils.go index 38b6ca3b..6c4890d7 100644 --- a/launchdarkly/test_utils.go +++ b/launchdarkly/test_utils.go @@ -1,24 +1,16 @@ package launchdarkly import ( - "fmt" - "net/http" - ldapi "github.com/launchdarkly/api-client-go/v7" ) // testAccDataSourceProjectCreate creates a project with the given project parameters func testAccDataSourceProjectCreate(client *Client, projectBody ldapi.ProjectPost) (*ldapi.Project, error) { - project, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.ProjectsApi.PostProject(client.ctx).ProjectPost(projectBody).Execute() - }) + project, _, err := client.ld.ProjectsApi.PostProject(client.ctx).ProjectPost(projectBody).Execute() if err != nil { return nil, err } - if project, ok := project.(ldapi.Project); ok { - return &project, nil - } - return nil, fmt.Errorf("failed to create project") + return &project, nil } func testAccDataSourceProjectDelete(client *Client, projectKey string) error { @@ -39,16 +31,11 @@ func testAccDataSourceFeatureFlagScaffold(client *Client, projectKey string, fla return nil, err } - flag, _, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.FeatureFlagsApi.PostFeatureFlag(client.ctx, project.Key).FeatureFlagBody(flagBody).Execute() - }) + flag, _, err := client.ld.FeatureFlagsApi.PostFeatureFlag(client.ctx, project.Key).FeatureFlagBody(flagBody).Execute() if err != nil { return nil, err } - if flag, ok := flag.(ldapi.FeatureFlag); ok { - return &flag, nil - } - return nil, fmt.Errorf("failed to create flag") + return &flag, nil } diff --git a/launchdarkly/validation_helper.go b/launchdarkly/validation_helper.go index 46ddf14e..fb7e9608 100644 --- a/launchdarkly/validation_helper.go +++ b/launchdarkly/validation_helper.go @@ -7,21 +7,47 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) -func validateKey() schema.SchemaValidateFunc { +// Can't use validation.ToDiagFunc converted validators on TypeList at the moment +// https://github.com/hashicorp/terraform-plugin-sdk/issues/734 +//nolint:staticcheck // SA1019 TODO: return SchemaValidateDiagFunc type +func validateKeyNoDiag() schema.SchemaValidateFunc 
{ return validation.StringMatch( regexp.MustCompile(`^[a-zA-Z0-9][a-zA-Z0-9_.-]*$`), "Must contain only letters, numbers, '.', '-', or '_' and must start with an alphanumeric", ) } -func validateID() schema.SchemaValidateFunc { +func validateKey() schema.SchemaValidateDiagFunc { + return validation.ToDiagFunc(validation.StringMatch( + regexp.MustCompile(`^[a-zA-Z0-9][a-zA-Z0-9_.-]*$`), + "Must contain only letters, numbers, '.', '-', or '_' and must start with an alphanumeric", + )) +} + +// Can't use validation.ToDiagFunc converted validators on TypeList at the moment +// https://github.com/hashicorp/terraform-plugin-sdk/issues/734 +//nolint:staticcheck // SA1019 TODO: return SchemaValidateDiagFunc type +func validateKeyAndLength(minLength, maxLength int) schema.SchemaValidateFunc { return validation.All( + validation.StringMatch( + regexp.MustCompile(`^[a-zA-Z0-9][a-zA-Z0-9_.-]*$`), + "Must contain only letters, numbers, '.', '-', or '_' and must start with an alphanumeric", + ), + validation.StringLenBetween(minLength, maxLength), + ) +} + +func validateID() schema.SchemaValidateDiagFunc { + return validation.ToDiagFunc(validation.All( validation.StringMatch(regexp.MustCompile(`^[a-fA-F0-9]*$`), "Must be a 24 character hexadecimal string"), validation.StringLenBetween(24, 24), - ) + )) } -func validateTags() schema.SchemaValidateFunc { +// Can't use validation.ToDiagFunc converted validators on TypeList at the moment +// https://github.com/hashicorp/terraform-plugin-sdk/issues/734 +//nolint:staticcheck // SA1019 TODO: return SchemaValidateDiagFunc type +func validateTagsNoDiag() schema.SchemaValidateFunc { return validation.All( validation.StringLenBetween(1, 64), validation.StringMatch( @@ -31,8 +57,18 @@ func validateTags() schema.SchemaValidateFunc { ) } -func validateOp() schema.SchemaValidateFunc { - return validation.StringInSlice([]string{ +// func validateTags() schema.SchemaValidateDiagFunc { +// return validation.ToDiagFunc(validation.All( +// validation.StringLenBetween(1, 64), +// validation.StringMatch( +// regexp.MustCompile(`^[a-zA-Z0-9_.-]*$`), +// "Must contain only letters, numbers, '.', '-', or '_' and be at most 64 characters", +// ), +// )) +// } + +func validateOp() schema.SchemaValidateDiagFunc { + return validation.ToDiagFunc(validation.StringInSlice([]string{ "in", "endsWith", "startsWith", @@ -48,5 +84,5 @@ func validateOp() schema.SchemaValidateFunc { "semVerEqual", "semVerLessThan", "semVerGreaterThan", - }, false) + }, false)) } diff --git a/launchdarkly/variations_helper.go b/launchdarkly/variations_helper.go index 5fcf6e27..be201936 100644 --- a/launchdarkly/variations_helper.go +++ b/launchdarkly/variations_helper.go @@ -9,6 +9,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ldapi "github.com/launchdarkly/api-client-go/v7" ) @@ -26,7 +27,7 @@ func variationTypeSchema() *schema.Schema { ForceNew: true, Description: fmt.Sprintf("The uniform type for all variations. 
Can be either %q, %q, %q, or %q.", BOOL_VARIATION, STRING_VARIATION, NUMBER_VARIATION, JSON_VARIATION), - ValidateFunc: validateVariationType, + ValidateDiagFunc: validation.ToDiagFunc(validateVariationType), } } @@ -50,10 +51,10 @@ func variationsSchema() *schema.Schema { Description: "A description for the variation", }, VALUE: { - Type: schema.TypeString, - Required: true, - Description: "The value of the flag for this variation", - ValidateFunc: validateVariationValue, + Type: schema.TypeString, + Required: true, + Description: "The value of the flag for this variation", + ValidateDiagFunc: validation.ToDiagFunc(validateVariationValue), StateFunc: func(i interface{}) string { // All values are stored as strings in TF state v, err := structure.NormalizeJsonString(i) diff --git a/launchdarkly/webhooks_helper.go b/launchdarkly/webhooks_helper.go index 02a28b07..0bd59fc2 100644 --- a/launchdarkly/webhooks_helper.go +++ b/launchdarkly/webhooks_helper.go @@ -1,12 +1,11 @@ package launchdarkly import ( - "fmt" + "context" "log" - "net/http" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - ldapi "github.com/launchdarkly/api-client-go/v7" ) func baseWebhookSchema() map[string]*schema.Schema { @@ -27,7 +26,8 @@ func baseWebhookSchema() map[string]*schema.Schema { } } -func webhookRead(d *schema.ResourceData, meta interface{}, isDataSource bool) error { +func webhookRead(ctx context.Context, d *schema.ResourceData, meta interface{}, isDataSource bool) diag.Diagnostics { + var diags diag.Diagnostics client := meta.(*Client) var webhookID string if isDataSource { @@ -36,23 +36,20 @@ func webhookRead(d *schema.ResourceData, meta interface{}, isDataSource bool) er webhookID = d.Id() } - webhookRaw, res, err := handleRateLimit(func() (interface{}, *http.Response, error) { - return client.ld.WebhooksApi.GetWebhook(client.ctx, webhookID).Execute() - }) - webhook := webhookRaw.(ldapi.Webhook) + webhook, res, err := client.ld.WebhooksApi.GetWebhook(client.ctx, webhookID).Execute() if isStatusNotFound(res) && !isDataSource { log.Printf("[WARN] failed to find webhook with id %q, removing from state", webhookID) d.SetId("") return nil } if err != nil { - return fmt.Errorf("failed to get webhook with id %q: %s", webhookID, handleLdapiErr(err)) + return diag.Errorf("failed to get webhook with id %q: %s", webhookID, handleLdapiErr(err)) } if webhook.Statements != nil { statements := policyStatementsToResourceData(*webhook.Statements) err = d.Set(STATEMENTS, statements) if err != nil { - return fmt.Errorf("failed to set statements on webhook with id %q: %v", webhookID, err) + return diag.Errorf("failed to set statements on webhook with id %q: %v", webhookID, err) } } @@ -66,7 +63,7 @@ func webhookRead(d *schema.ResourceData, meta interface{}, isDataSource bool) er err = d.Set(TAGS, webhook.Tags) if err != nil { - return fmt.Errorf("failed to set tags on webhook with id %q: %v", webhookID, err) + return diag.Errorf("failed to set tags on webhook with id %q: %v", webhookID, err) } - return nil + return diags } diff --git a/scripts/errcheck.sh b/scripts/errcheck.sh index 15464f5a..76590d5c 100755 --- a/scripts/errcheck.sh +++ b/scripts/errcheck.sh @@ -5,7 +5,7 @@ echo "==> Checking for unchecked errors..." if ! which errcheck > /dev/null; then echo "==> Installing errcheck..." 
- go get -u github.com/kisielk/errcheck + go install github.com/kisielk/errcheck@v1.5.0 fi err_files=$(errcheck -ignoretests \ diff --git a/scripts/generate_integration_audit_log_configs.py b/scripts/generate_integration_audit_log_configs.py new file mode 100644 index 00000000..6aa32f8a --- /dev/null +++ b/scripts/generate_integration_audit_log_configs.py @@ -0,0 +1,62 @@ +import os +import requests +import json + +def get_audit_log_manifests(host, api_key): + if not host or not api_key: + raise Exception('host or api key not set') + path_get_manifests = '/api/v2/integration-manifests' + resp = requests.get(host + path_get_manifests, headers={'Authorization': api_key}) + if resp.status_code != 200: + raise Exception(resp.status_code, 'unsuccessful get manifests request') + return filter_manifests(resp.json()['items']) + +def filter_manifests(manifests): + filtered = [] + for m in manifests: + if 'capabilities' in m and 'auditLogEventsHook' in m['capabilities']: + filtered.append(m) + return filtered + +def construct_config(manifest): + """ takes an audit log manifest and returns the form variables in the format + { : { + 'type': , + 'isOptional': , + 'allowedValues': , + 'defaultValue': , + 'isSecret': + } } + """ + rawFormVariables = manifest['formVariables'] + formVariables = {} + for rawV in rawFormVariables: + v = { 'type': rawV['type'] } + for attribute in ['isOptional', 'allowedValues', 'defaultValue', 'isSecret']: + if attribute in rawV: + v[attribute] = rawV[attribute] + formVariables[rawV['key']] = v + return formVariables + +def construct_config_dict(manifests): + cfgs = {} + for m in manifests: + cfgs[m['key']] = construct_config(m) + return cfgs + +def seed_config_file(): + host = os.getenv('LAUNCHDARKLY_API_HOST', 'https://app.launchdarkly.com') + if not host.startswith('http'): + host = 'https://' + host + api_key = os.getenv('LAUNCHDARKLY_ACCESS_TOKEN') + print('getting manifests...') + manifests = get_audit_log_manifests(host, api_key) + print('constructing configs...') + configs = construct_config_dict(manifests) + print('seeding file...') + with open('launchdarkly/audit_log_subscription_configs.json', 'w') as f: + json.dump(configs, f) + print('COMPLETE, config data written to launchdarkly/audit_log_subscription_configs.json') + +if __name__ == '__main__': + seed_config_file() \ No newline at end of file diff --git a/scripts/gofmtcheck.sh b/scripts/gofmtcheck.sh index 1c055815..dcaab03c 100755 --- a/scripts/gofmtcheck.sh +++ b/scripts/gofmtcheck.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash +echo "==> Checking that code complies with gofmt and gofmts requirements..." # Check gofmt -echo "==> Checking that code complies with gofmt requirements..." gofmt_files=$(gofmt -l `find . -name '*.go' | grep -v vendor`) if [[ -n ${gofmt_files} ]]; then echo 'gofmt needs running on the following files:' @@ -10,4 +10,18 @@ if [[ -n ${gofmt_files} ]]; then exit 1 fi +# Check gofmts +if ! which gofmts > /dev/null; then + echo "==> Installing gofmts..." + go install github.com/ashanbrown/gofmts/cmd/gofmts@v0.1.4 +fi +gofmts_files=$(gofmts -l `find . -name '*.go' | grep -v vendor`) +if [[ -n ${gofmts_files} ]]; then + echo 'gofmts needs running on the following files:' + echo "${gofmts_files}" + echo "You can use the command: \`make fmt\` to reformat code."
+ exit 1 +fi + + exit 0 diff --git a/website/docs/d/audit_log_subscription.html.markdown b/website/docs/d/audit_log_subscription.html.markdown new file mode 100644 index 00000000..a7668ab2 --- /dev/null +++ b/website/docs/d/audit_log_subscription.html.markdown @@ -0,0 +1,55 @@ +--- +layout: "launchdarkly" +page_title: "LaunchDarkly: launchdarkly_audit_log_subscription" +description: |- + Get information about LaunchDarkly audit log subscriptions. +--- + +# launchdarkly_audit_log_subscription + +Provides a LaunchDarkly audit log subscription data source. + +This data source allows you to retrieve information about LaunchDarkly audit log subscriptions. + +## Example Usage + +```hcl +data "launchdarkly_audit_log_subscription" "test" { + id = "5f0cd446a77cba0b4c5644a7" + integration_key = "msteams" +} +``` + +## Argument Reference + +- `id` (Required) - The unique subscription ID. This can be found in the URL of the pull-out configuration sidebar for the given subscription on your [LaunchDarkly Integrations page](https://app.launchdarkly.com/default/integrations). + +- `integration_key` (Required) - The integration key. As of January 2022, supported integrations are `"datadog"`, `"dynatrace"`, `"elastic"`, `"honeycomb"`, `"logdna"`, `"msteams"`, `"new-relic-apm"`, `"signalfx"`, and `"splunk"`. + +## Attributes Reference + +In addition to the arguments above, the resource exports the following attributes: + +- `name` - The subscription's human-readable name. + +- `config` - A block of configuration fields associated with your integration type. + +- `statements` - The statement block used to filter subscription events. To learn more, read [Statement Blocks](#statement-blocks). + +- `on` - Whether the subscription is enabled. + +- `tags` - Set of tags associated with the subscription. + +### Statement Blocks + +Audit log subscription `statements` blocks are composed of the following arguments: + +- `effect` - Either `allow` or `deny`. This argument defines whether the statement allows or denies access to the named resources and actions. + +- `resources` - The list of resource specifiers defining the resources to which the statement applies. To learn more about how to configure these, read [Using resources](https://docs.launchdarkly.com/home/members/role-resources). + +- `not_resources` - The list of resource specifiers defining the resources to which the statement does not apply. To learn more about how to configure these, read [Using resources](https://docs.launchdarkly.com/home/members/role-resources). + +- `actions` The list of action specifiers defining the actions to which the statement applies. For a list of available actions, read [Using actions](https://docs.launchdarkly.com/home/members/role-actions). + +- `not_actions` The list of action specifiers defining the actions to which the statement does not apply. For a list of available actions, read [Using actions](https://docs.launchdarkly.com/home/members/role-actions). diff --git a/website/docs/d/flag_trigger.html.markdown b/website/docs/d/flag_trigger.html.markdown new file mode 100644 index 00000000..4c5ad5ab --- /dev/null +++ b/website/docs/d/flag_trigger.html.markdown @@ -0,0 +1,56 @@ +--- +layout: "launchdarkly" +page_title: "LaunchDarkly: launchdarkly_flag_trigger" +description: |- + Get information about LaunchDarkly flag triggers. +--- + +# launchdarkly_flag_trigger + +Provides a LaunchDarkly flag trigger data source. + +-> **Note:** Flag triggers are available to customers on an Enterprise LaunchDarkly plan. 
To learn more, read about our pricing. To upgrade your plan, [contact LaunchDarkly Sales](https://launchdarkly.com/contact-sales/). + +This data source allows you to retrieve information about flag triggers from your LaunchDarkly organization. + +## Example Usage + +```hcl +data "launchdarkly_flag_trigger" "example" { + id = "///61d490757f7821150815518f" + integration_key = "datadog" + instructions { + kind = "turnFlagOff" + } +} +``` + +## Argument Reference + +- `id` - (Required) The Terraform trigger ID. This ID takes the following format: `///`. The unique trigger ID can be found in your saved trigger URL: + +``` +https://app.launchdarkly.com/webhook/triggers//aff25a53-17d9-4112-a9b8-12718d1a2e79 +``` + +Please note that if you did not save this upon creation of the resource, you will have to reset it to get a new value, which can cause breaking changes. + +## Attributes Reference + +In addition to the arguments above, the resource exports the following attributes: + +- `project_key` - The unique key of the project encompassing the associated flag. + +- `env_key` - The unique key of the environment the flag trigger will work in. + +- `flag_key` - The unique key of the associated flag. + +- `integration_key` - The unique identifier of the integration your trigger is set up with. + +- `instructions` - Instructions containing the action to perform when invoking the trigger. Currently supported flag actions are `"turnFlagOn"` and `"turnFlagOff"`. These can be found on the `kind` field nested on the `instructions` attribute. + +- `maintainer_id` - The ID of the member responsible for maintaining the flag trigger. If created via Terraform, this value will be the ID of the member associated with the API key used for your provider configuration. + +- `enabled` - Whether the trigger is currently active or not. + +Please note that the original trigger URL itself will not be surfaced. diff --git a/website/docs/d/metric.html.markdown b/website/docs/d/metric.html.markdown new file mode 100644 index 00000000..9ec68da9 --- /dev/null +++ b/website/docs/d/metric.html.markdown @@ -0,0 +1,59 @@ +--- +layout: "launchdarkly" +page_title: "LaunchDarkly: launchdarkly_metric" +description: |- + Get information about LaunchDarkly metrics. +--- + +# launchdarkly_metric + +Provides a LaunchDarkly metric data source. + +This data source allows you to retrieve metric information from your LaunchDarkly organization. + +## Example Usage + +```hcl +data "launchdarkly_metric" "example" { + key = "example-metric" + project_key = "example-project" +} +``` + +## Argument Reference + +- `key` - (Required) The metric's unique key. + +- `project_key` - (Required) The metric's project key. + +## Attributes Reference + +In addition to the arguments above, the resource exports the following attributes: + +- `id` - The unique metric ID in the format `project_key/metric_key`. + +- `name` - The name of the metric. + +- `project_key` - The metric's project key. + +- `kind` - The metric type. Available choices are `click`, `custom`, and `pageview`. + +- `tags` - Set of tags associated with the metric. + +- `description` - The description of the metric's purpose. + +- `is_numeric` - Whether a `custom` metric is a numeric metric or not. + +- `is_active` - Whether the metric is active. + +- `maintainer_id` - The ID of the member maintaining the metric. + +- `selector` - The CSS selector for `click` metrics. + +- `urls` - Which URLs the metric watches. + +- `event_key` - The event key to watch for `custom` metrics. 
+ +- `success_criteria` - The success criteria for numeric `custom` metrics. + +- `unit` - The unit for numeric `custom` metrics. diff --git a/website/docs/d/relay_proxy_configuration.html.markdown b/website/docs/d/relay_proxy_configuration.html.markdown new file mode 100644 index 00000000..3e4cdde4 --- /dev/null +++ b/website/docs/d/relay_proxy_configuration.html.markdown @@ -0,0 +1,52 @@ +--- +title: "launchdarkly_relay_proxy_configuration" +description: "Get information about Relay Proxy configurations." +--- + +# launchdarkly_relay_proxy_configuration + +Provides a LaunchDarkly Relay Proxy configuration data source for use with the Relay Proxy's [automatic configuration feature](https://docs.launchdarkly.com/home/relay-proxy/automatic-configuration). + +-> **Note:** Relay Proxy automatic configuration is available to customers on an Enterprise LaunchDarkly plan. To learn more, read about our pricing. To upgrade your plan, [contact LaunchDarkly Sales](https://launchdarkly.com/contact-sales/). + +This data source allows you to retrieve Relay Proxy configuration information from your LaunchDarkly organization. + +-> **Note:** It is not possible for this data source to retrieve your Relay Proxy configuration's unique key. This is because the unique key is only exposed upon creation. If you need to reference the Relay Proxy configuration's unique key in your terraform config, use the `launchdarkly_relay_proxy_configuration` resource instead. + +## Example Usage + +```hcl +data "launchdarkly_relay_proxy_configuration" "example" { + id = "5f0cd446a77cba0b4c5644a7" +} +``` + +## Argument Reference + +- `id` - (Required) The Relay Proxy configuration's unique 24 character ID. The unique relay proxy ID can be found in the relay proxy edit page URL, which you can locate by clicking the three dot menu on your relay proxy item in the UI and selecting 'Edit configuration': + +``` +https://app.launchdarkly.com/settings/relay//edit +``` + +## Attribute Reference + +In addition to the argument above, the resource exports the following attributes: + +- `name` - The human-readable name for your Relay Proxy configuration. + +- `display_key` - The last 4 characters of the Relay Proxy configuration's unique key. + +- `policy` - The Relay Proxy configuration's rule policy block. This determines what content the Relay Proxy receives. To learn more, read [Understanding policies](https://docs.launchdarkly.com/home/members/role-policies#understanding-policies). + +Relay proxy configuration `policy` blocks are composed of the following arguments: + +- `effect` - Either `allow` or `deny`. This argument defines whether the rule policy allows or denies access to the named resources and actions. + +- `resources` - The list of resource specifiers defining the resources to which the rule policy applies. Either `resources` or `not_resources` must be specified. For a list of available resources read [Understanding resource types and scopes](https://docs.launchdarkly.com/home/account-security/custom-roles/resources#understanding-resource-types-and-scopes). + +- `not_resources` - The list of resource specifiers defining the resources to which the rule policy does not apply. Either `resources` or `not_resources` must be specified. For a list of available resources read [Understanding resource types and scopes](https://docs.launchdarkly.com/home/account-security/custom-roles/resources#understanding-resource-types-and-scopes). 
+ +- `actions` The list of action specifiers defining the actions to which the rule policy applies. Either `actions` or `not_actions` must be specified. For a list of available actions read [Actions reference](https://docs.launchdarkly.com/home/account-security/custom-roles/actions#actions-reference). + +- `not_actions` The list of action specifiers defining the actions to which the rule policy does not apply. Either `actions` or `not_actions` must be specified. For a list of available actions read [Actions reference](https://docs.launchdarkly.com/home/account-security/custom-roles/actions#actions-reference). diff --git a/website/docs/d/team_members.html.markdown b/website/docs/d/team_members.html.markdown new file mode 100644 index 00000000..e346218a --- /dev/null +++ b/website/docs/d/team_members.html.markdown @@ -0,0 +1,41 @@ +--- +layout: "launchdarkly" +page_title: "LaunchDarkly: launchdarkly_team_members" +description: |- + Get information about multiple LaunchDarkly team members. +--- + +# launchdarkly_team_members + +Provides a LaunchDarkly team members data source. + +This data source allows you to retrieve information about multiple team members from your LaunchDarkly organization. + +## Example Usage + +```hcl +data "launchdarkly_team_members" "example" { + emails = ["example@example.com", "example2@example.com", "example3@example.com"] +} +``` + +## Argument Reference + +- `emails` - (Required) An array of unique email addresses associated with the team members. + +- `ignore_missing` - (Optional) A boolean to determine whether to ignore members that weren't found. + +## Attributes Reference + +In addition to the arguments above, the resource exports the found members as `team_members`. +The following attributes are available for each member: + +- `id` - The 24 character alphanumeric ID of the team member. + +- `first_name` - The team member's given name. + +- `last_name` - The team member's family name. + +- `role` - The role associated with the team member. Possible roles are `owner`, `reader`, `writer`, or `admin`. + +- `custom_role` - (Optional) The list of custom role keys associated with the team member. Custom roles are only available to customers on enterprise plans. To learn more about enterprise plans, contact sales@launchdarkly.com. diff --git a/website/docs/r/access_token.html.markdown b/website/docs/r/access_token.html.markdown index 821b0840..16bf93f6 100644 --- a/website/docs/r/access_token.html.markdown +++ b/website/docs/r/access_token.html.markdown @@ -5,7 +5,7 @@ description: |- Create and manage LaunchDarkly access tokens. --- -# launchdarkly_access +# launchdarkly_access_token Provides a LaunchDarkly access token resource. @@ -15,7 +15,7 @@ This resource allows you to create and manage access tokens within your LaunchDa ## Example Usage -Resource must contain either a `role`, `custom_role` or an `inline_roles` (previously `policy_statements`) block. As of v1.7.0, `policy_statements` has been deprecated in favor of `inline_roles`. +The resource must contain either a `role`, `custom_role` or an `inline_roles` (previously `policy_statements`) block. As of v1.7.0, `policy_statements` has been deprecated in favor of `inline_roles`. With a built-in role @@ -53,8 +53,9 @@ resource "launchdarkly_access_token" "token_with_policy_statements" { - `name` - (Optional) A human-friendly name for the access token. 
-- `service_token` - (Optional) Whether the token will be a [service token](https://docs.launchdarkly.com/home/account-security/api-access-tokens#service-tokens) -- `default_api_version` - (Optional) The default API version for this token. Defaults to the latest API version. +- `service_token` - (Optional) Whether the token will be a [service token](https://docs.launchdarkly.com/home/account-security/api-access-tokens#service-tokens). A change in this field will force the destruction of the existing token and the creation of a new one. + +- `default_api_version` - (Optional) The default API version for this token. Defaults to the latest API version. A change in this field will force the destruction of the existing token in state and the creation of a new one. An access token may have its permissions specified by a built-in LaunchDarkly role, a set of custom role keys, or by an inline custom role (policy statements). diff --git a/website/docs/r/audit_log_subscription.html.markdown b/website/docs/r/audit_log_subscription.html.markdown new file mode 100644 index 00000000..9a266673 --- /dev/null +++ b/website/docs/r/audit_log_subscription.html.markdown @@ -0,0 +1,64 @@ +--- +layout: "launchdarkly" +page_title: "LaunchDarkly: launchdarkly_audit_log_subscription" +description: |- + Create and manage LaunchDarkly integration audit log subscriptions. +--- + +# launchdarkly_audit_log_subscription + +Provides a LaunchDarkly audit log subscription resource. + +This resource allows you to create and manage LaunchDarkly audit log subscriptions. + +# Example Usage + +```hcl +resource "launchdarkly_audit_log_subscription" "example" { + integration_key = "datadog" + name = "Example Datadog Subscription" + config { + api_key = "yoursecretkey" + host_url = "https://api.datadoghq.com" + } + tags = [ + "integrations", + "terraform" + ] + statements { + actions = ["*"] + effect = "allow" + resources = ["proj/*:env/*:flag/*"] + } +} +``` + +## Argument Reference + +- `integration_key` (Required) The integration key. As of January 2022, supported integrations are `"datadog"`, `"dynatrace"`, `"elastic"`, `"honeycomb"`, `"logdna"`, `"msteams"`, `"new-relic-apm"`, `"signalfx"`, and `"splunk"`. A change in this field will force the destruction of the existing resource and the creation of a new one. + +- `name` (Required) - A human-friendly name for your audit log subscription viewable from within the LaunchDarkly Integrations page. + +- `config` (Required) - The set of configuration fields corresponding to the value defined for `integration_key`. Refer to the `"formVariables"` field in the corresponding `integrations//manifest.json` file in [this repo](https://github.com/launchdarkly/integration-framework/tree/master/integrations) for a full list of fields for the integration you wish to configure. **IMPORTANT**: Please note that Terraform will only accept these in snake case, regardless of the case shown in the manifest. + +- `statements` (Required) - A block representing the resources to which you wish to subscribe. To learn more about how to configure these blocks, read [Nested Subscription Statements Blocks](#nested-subscription-statements-blocks). + +- `on` (Required) - Whether or not you want your subscription enabled, i.e. to actively send events. + +- `tags` (Optional) - Set of tags associated with the subscription object. + +### Nested Subscription Statements Blocks + +Nested subscription `statements` blocks have the following structure: + +- `effect` (Required) - Either `allow` or `deny`. 
This argument defines whether the statement allows or denies access to the named resources and actions. + +- `resources` - The list of resource specifiers defining the resources to which the statement applies. To learn more about how to configure these, read [Using resources](https://docs.launchdarkly.com/home/members/role-resources). + +- `not_resources` - The list of resource specifiers defining the resources to which the statement does not apply. To learn more about how to configure these, read [Using resources](https://docs.launchdarkly.com/home/members/role-resources). + +- `actions` The list of action specifiers defining the actions to which the statement applies. For a list of available actions, read [Using actions](https://docs.launchdarkly.com/home/members/role-actions). + +- `not_actions` The list of action specifiers defining the actions to which the statement does not apply. For a list of available actions, read [Using actions](https://docs.launchdarkly.com/home/members/role-actions). + +Please note that either `resources` and `actions` _or_ `not_resources` and `not_actions` must be defined. diff --git a/website/docs/r/custom_role.html.markdown b/website/docs/r/custom_role.html.markdown index 6b20ca9b..c2c9deea 100644 --- a/website/docs/r/custom_role.html.markdown +++ b/website/docs/r/custom_role.html.markdown @@ -9,9 +9,9 @@ description: |- Provides a LaunchDarkly custom role resource. -This resource allows you to create and manage custom roles within your LaunchDarkly organization. +-> **Note:** Custom roles are available to customers on an Enterprise LaunchDarkly plan. To learn more, read about our pricing. To upgrade your plan, [contact LaunchDarkly Sales](https://launchdarkly.com/contact-sales/). --> **Note:** Custom roles are only available to customers on enterprise plans. To learn more about enterprise plans, contact sales@launchdarkly.com. +This resource allows you to create and manage custom roles within your LaunchDarkly organization. ## Example Usage @@ -36,7 +36,7 @@ resource "launchdarkly_custom_role" "example" { ## Argument Reference -- `key` - (Required) The unique key that references the custom role. +- `key` - (Required) The unique key that references the custom role. A change in this field will force the destruction of the existing resource and the creation of a new one. - `name` - (Required) The human-readable name for the custom role. diff --git a/website/docs/r/destination.html.markdown b/website/docs/r/destination.html.markdown index 396515a5..5a3ed0a2 100644 --- a/website/docs/r/destination.html.markdown +++ b/website/docs/r/destination.html.markdown @@ -9,6 +9,8 @@ description: |- Provides a LaunchDarkly Data Export Destination resource. +-> **Note:** Data Export is available to customers on an Enterprise LaunchDarkly plan. To learn more, read about our pricing. To upgrade your plan, [contact LaunchDarkly Sales](https://launchdarkly.com/contact-sales/). + Data Export Destinations are locations that receive exported data. This resource allows you to configure destinations for the export of raw analytics data, including feature flag requests, analytics events, custom events, and more. To learn more about data export, read [Data Export Documentation](https://docs.launchdarkly.com/integrations/data-export). @@ -98,13 +100,13 @@ resource "launchdarkly_destination" "example" { ## Argument Reference -- `project_key` - (Required) - The LaunchDarkly project key. +- `project_key` - (Required) - The LaunchDarkly project key. 
A change in this field will force the destruction of the existing resource and the creation of a new one. -- `env_key` - (Required) - The environment key. +- `env_key` - (Required) - The environment key. A change in this field will force the destruction of the existing resource and the creation of a new one. - `name` - (Required) - A human-readable name for your data export destination. -- `kind` - (Required) - The data export destination type. Available choices are `kinesis`, `google-pubsub`, `mparticle`, `azure-event-hubs`, and `segment`. +- `kind` - (Required) - The data export destination type. Available choices are `kinesis`, `google-pubsub`, `mparticle`, `azure-event-hubs`, and `segment`. A change in this field will force the destruction of the existing resource and the creation of a new one. - `config` - (Required) - The destination-specific configuration. To learn more, read [Destination-Specific Configs](#destination-specific-configs). diff --git a/website/docs/r/environment.html.markdown b/website/docs/r/environment.html.markdown index 1d6782e6..f6770191 100644 --- a/website/docs/r/environment.html.markdown +++ b/website/docs/r/environment.html.markdown @@ -46,11 +46,11 @@ resource "launchdarkly_environment" "approvals_example" { ## Argument Reference -- `project_key` - (Required) - The environment's project key. +- `project_key` - (Required) - The environment's project key. A change in this field will force the destruction of the existing resource and the creation of a new one. - `name` - (Required) The name of the environment. -- `key` - (Required) The project-unique key for the environment. +- `key` - (Required) The project-unique key for the environment. A change in this field will force the destruction of the existing resource and the creation of a new one. - `color` - (Required) The color swatch as an RGB hex value with no leading `#`. For example: `000000`. diff --git a/website/docs/r/feature_flag.html.markdown b/website/docs/r/feature_flag.html.markdown index 7e4bdd0f..22cfda4c 100644 --- a/website/docs/r/feature_flag.html.markdown +++ b/website/docs/r/feature_flag.html.markdown @@ -76,9 +76,9 @@ resource "launchdarkly_feature_flag" "json_example" { ## Argument Reference -- `project_key` - (Required) The feature flag's project key. +- `project_key` - (Required) The feature flag's project key. A change in this field will force the destruction of the existing resource and the creation of a new one. -- `key` - (Required) The unique feature flag key that references the flag in your application code. +- `key` - (Required) The unique feature flag key that references the flag in your application code. A change in this field will force the destruction of the existing resource and the creation of a new one. - `name` - (Required) The human-readable name of the feature flag. diff --git a/website/docs/r/feature_flag_environment.html.markdown b/website/docs/r/feature_flag_environment.html.markdown index 6a907177..337d678a 100644 --- a/website/docs/r/feature_flag_environment.html.markdown +++ b/website/docs/r/feature_flag_environment.html.markdown @@ -59,9 +59,9 @@ resource "launchdarkly_feature_flag_environment" "number_env" { ## Argument Reference -- `flag_id` - (Required) The feature flag's unique `id` in the format `project_key/flag_key`. +- `flag_id` - (Required) The feature flag's unique `id` in the format `project_key/flag_key`. A change in this field will force the destruction of the existing resource and the creation of a new one. -- `env_key` - (Required) The environment key. 
+- `env_key` - (Required) The environment key. A change in this field will force the destruction of the existing resource and the creation of a new one. - `on` (previously `targeting_enabled`) - (Optional) Whether targeting is enabled. Defaults to `false` if not set. diff --git a/website/docs/r/flag_trigger.html.markdown b/website/docs/r/flag_trigger.html.markdown new file mode 100644 index 00000000..3a235940 --- /dev/null +++ b/website/docs/r/flag_trigger.html.markdown @@ -0,0 +1,69 @@ +--- +layout: "launchdarkly" +page_title: "LaunchDarkly: launchdarkly_flag_trigger" +description: |- + Create and manage LaunchDarkly flag triggers. +--- + +# launchdarkly_flag_trigger + +Provides a LaunchDarkly flag trigger resource. + +-> **Note:** Flag triggers are available to customers on an Enterprise LaunchDarkly plan. To learn more, read about our pricing. To upgrade your plan, [contact LaunchDarkly Sales](https://launchdarkly.com/contact-sales/). + +This resource allows you to create and manage flag triggers within your LaunchDarkly organization. + +-> **Note:** This resource will store sensitive unique trigger URL value in plaintext in your Terraform state. Be sure your state is configured securely before using this resource. See https://www.terraform.io/docs/state/sensitive-data.html for more details. + +## Example Usage + +```hcl +resource "launchdarkly_flag_trigger" "example" { + project_key = launchdarkly_project.example.key + env_key = "test" + flag_key = launchdarkly_feature_flag.trigger_flag.key + integration_key = "generic-trigger" + instructions { + kind = "turnFlagOn" + } + enabled = false +} +``` + +## Argument Reference + +- `project_key` - (Required) The unique key of the project encompassing the associated flag. A change in this field will force the destruction of the existing resource and the creation of a new one. + +- `env_key` - (Required) The unique key of the environment the flag trigger will work in. A change in this field will force the destruction of the existing resource and the creation of a new one. + +- `flag_key` - (Required) The unique key of the associated flag. A change in this field will force the destruction of the existing resource and the creation of a new one. + +- `integration_key` - (Required) The unique identifier of the integration you intend to set your trigger up with. Currently supported are `"datadog"`, `"dynatrace"`, `"honeycomb"`, `"new-relic-apm"`, `"signalfx"`, and `"generic-trigger"`. `"generic-trigger"` should be used for integrations not explicitly supported. A change in this field will force the destruction of the existing resource and the creation of a new one. + +- `instructions` - (Required) Instructions containing the action to perform when invoking the trigger. Currently supported flag actions are `"turnFlagOn"` and `"turnFlagOff"`. This must be passed as the key-value pair `{ kind = "" }`. + +- `enabled` - (Optional) Whether the trigger is currently active or not. This property defaults to true upon creation and will thereafter conform to the last Terraform-configured value. + +## Additional Attributes + +In addition to the above arguments, this resource supports the following computed attributes: + +`trigger_url` - The unique URL used to invoke the trigger. + +`maintainer_id` - The ID of the member responsible for maintaining the flag trigger. If created via Terraform, this value will be the ID of the member associated with the API key used for your provider configuration. 
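As an illustrative aside (an editorial sketch, not part of the diff above): one way to make use of the computed `trigger_url` attribute just described is to surface it through a Terraform output. The resource address `launchdarkly_flag_trigger.example` is assumed from the Example Usage earlier in this document; treat this as a sketch rather than official provider documentation.

```hcl
# Hypothetical sketch: expose the computed trigger URL as a sensitive output.
# Assumes the launchdarkly_flag_trigger.example resource shown in the Example Usage above.
output "example_trigger_url" {
  value     = launchdarkly_flag_trigger.example.trigger_url
  sensitive = true # the trigger URL is effectively a secret; keep it out of plain CLI output
}
```

Marking the output `sensitive` matters here because, as the note above points out, the trigger URL is stored in plaintext in state and should be handled like a secret.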
+ +## Import + +LaunchDarkly flag triggers can be imported using the following syntax: + +``` +$ terraform import launchdarkly_flag_trigger.example /// +``` + +The unique trigger ID can be found in your saved trigger URL: + +``` +https://app.launchdarkly.com/webhook/triggers//aff25a53-17d9-4112-a9b8-12718d1a2e79 +``` + +Please note that if you did not save this upon creation of the resource, you will have to reset it to get a new value, which can cause breaking changes. diff --git a/website/docs/r/metric.html.markdown b/website/docs/r/metric.html.markdown new file mode 100644 index 00000000..4b1b8c24 --- /dev/null +++ b/website/docs/r/metric.html.markdown @@ -0,0 +1,89 @@ +--- +layout: "launchdarkly" +page_title: "LaunchDarkly: launchdarkly_metric" +description: |- + Create and manage LaunchDarkly metrics. +--- + +# launchdarkly_metric + +Provides a LaunchDarkly metric resource. + +This resource allows you to create and manage metrics within your LaunchDarkly organization. + +To learn more about metrics and experimentation, read [Experimentation Documentation](https://docs.launchdarkly.com/home/experimentation). + +## Example Usage + +```hcl +resource "launchdarkly_metric" "example" { + project_key = launchdarkly_project.example.key + key = "example-metric" + name = "Example Metric" + description = "Metric description." + kind = "pageview" + tags = ["example"] + urls { + kind = "substring" + substring = "foo" + } +} +``` + +## Argument Reference + +- `key` - (Required) The unique key that references the metric. A change in this field will force the destruction of the existing resource and the creation of a new one. + +- `project_key` - (Required) The metric's project key. A change in this field will force the destruction of the existing resource and the creation of a new one. + +- `name` - (Required) The human-friendly name for the metric. + +- `kind` - (Required) The metric type. Available choices are `click`, `custom`, and `pageview`. A change in this field will force the destruction of the existing resource and the creation of a new one. + +- `description` - (Optional) The description of the metric's purpose. + +- `tags` - (Optional) Set of tags for the metric. + +- `is_numeric` - (Optional) Whether a `custom` metric is a numeric metric or not. + +- `is_active` - (Optional) Whether the metric is active. + +- `maintainer_id` - (Optional) The ID of the member maintaining the metric. + +- `selector` - (Required for kind `click`) The CSS selector for `click` metrics. + +- `urls` - (Required for kind `click` and `pageview`) A block determining which URLs the metric watches. To learn more, read [Nested Urls Blocks](#nested-urls-blocks). + +- `event_key` - (Required for kind `custom`) The event key to watch for `custom` metrics. + +- `success_criteria` - (Required for kind `custom`) The success criteria for numeric `custom` metrics. + +- `unit` - (Required for kind `custom`) The unit for numeric `custom` metrics. A sketch combining these `custom` metric arguments follows the nested `urls` block reference below. + +### Nested Urls Blocks + +Nested `urls` blocks have the following structure: + +- `kind` - (Required) The URL type. Available choices are `exact`, `canonical`, `substring`, and `regex`. + +- `url` - (Required for kind `exact` and `canonical`) The exact or canonical URL. + +- `substring` - (Required for kind `substring`) The URL substring to match by. + +- `pattern` - (Required for kind `regex`) The regex pattern to match by. 
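As an illustrative aside (an editorial sketch, not part of the diff above): the kind-specific arguments listed earlier combine as follows for a numeric `custom` metric. The snake_case argument names (`is_numeric`, `event_key`, `success_criteria`, `unit`) and the `"LowerThanBaseline"` value for `success_criteria` are assumptions based on the argument list above; verify them against the provider schema before relying on this.

```hcl
# Hypothetical sketch of a numeric custom metric; argument names and the
# success_criteria value are assumptions, not confirmed by the diff above.
resource "launchdarkly_metric" "checkout_latency" {
  project_key      = launchdarkly_project.example.key
  key              = "checkout-latency"
  name             = "Checkout latency"
  kind             = "custom"            # numeric custom metrics track event values
  is_numeric       = true
  event_key        = "checkout-latency"  # event key sent by your SDK's track calls
  success_criteria = "LowerThanBaseline" # lower latency than baseline counts as success
  unit             = "ms"
  tags             = ["example"]
}
```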
+
+## Attributes Reference
+
+In addition to the arguments above, the resource exports the following attributes:
+
+- `id` - The unique metric ID in the format `project_key/metric_key`.
+
+- `creation_date` - The metric's creation date represented as a UNIX epoch timestamp.
+
+## Import
+
+LaunchDarkly metrics can be imported using the metric's ID in the form `project_key/metric_key`, e.g.
+
+```
+$ terraform import launchdarkly_metric.example example-project/example-metric-key
+```
diff --git a/website/docs/r/project.html.markdown b/website/docs/r/project.html.markdown
index 829f894d..1ced2b40 100644
--- a/website/docs/r/project.html.markdown
+++ b/website/docs/r/project.html.markdown
@@ -46,13 +46,13 @@
 ## Argument Reference

-- `key` - (Required) The project's unique key.
+- `key` - (Required) The project's unique key. A change in this field will force the destruction of the existing resource and the creation of a new one.

 - `name` - (Required) The project's name.

 - `environments` - (Required) List of nested `environments` blocks describing LaunchDarkly environments that belong to the project. When managing LaunchDarkly projects in Terraform, you should always manage your environments as nested project resources. To learn more, read [Nested Environments Blocks](#nested-environments-blocks).

 ### Nested Environments Blocks

 -> **Note:** Mixing the use of nested `environments` blocks and [`launchdarkly_environment`](/docs/providers/launchdarkly/r/environment.html) resources is not recommended. `launchdarkly_environment` resources should only be used when the encapsulating project is not managed in Terraform.

 - `include_in_snippet` - **Deprecated** (Optional) Whether feature flags created under the project should be available to client-side SDKs by default. Please migrate to `default_client_side_availability` to maintain future compatibility.
@@ -67,7 +67,7 @@
 Nested `environments` blocks have the following structure:

 - `name` - (Required) The name of the environment.

-- `key` - (Required) The project-unique key for the environment.
+- `key` - (Required) The project-unique key for the environment. A change in this field will force the destruction of the existing environment and the creation of a new one.

 - `color` - (Required) The color swatch as an RGB hex value with no leading `#`. For example: `000000`.
diff --git a/website/docs/r/relay_proxy_configuration.html.markdown b/website/docs/r/relay_proxy_configuration.html.markdown
new file mode 100644
index 00000000..19cc5253
--- /dev/null
+++ b/website/docs/r/relay_proxy_configuration.html.markdown
@@ -0,0 +1,67 @@
+---
+title: "launchdarkly_relay_proxy_configuration"
+description: "Create and manage Relay Proxy configurations"
+---
+
+# launchdarkly_relay_proxy_configuration
+
+Provides a LaunchDarkly Relay Proxy configuration resource for use with the Relay Proxy's [automatic configuration feature](https://docs.launchdarkly.com/home/relay-proxy/automatic-configuration).
+
+-> **Note:** Relay Proxy automatic configuration is available to customers on an Enterprise LaunchDarkly plan. To learn more, read about our pricing. To upgrade your plan, [contact LaunchDarkly Sales](https://launchdarkly.com/contact-sales/).
+
+This resource allows you to create and manage Relay Proxy configurations within your LaunchDarkly organization.
+
+-> **Note:** This resource will store the full plaintext secret for your Relay Proxy configuration's unique key in Terraform state.
+Be sure your state is configured securely before using this resource. See https://www.terraform.io/docs/state/sensitive-data.html for more details.
+
+## Example Usage
+
+```hcl
+resource "launchdarkly_relay_proxy_configuration" "example" {
+  name = "example-config"
+  policy {
+    actions   = ["*"]
+    effect    = "allow"
+    resources = ["proj/*:env/*"]
+  }
+}
+```
+
+## Argument Reference
+
+- `name` - (Required) The human-readable name for your Relay Proxy configuration.
+
+- `policy` - (Required) The Relay Proxy configuration's rule policy block. This determines what content the Relay Proxy receives. To learn more, read [Understanding policies](https://docs.launchdarkly.com/home/members/role-policies#understanding-policies).
+
+Relay Proxy configuration `policy` blocks are composed of the following arguments:
+
+- `effect` - (Required) Either `allow` or `deny`. This argument defines whether the rule policy allows or denies access to the named resources and actions.
+
+- `resources` - (Optional) The list of resource specifiers defining the resources to which the rule policy applies. Either `resources` or `not_resources` must be specified. For a list of available resources, read [Understanding resource types and scopes](https://docs.launchdarkly.com/home/account-security/custom-roles/resources#understanding-resource-types-and-scopes).
+
+- `not_resources` - (Optional) The list of resource specifiers defining the resources to which the rule policy does not apply. Either `resources` or `not_resources` must be specified (see the additional example below). For a list of available resources, read [Understanding resource types and scopes](https://docs.launchdarkly.com/home/account-security/custom-roles/resources#understanding-resource-types-and-scopes).
+
+- `actions` - (Optional) The list of action specifiers defining the actions to which the rule policy applies. Either `actions` or `not_actions` must be specified. For a list of available actions, read [Actions reference](https://docs.launchdarkly.com/home/account-security/custom-roles/actions#actions-reference).
+
+- `not_actions` - (Optional) The list of action specifiers defining the actions to which the rule policy does not apply. Either `actions` or `not_actions` must be specified. For a list of available actions, read [Actions reference](https://docs.launchdarkly.com/home/account-security/custom-roles/actions#actions-reference).
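+
+As an additional illustrative sketch, a policy can also be defined in terms of what it excludes by using `not_resources` in place of `resources`; the `internal-project` key below is hypothetical:
+
+```hcl
+resource "launchdarkly_relay_proxy_configuration" "example_exclusion" {
+  name = "example-config-with-exclusion"
+  policy {
+    effect = "allow"
+    # Receive content for every project except the hypothetical internal-project.
+    not_resources = ["proj/internal-project:env/*"]
+    actions       = ["*"]
+  }
+}
+```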
+
+## Attribute Reference
+
+- `id` - The Relay Proxy configuration's ID.
+
+- `full_key` - The Relay Proxy configuration's unique key. Because the `full_key` is only exposed upon creation, it will not be available if the resource is imported.
+
+- `display_key` - The last 4 characters of the Relay Proxy configuration's unique key.
+
+## Import
+
+Relay Proxy configurations can be imported using the configuration's unique 24-character ID, e.g.
+
+```shell-session
+$ terraform import launchdarkly_relay_proxy_configuration.example 51d440e30c9ff61457c710f6
+```
+
+The unique relay proxy ID can be found in the relay proxy edit page URL, which you can locate by clicking the three-dot menu on your relay proxy item in the UI and selecting 'Edit configuration':
+
+```
+https://app.launchdarkly.com/settings/relay/<id>/edit
+```
diff --git a/website/docs/r/segment.html.markdown b/website/docs/r/segment.html.markdown
index abcd3c35..159ccfab 100644
--- a/website/docs/r/segment.html.markdown
+++ b/website/docs/r/segment.html.markdown
@@ -37,11 +37,11 @@ resource "launchdarkly_segment" "example" {
 ## Argument Reference

-- `key` - (Required) The unique key that references the segment.
+- `key` - (Required) The unique key that references the segment. A change in this field will force the destruction of the existing resource and the creation of a new one.

-- `project_key` - (Required) The segment's project key.
+- `project_key` - (Required) The segment's project key. A change in this field will force the destruction of the existing resource and the creation of a new one.

-- `env_key` - (Required) The segment's environment key.
+- `env_key` - (Required) The segment's environment key. A change in this field will force the destruction of the existing resource and the creation of a new one.

 - `name` - (Required) The human-friendly name for the segment.
diff --git a/website/docs/r/team_member.html.markdown b/website/docs/r/team_member.html.markdown
index 85c1d571..fceb3575 100644
--- a/website/docs/r/team_member.html.markdown
+++ b/website/docs/r/team_member.html.markdown
@@ -26,7 +26,7 @@ resource "launchdarkly_team_member" "example" {
 ## Argument Reference

-- `email` - (Required) The unique email address associated with the team member.
+- `email` - (Required) The unique email address associated with the team member. A change in this field will force the destruction of the existing resource and the creation of a new one.

 - `first_name` - (Optional) The team member's given name. Please note that, once created, this cannot be updated except by the team member themself.
diff --git a/website/launchdarkly.erb b/website/launchdarkly.erb
index 5065c875..08ecc2eb 100644
--- a/website/launchdarkly.erb
+++ b/website/launchdarkly.erb
@@ -11,6 +11,9 @@